summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--common/shaders/math/sdf_shapes.wgsl32
-rw-r--r--common/shaders/render/raymarching.wgsl4
-rw-r--r--doc/HEADLESS_MODE.md10
-rw-r--r--doc/HOWTO.md26
-rw-r--r--src/app/main.cc47
-rw-r--r--src/app/test_demo.cc7
-rw-r--r--src/audio/audio.cc29
-rw-r--r--src/audio/audio.h6
-rw-r--r--src/audio/tracker.cc10
-rw-r--r--src/tests/audio/test_jittered_audio.cc5
-rw-r--r--src/tests/audio/test_silent_backend.cc8
-rw-r--r--src/tests/audio/test_wav_dump.cc2
-rw-r--r--tools/timeline_editor/index.html210
-rw-r--r--workspaces/main/beat_test.track44
-rw-r--r--workspaces/main/pop_punk_drums.track2
-rw-r--r--workspaces/main/shaders/scene1.wgsl81
-rw-r--r--workspaces/main/shaders/sdf_test.wgsl9
-rw-r--r--workspaces/main/workspace.cfg3
18 files changed, 406 insertions, 129 deletions
diff --git a/common/shaders/math/sdf_shapes.wgsl b/common/shaders/math/sdf_shapes.wgsl
index 64df4fc..4dcfdd6 100644
--- a/common/shaders/math/sdf_shapes.wgsl
+++ b/common/shaders/math/sdf_shapes.wgsl
@@ -23,34 +23,8 @@ fn sdBox2D(p: vec2<f32>, b: vec2<f32>) -> f32 {
return length(max(d, vec2<f32>(0.0))) + min(max(d.x, d.y), 0.0);
}
+// Approximate
fn sdEllipse(p: vec2<f32>, ab: vec2<f32>) -> f32 {
- var p_abs = abs(p);
- if (p_abs.x > p_abs.y) {
- p_abs = vec2<f32>(p_abs.y, p_abs.x);
- }
- let l = ab.y * ab.y - ab.x * ab.x;
- let m = ab.x * p_abs.x / l;
- let n = ab.y * p_abs.y / l;
- let m2 = m * m;
- let n2 = n * n;
- let c = (m2 + n2 - 1.0) / 3.0;
- let c3 = c * c * c;
- let d = c3 + m2 * n2;
- let g = m + m * n2;
- var co: f32;
- if (d < 0.0) {
- let h = acos((c3 + m2 * n2 * 2.0) / c3) / 3.0;
- let s = cos(h);
- let t = sin(h) * sqrt(3.0);
- co = (sqrt(-c * (s + t * 2.0) + m2) + sign(l) * sqrt(-c * (s - t * 2.0) + m2) + abs(g) / (sqrt(-c * (s + t * 2.0) + m2) * sqrt(-c * (s - t * 2.0) + m2)) - m) / 2.0;
- } else {
- let h = 2.0 * m * n * sqrt(d);
- let s = sign(c3 + m2 * n2 + h) * pow(abs(c3 + m2 * n2 + h), 1.0 / 3.0);
- let u = sign(c3 + m2 * n2 - h) * pow(abs(c3 + m2 * n2 - h), 1.0 / 3.0);
- let rx = -s - u + m2 * 2.0;
- let ry = (s - u) * sqrt(3.0);
- co = (ry / sqrt(sqrt(rx * rx + ry * ry) - rx) + 2.0 * g / sqrt(rx * rx + ry * ry) - m) / 2.0;
- }
- let si = sqrt(max(0.0, 1.0 - co * co));
- return length(p_abs - vec2<f32>(ab.x * co, ab.y * si)) * sign(p_abs.y * ab.x * co - p_abs.x * ab.y * si);
+ let d = length(p / ab);
+ return length(p) * (1.0 - 1.0 / d);
}
diff --git a/common/shaders/render/raymarching.wgsl b/common/shaders/render/raymarching.wgsl
index 3adec8d..7d05528 100644
--- a/common/shaders/render/raymarching.wgsl
+++ b/common/shaders/render/raymarching.wgsl
@@ -26,8 +26,8 @@ fn normal(pos: vec3<f32>) -> vec3<f32> {
// Performs the raymarching operation.
// Returns the distance along the ray to the surface, or MAX_RAY_LENGTH if no surface is hit.
-fn rayMarch(ro: vec3<f32>, rd: vec3<f32>, initt: f32) -> f32 {
- var t = initt;
+fn rayMarch(ro: vec3<f32>, rd: vec3<f32>, tmin: f32) -> f32 {
+ var t = tmin;
for (var i = 0; i < MAX_RAY_MARCHES; i++) {
if (t > MAX_RAY_LENGTH) {
t = MAX_RAY_LENGTH;
diff --git a/doc/HEADLESS_MODE.md b/doc/HEADLESS_MODE.md
index f139317..85abbaf 100644
--- a/doc/HEADLESS_MODE.md
+++ b/doc/HEADLESS_MODE.md
@@ -17,10 +17,18 @@ cmake --build build_headless -j4
# Custom duration
./build_headless/demo64k --headless --duration 60
-# Audio validation
+# Audio validation (full demo or 60s)
./build_headless/demo64k --dump-wav test.wav
+
+# Render specific time range
+./build_headless/demo64k --dump-wav test.wav --dump-wav-start 10 --dump-wav-duration 5
```
+**WAV Dump Options:**
+- `--dump-wav [FILE]` - Output filename (default: audio_dump.wav)
+- `--dump-wav-start TIME` - Start at time (seeks first, default: 0)
+- `--dump-wav-duration TIME` - Duration limit (default: demo length or 60s)
+
Test script: `./scripts/test_headless.sh`
## vs STRIP_EXTERNAL_LIBS
diff --git a/doc/HOWTO.md b/doc/HOWTO.md
index 506bf0a..0dc9ec7 100644
--- a/doc/HOWTO.md
+++ b/doc/HOWTO.md
@@ -25,7 +25,15 @@ cmake -S . -B build
cmake --build build -j4
./build/demo64k
```
-Options: `--fullscreen`, `--resolution WxH`, `--seek TIME`, `--hot-reload`
+
+**CLI Options:**
+- `--fullscreen` - Fullscreen mode
+- `--resolution WxH` - Window resolution (e.g., 1920x1080)
+- `--seek TIME` - Start at time (seconds)
+- `--hot-reload` - Watch config files for changes
+- `--dump-wav [FILE]` - Render audio to WAV file
+- `--dump-wav-start TIME` - Start WAV dump at time (seeks first)
+- `--dump-wav-duration TIME` - Limit WAV dump duration
### Production Builds
```bash
@@ -252,6 +260,21 @@ Features: Drag/drop, beat-based editing, audio playback, waveform visualization,
## Audio
+### Rendering Audio to WAV
+
+```bash
+# Render full demo
+./build/demo64k --dump-wav output.wav
+
+# Render specific time range
+./build/demo64k --dump-wav output.wav --dump-wav-start 10 --dump-wav-duration 5
+
+# Render first 30 seconds
+./build/demo64k --dump-wav output.wav --dump-wav-duration 30
+```
+
+### API Usage
+
```cpp
#include "audio/audio_engine.h"
@@ -262,6 +285,7 @@ g_audio_engine.update(music_time);
g_audio_engine.shutdown();
audio_shutdown();
```
+
See `doc/TRACKER.md` for music system.
---
diff --git a/src/app/main.cc b/src/app/main.cc
index 90e3015..537da74 100644
--- a/src/app/main.cc
+++ b/src/app/main.cc
@@ -45,6 +45,8 @@ int main(int argc, char** argv) {
int width = 1280;
int height = 720;
bool dump_wav = false;
+ float dump_wav_start = -1.0f;
+ float dump_wav_duration = -1.0f;
bool tempo_test_enabled = false;
bool headless_mode = false;
float headless_duration = 30.0f;
@@ -73,6 +75,12 @@ int main(int argc, char** argv) {
if (i + 1 < argc && argv[i + 1][0] != '-') {
wav_output_file = argv[++i];
}
+ } else if (strcmp(argv[i], "--dump-wav-start") == 0 && i + 1 < argc) {
+ dump_wav_start = atof(argv[i + 1]);
+ ++i;
+ } else if (strcmp(argv[i], "--dump-wav-duration") == 0 && i + 1 < argc) {
+ dump_wav_duration = atof(argv[i + 1]);
+ ++i;
} else if (strcmp(argv[i], "--tempo") == 0) {
tempo_test_enabled = true;
#if defined(DEMO_HEADLESS)
@@ -198,8 +206,8 @@ int main(int argc, char** argv) {
}
#endif /* !defined(STRIP_ALL) */
- // Pre-fill using same pattern as main loop (100ms)
- fill_audio_buffer(0.1f, 0.0);
+ // Pre-fill ring buffer to target lookahead (prevents startup delay)
+ fill_audio_buffer(audio_get_required_prefill_time(), 0.0);
audio_start();
g_last_audio_time = audio_get_playback_time(); // Initialize after start
@@ -247,10 +255,29 @@ int main(int argc, char** argv) {
#if !defined(STRIP_ALL)
// In WAV dump mode, run headless simulation and write audio to file
if (dump_wav) {
- printf("Running WAV dump simulation...\n");
-
+ // Determine start and end times
+ const float start_time = (dump_wav_start >= 0.0f) ? dump_wav_start : 0.0f;
const float demo_duration = GetDemoDuration();
- const float max_duration = (demo_duration > 0.0f) ? demo_duration : 60.0f;
+ float end_time;
+ if (dump_wav_duration >= 0.0f) {
+ end_time = start_time + dump_wav_duration;
+ } else {
+ end_time = (demo_duration > 0.0f) ? demo_duration : 60.0f;
+ }
+
+ printf("Running WAV dump simulation (%.1fs - %.1fs)...\n", start_time,
+ end_time);
+
+ // Seek to start time if needed
+ if (start_time > 0.0f) {
+ const double step = 1.0 / 60.0;
+ for (double t = 0.0; t < start_time; t += step) {
+ fill_audio_buffer(step, t);
+ audio_render_silent((float)step);
+ }
+ printf("Seeked to %.1fs\n", start_time);
+ }
+
const float update_dt = 1.0f / 60.0f; // 60Hz update rate
const int frames_per_update = (int)(32000 * update_dt); // ~533 frames
const int samples_per_update = frames_per_update * 2; // Stereo
@@ -258,8 +285,8 @@ int main(int argc, char** argv) {
AudioRingBuffer* ring_buffer = audio_get_ring_buffer();
std::vector<float> chunk_buffer(samples_per_update);
- double physical_time = 0.0;
- while (physical_time < max_duration) {
+ double physical_time = start_time;
+ while (physical_time < end_time) {
// Update music time and tracker (using tempo logic from
// fill_audio_buffer)
fill_audio_buffer(update_dt, physical_time);
@@ -278,13 +305,13 @@ int main(int argc, char** argv) {
if ((int)physical_time % 1 == 0 &&
physical_time - update_dt < (int)physical_time) {
printf(" Rendering: %.1fs / %.1fs (music: %.1fs, tempo: %.2fx)\r",
- physical_time, max_duration, g_music_time, g_tempo_scale);
+ physical_time, end_time, g_music_time, g_tempo_scale);
fflush(stdout);
}
}
- printf("\nWAV dump complete: %.2fs physical, %.2fs music time\n",
- physical_time, g_music_time);
+ printf("\nWAV dump complete: %.2fs (%.2fs - %.2fs), music: %.2fs\n",
+ physical_time - start_time, start_time, physical_time, g_music_time);
#if defined(DEMO_HEADLESS)
g_wav_backend_ptr = nullptr;
diff --git a/src/app/test_demo.cc b/src/app/test_demo.cc
index 39dbcba..5775e74 100644
--- a/src/app/test_demo.cc
+++ b/src/app/test_demo.cc
@@ -280,14 +280,13 @@ int main(int argc, char** argv) {
g_tempo_scale = 1.0f; // No tempo variation
}
- g_music_time += audio_dt * g_tempo_scale;
-
g_audio_engine.update(g_music_time, audio_dt * g_tempo_scale);
audio_render_ahead(g_music_time, audio_dt * g_tempo_scale);
+ g_music_time += audio_dt * g_tempo_scale;
};
- // Pre-fill using same pattern as main loop (100ms)
- fill_audio_buffer(0.1f, 0.0);
+ // Pre-fill ring buffer to target lookahead (prevents startup delay)
+ fill_audio_buffer(audio_get_required_prefill_time(), 0.0);
audio_start();
g_last_audio_time = audio_get_playback_time();
diff --git a/src/audio/audio.cc b/src/audio/audio.cc
index d044b00..ba76a28 100644
--- a/src/audio/audio.cc
+++ b/src/audio/audio.cc
@@ -57,11 +57,35 @@ void audio_init() {
g_audio_backend->init();
}
+float audio_get_required_prefill_time() {
+ return (float)RING_BUFFER_LOOKAHEAD_MS / 1000.0f;
+}
+
+bool audio_is_prefilled() {
+ const int buffered = g_ring_buffer.available_read();
+ const float buffered_time =
+ (float)buffered / (RING_BUFFER_SAMPLE_RATE * RING_BUFFER_CHANNELS);
+ const float required = audio_get_required_prefill_time();
+ return buffered_time >= (required - 0.001f); // 1ms tolerance
+}
+
void audio_start() {
if (g_audio_backend == nullptr) {
printf("Cannot start: audio not initialized.\n");
return;
}
+
+#if !defined(STRIP_ALL)
+ if (!audio_is_prefilled()) {
+ const int buffered = g_ring_buffer.available_read();
+ const float buffered_ms =
+ (float)buffered / (RING_BUFFER_SAMPLE_RATE * RING_BUFFER_CHANNELS) *
+ 1000.0f;
+ printf("WARNING: Audio buffer not pre-filled (%.1fms < %.1fms)\n",
+ buffered_ms, audio_get_required_prefill_time() * 1000.0f);
+ }
+#endif
+
g_audio_backend->start();
}
@@ -73,6 +97,11 @@ void audio_render_ahead(float music_time, float dt, float target_fill) {
// Render in small chunks to keep synth time synchronized with tracker
// Chunk size: one frame's worth of audio (~16.6ms @ 60fps)
+ // TODO(timing): CRITICAL BUG - Truncation here may cause 180ms drift over 63 beats
+ // (int) cast loses fractional samples: 0.333 samples/frame * 2560 frames = 853 samples = 27ms
+ // But observed drift is 180ms, so this is not the only source (27ms < 180ms)
+ // NOTE: This is NOT a float vs double precision issue - floats handle <500s times fine
+ // See also: tracker.cc BPM timing calculation
const int chunk_frames = (int)(dt * RING_BUFFER_SAMPLE_RATE);
const int chunk_samples = chunk_frames * RING_BUFFER_CHANNELS;
diff --git a/src/audio/audio.h b/src/audio/audio.h
index 9d521e6..beb994f 100644
--- a/src/audio/audio.h
+++ b/src/audio/audio.h
@@ -23,6 +23,12 @@ struct SpecHeader {
void audio_init();
void audio_start(); // Starts the audio device callback
+// Get required pre-fill time (matches ring buffer lookahead)
+float audio_get_required_prefill_time();
+
+// Check if buffer is sufficiently pre-filled
+bool audio_is_prefilled();
+
// Ring buffer audio rendering (main thread fills buffer)
// target_fill: Target buffer fill time in seconds (default:
// RING_BUFFER_LOOKAHEAD_MS/1000)
diff --git a/src/audio/tracker.cc b/src/audio/tracker.cc
index 1c0a9b2..37f0683 100644
--- a/src/audio/tracker.cc
+++ b/src/audio/tracker.cc
@@ -234,6 +234,16 @@ static void trigger_note_event(const TrackerEvent& event,
}
void tracker_update(float music_time_sec, float dt_music_sec) {
+ // TODO(timing): CRITICAL BUG - Events trigger ~180ms early over 63 beats @ BPM=90
+ // Observed: Beat 63 snare at 41.82s in WAV, should be at 42.00s (180ms drift)
+ // NOTE: This is NOT a float vs double precision issue - floats handle <500s times fine
+ // Root cause unknown - suspects:
+ // 1. Systematic bias in time calculation (not random accumulation)
+ // 2. Truncation in audio.cc:103 chunk_frames = (int)(dt * sample_rate)
+ // 3. BPM calculation precision below (unit_duration_sec)
+ // 4. Mismatch between tracker time and actual sample rendering
+ // See also: audio.cc sample rendering truncation
+
// Unit-less timing: 1 unit = 4 beats (by convention)
const float BEATS_PER_UNIT = 4.0f;
const float unit_duration_sec =
diff --git a/src/tests/audio/test_jittered_audio.cc b/src/tests/audio/test_jittered_audio.cc
index d8260ec..d7c6a7d 100644
--- a/src/tests/audio/test_jittered_audio.cc
+++ b/src/tests/audio/test_jittered_audio.cc
@@ -42,11 +42,10 @@ void test_jittered_audio_basic() {
float music_time = 0.0f;
for (float t = 0.0f; t < total_time; t += dt) {
- music_time += dt; // Normal tempo
-
// Update tracker and fill buffer
tracker_update(music_time, dt);
audio_render_ahead(music_time, dt);
+ music_time += dt;
// Sleep minimal time to let audio thread run
std::this_thread::sleep_for(std::chrono::milliseconds(1));
@@ -114,7 +113,7 @@ void test_jittered_audio_with_acceleration() {
// Update tracker and fill buffer
tracker_update(music_time, dt * tempo_scale);
- audio_render_ahead(music_time, dt);
+ (void)audio_render_ahead(music_time, dt);
// Sleep minimal time to let audio thread run
std::this_thread::sleep_for(std::chrono::milliseconds(1));
diff --git a/src/tests/audio/test_silent_backend.cc b/src/tests/audio/test_silent_backend.cc
index 3dc1cd4..cecf72c 100644
--- a/src/tests/audio/test_silent_backend.cc
+++ b/src/tests/audio/test_silent_backend.cc
@@ -97,7 +97,7 @@ void test_silent_backend_tracking() {
assert(backend.get_voice_trigger_count() == 1);
// Render audio (calls on_frames_rendered)
- audio_render_ahead(0.0f, 0.1f); // Render ~0.1 seconds
+ (void)audio_render_ahead(0.0f, 0.1f); // Render ~0.1 seconds
assert(backend.get_frames_rendered() > 0);
// Reset stats
@@ -123,7 +123,7 @@ void test_audio_playback_time() {
assert(t0 == 0.0f);
// Render some audio
- audio_render_ahead(0.5f, 0.1f); // Advance music time to 0.5s
+ (void)audio_render_ahead(0.5f, 0.1f); // Advance music time to 0.5s
// Playback time should advance based on frames rendered
// Note: audio_get_playback_time() tracks cumulative frames consumed
@@ -131,7 +131,7 @@ void test_audio_playback_time() {
assert(t1 >= 0.0f); // Should have advanced
// Render more
- audio_render_ahead(1.0f, 0.5f);
+ (void)audio_render_ahead(1.0f, 0.5f);
float t2 = audio_get_playback_time();
assert(t2 >= t1); // Should continue advancing
@@ -152,7 +152,7 @@ void test_audio_buffer_partial_writes() {
// Note: With SilentBackend, frames_rendered won't increase because
// there's no audio callback consuming from the ring buffer
for (int i = 0; i < 10; ++i) {
- audio_render_ahead((float)i * 0.1f, 0.1f);
+ (void)audio_render_ahead((float)i * 0.1f, 0.1f);
}
// Buffer should have handled multiple writes correctly (no crash)
diff --git a/src/tests/audio/test_wav_dump.cc b/src/tests/audio/test_wav_dump.cc
index a0f2a4a..ce161a4 100644
--- a/src/tests/audio/test_wav_dump.cc
+++ b/src/tests/audio/test_wav_dump.cc
@@ -60,10 +60,10 @@ void test_wav_format_matches_live_audio() {
for (float t = 0.0f; t < duration; t += update_dt) {
// Update audio engine (triggers patterns)
fixture.engine().update(music_time, update_dt);
- music_time += update_dt;
// Render audio ahead
audio_render_ahead(music_time, update_dt);
+ music_time += update_dt;
// Read from ring buffer
if (ring_buffer != nullptr) {
diff --git a/tools/timeline_editor/index.html b/tools/timeline_editor/index.html
index eca7b97..775330f 100644
--- a/tools/timeline_editor/index.html
+++ b/tools/timeline_editor/index.html
@@ -42,14 +42,14 @@
.timeline-container { background: var(--bg-medium); border-radius: 8px; position: relative; height: calc(100vh - 280px); min-height: 500px; display: flex; flex-direction: column; }
.timeline-content { flex: 1; overflow: auto; position: relative; padding: 0 20px 20px 20px; scrollbar-width: none; -ms-overflow-style: none; }
.timeline-content::-webkit-scrollbar { display: none; }
- .timeline { position: relative; min-height: 100%; border-left: 2px solid var(--bg-light); }
+ .timeline { position: relative; min-height: 100%; }
.sticky-header { position: sticky; top: 0; background: var(--bg-medium); z-index: 100; padding: 20px 20px 10px 20px; border-bottom: 2px solid var(--bg-light); flex-shrink: 0; }
.waveform-container { position: relative; height: 80px; overflow: hidden; background: rgba(0, 0, 0, 0.3); border-radius: var(--radius); cursor: crosshair; }
#cpuLoadCanvas { position: absolute; left: 0; bottom: 0; height: 10px; display: block; z-index: 1; }
#waveformCanvas { position: absolute; left: 0; top: 0; height: 80px; display: block; z-index: 2; }
- .playback-indicator { position: absolute; top: 0; left: 0; width: 2px; background: var(--accent-red); box-shadow: 0 0 4px rgba(244, 135, 113, 0.8); pointer-events: none; z-index: 90; display: block; }
+ .playback-indicator { position: absolute; top: 0; bottom: 0; left: 20px; width: 2px; background: var(--accent-red); box-shadow: 0 0 4px rgba(244, 135, 113, 0.8); pointer-events: none; z-index: 110; display: none; }
.time-markers { position: relative; height: 30px; margin-top: var(--gap); border-bottom: 1px solid var(--bg-light); }
.time-marker { position: absolute; top: 0; font-size: 12px; color: var(--text-muted); }
@@ -125,7 +125,7 @@
<label>Zoom: <input type="range" id="zoomSlider" min="10" max="200" value="100" step="10"></label>
<span id="zoomLevel">100%</span>
<label style="margin-left: 20px">BPM: <input type="range" id="bpmSlider" min="60" max="200" value="120" step="1"></label>
- <span id="currentBPM">120</span>
+ <input type="number" id="currentBPM" value="120" min="60" max="200" step="1" style="width: 60px; padding: 4px; background: var(--bg-light); border: 1px solid var(--border-color); border-radius: var(--radius); color: var(--text-primary); text-align: center;">
<label class="checkbox-label" style="margin-left: 20px">
<input type="checkbox" id="showBeatsCheckbox" checked>Show Beats
</label>
@@ -143,22 +143,22 @@
<div id="playbackControls" style="display: none; margin-left: 20px; gap: 10px; align-items: center;">
<span id="playbackTime">0.00s (0.00b)</span>
<button id="playPauseBtn">▶ Play</button>
+ <button id="replayBtn" disabled>↻ Replay</button>
</div>
</div>
<div id="messageArea"></div>
<div class="timeline-container">
+ <div class="playback-indicator" id="playbackIndicator"></div>
<div class="sticky-header">
<div class="waveform-container" id="waveformContainer">
<canvas id="cpuLoadCanvas"></canvas>
<canvas id="waveformCanvas"></canvas>
- <div class="playback-indicator" id="waveformPlaybackIndicator"></div>
</div>
<div class="time-markers" id="timeMarkers"></div>
</div>
<div class="timeline-content" id="timelineContent">
- <div class="playback-indicator" id="playbackIndicator"></div>
<div class="timeline" id="timeline"></div>
</div>
</div>
@@ -182,13 +182,28 @@
'SolarizeEffect', 'VignetteEffect', 'ChromaAberrationEffect', 'DistortEffect',
'ThemeModulationEffect', 'CNNEffect', 'CNNv2Effect']);
+ const TIMELINE_LEFT_PADDING = 20;
+ const SCROLL_VIEWPORT_FRACTION = 0.4;
+ const SMOOTH_SCROLL_SPEED = 0.1;
+ const VERTICAL_SCROLL_SPEED = 0.3;
+ const SEQUENCE_GAP = 10;
+ const SEQUENCE_DEFAULT_WIDTH = 10;
+ const SEQUENCE_DEFAULT_DURATION = 16;
+ const SEQUENCE_MIN_HEIGHT = 70;
+ const SEQUENCE_COLLAPSED_HEIGHT = 35;
+ const SEQUENCE_TOP_PADDING = 20;
+ const SEQUENCE_BOTTOM_PADDING = 5;
+ const EFFECT_SPACING = 30;
+ const EFFECT_HEIGHT = 26;
+ const WAVEFORM_AMPLITUDE_SCALE = 0.4;
+
// State
const state = {
sequences: [], currentFile: null, selectedItem: null, pixelsPerSecond: 100,
showBeats: true, quantizeUnit: 1, bpm: 120, isDragging: false, dragOffset: { x: 0, y: 0 },
lastActiveSeqIndex: -1, isDraggingHandle: false, handleType: null, handleDragOffset: 0,
audioBuffer: null, audioDuration: 0, audioSource: null, audioContext: null,
- isPlaying: false, playbackStartTime: 0, playbackOffset: 0, animationFrameId: null,
+ isPlaying: false, playbackStartTime: 0, playbackOffset: 0, playStartPosition: 0, animationFrameId: null,
lastExpandedSeqIndex: -1, dragMoved: false
};
@@ -215,9 +230,9 @@
stats: document.getElementById('stats'),
playbackControls: document.getElementById('playbackControls'),
playPauseBtn: document.getElementById('playPauseBtn'),
+ replayBtn: document.getElementById('replayBtn'),
playbackTime: document.getElementById('playbackTime'),
playbackIndicator: document.getElementById('playbackIndicator'),
- waveformPlaybackIndicator: document.getElementById('waveformPlaybackIndicator'),
panelToggle: document.getElementById('panelToggle'),
panelCollapseBtn: document.getElementById('panelCollapseBtn'),
bpmSlider: document.getElementById('bpmSlider'),
@@ -328,7 +343,9 @@
state.audioDuration = state.audioBuffer.duration;
renderWaveform();
dom.playbackControls.style.display = 'flex';
+ dom.playbackIndicator.style.display = 'block';
dom.clearAudioBtn.disabled = false;
+ dom.replayBtn.disabled = false;
showMessage(`Audio loaded: ${state.audioDuration.toFixed(2)}s`, 'success');
renderTimeline();
} catch (err) {
@@ -339,18 +356,28 @@
function renderWaveform() {
if (!state.audioBuffer) return;
const canvas = dom.waveformCanvas, ctx = canvas.getContext('2d');
- const w = timeToBeats(state.audioDuration) * state.pixelsPerSecond, h = 80;
+
+ // Calculate maxTime same as timeline to ensure alignment
+ let maxTime = 60;
+ for (const seq of state.sequences) {
+ maxTime = Math.max(maxTime, seq.startTime + SEQUENCE_DEFAULT_DURATION);
+ for (const effect of seq.effects) maxTime = Math.max(maxTime, seq.startTime + effect.endTime);
+ }
+ if (state.audioDuration > 0) maxTime = Math.max(maxTime, state.audioDuration * state.bpm / 60.0);
+
+ const w = maxTime * state.pixelsPerSecond, h = 80;
canvas.width = w; canvas.height = h;
canvas.style.width = `${w}px`; canvas.style.height = `${h}px`;
- dom.waveformPlaybackIndicator.style.height = `${h}px`;
ctx.fillStyle = 'rgba(0, 0, 0, 0.3)'; ctx.fillRect(0, 0, w, h);
const channelData = state.audioBuffer.getChannelData(0);
- const samplesPerPixel = Math.ceil(channelData.length / w);
- const centerY = h / 2, amplitudeScale = h * 0.4;
+ const audioBeats = timeToBeats(state.audioDuration);
+ const audioPixelWidth = audioBeats * state.pixelsPerSecond;
+ const samplesPerPixel = Math.ceil(channelData.length / audioPixelWidth);
+ const centerY = h / 2, amplitudeScale = h * WAVEFORM_AMPLITUDE_SCALE;
ctx.strokeStyle = '#4ec9b0'; ctx.lineWidth = 1; ctx.beginPath();
- for (let x = 0; x < w; x++) {
+ for (let x = 0; x < audioPixelWidth; x++) {
const start = Math.floor(x * samplesPerPixel);
const end = Math.min(start + samplesPerPixel, channelData.length);
let min = 1.0, max = -1.0;
@@ -364,7 +391,18 @@
}
ctx.stroke();
ctx.strokeStyle = 'rgba(255, 255, 255, 0.1)';
- ctx.beginPath(); ctx.moveTo(0, centerY); ctx.lineTo(w, centerY); ctx.stroke();
+ ctx.beginPath(); ctx.moveTo(0, centerY); ctx.lineTo(audioPixelWidth, centerY); ctx.stroke();
+
+ // Draw beat markers across full maxTime width
+ ctx.strokeStyle = 'rgba(255, 255, 255, 0.15)';
+ ctx.lineWidth = 1;
+ for (let beat = 0; beat <= maxTime; beat++) {
+ const x = beat * state.pixelsPerSecond;
+ ctx.beginPath();
+ ctx.moveTo(x, 0);
+ ctx.lineTo(x, h);
+ ctx.stroke();
+ }
}
function computeCPULoad() {
@@ -450,12 +488,16 @@
}
function clearAudio() {
- stopPlayback(); state.audioBuffer = null; state.audioDuration = 0;
+ stopPlayback(); state.audioBuffer = null; state.audioDuration = 0; state.playbackOffset = 0;
+ state.playStartPosition = 0;
dom.playbackControls.style.display = 'none';
+ dom.playbackIndicator.style.display = 'none';
dom.clearAudioBtn.disabled = true;
+ dom.replayBtn.disabled = true;
const ctx = dom.waveformCanvas.getContext('2d');
ctx.clearRect(0, 0, dom.waveformCanvas.width, dom.waveformCanvas.height);
- renderTimeline(); showMessage('Audio cleared', 'success');
+ renderTimeline();
+ showMessage('Audio cleared', 'success');
}
async function startPlayback() {
@@ -493,10 +535,7 @@
const currentTime = state.playbackOffset + elapsed;
const currentBeats = timeToBeats(currentTime);
dom.playbackTime.textContent = `${currentTime.toFixed(2)}s (${currentBeats.toFixed(2)}b)`;
- const indicatorX = currentBeats * state.pixelsPerSecond;
- dom.playbackIndicator.style.left = dom.waveformPlaybackIndicator.style.left = `${indicatorX}px`;
- const scrollDiff = indicatorX - dom.timelineContent.clientWidth * 0.4 - dom.timelineContent.scrollLeft;
- if (Math.abs(scrollDiff) > 5) dom.timelineContent.scrollLeft += scrollDiff * 0.1;
+ updateIndicatorPosition(currentBeats, true);
expandSequenceAtTime(currentBeats);
state.animationFrameId = requestAnimationFrame(updatePlaybackPosition);
}
@@ -520,13 +559,24 @@
}
}
+ function updateIndicatorPosition(beats, smoothScroll = false) {
+ const timelineX = beats * state.pixelsPerSecond;
+ const scrollLeft = dom.timelineContent.scrollLeft;
+ dom.playbackIndicator.style.left = `${timelineX - scrollLeft + TIMELINE_LEFT_PADDING}px`;
+ if (smoothScroll) {
+ const targetScroll = timelineX - dom.timelineContent.clientWidth * SCROLL_VIEWPORT_FRACTION;
+ const scrollDiff = targetScroll - scrollLeft;
+ if (Math.abs(scrollDiff) > 5) dom.timelineContent.scrollLeft += scrollDiff * SMOOTH_SCROLL_SPEED;
+ }
+ }
+
// Render
function renderTimeline() {
renderCPULoad();
dom.timeline.innerHTML = ''; document.getElementById('timeMarkers').innerHTML = '';
let maxTime = 60;
for (const seq of state.sequences) {
- maxTime = Math.max(maxTime, seq.startTime + 16);
+ maxTime = Math.max(maxTime, seq.startTime + SEQUENCE_DEFAULT_DURATION);
for (const effect of seq.effects) maxTime = Math.max(maxTime, seq.startTime + effect.endTime);
}
if (state.audioDuration > 0) maxTime = Math.max(maxTime, state.audioDuration * state.bpm / 60.0);
@@ -548,24 +598,24 @@
marker.textContent = `${t}s`; timeMarkers.appendChild(marker);
}
}
- let cumulativeY = 0, sequenceGap = 10;
+ let cumulativeY = 0;
state.sequences.forEach((seq, seqIndex) => {
const seqDiv = document.createElement('div');
seqDiv.className = 'sequence'; seqDiv.dataset.index = seqIndex;
- let seqVisualStart = seq.startTime, seqVisualEnd = seq.startTime + 10;
+ let seqVisualStart = seq.startTime, seqVisualEnd = seq.startTime + SEQUENCE_DEFAULT_WIDTH;
if (seq.effects.length > 0) {
seqVisualStart = seq.startTime + Math.min(...seq.effects.map(e => e.startTime));
seqVisualEnd = seq.startTime + Math.max(...seq.effects.map(e => e.endTime));
}
if (seq._collapsed === undefined) seq._collapsed = false;
- const numEffects = seq.effects.length, effectSpacing = 30;
- const fullHeight = Math.max(70, 20 + numEffects * effectSpacing + 5);
- const seqHeight = seq._collapsed ? 35 : fullHeight;
+ const numEffects = seq.effects.length;
+ const fullHeight = Math.max(SEQUENCE_MIN_HEIGHT, SEQUENCE_TOP_PADDING + numEffects * EFFECT_SPACING + SEQUENCE_BOTTOM_PADDING);
+ const seqHeight = seq._collapsed ? SEQUENCE_COLLAPSED_HEIGHT : fullHeight;
seqDiv.style.left = `${seqVisualStart * state.pixelsPerSecond}px`;
seqDiv.style.top = `${cumulativeY}px`;
seqDiv.style.width = `${(seqVisualEnd - seqVisualStart) * state.pixelsPerSecond}px`;
seqDiv.style.height = `${seqHeight}px`; seqDiv.style.minHeight = `${seqHeight}px`; seqDiv.style.maxHeight = `${seqHeight}px`;
- seq._yPosition = cumulativeY; cumulativeY += seqHeight + sequenceGap; totalTimelineHeight = cumulativeY;
+ seq._yPosition = cumulativeY; cumulativeY += seqHeight + SEQUENCE_GAP; totalTimelineHeight = cumulativeY;
const seqHeaderDiv = document.createElement('div'); seqHeaderDiv.className = 'sequence-header';
const headerName = document.createElement('span'); headerName.className = 'sequence-header-name';
headerName.textContent = seq.name || `Sequence ${seqIndex + 1}`;
@@ -591,9 +641,9 @@
Object.assign(effectDiv.dataset, { seqIndex, effectIndex });
Object.assign(effectDiv.style, {
left: `${(seq.startTime + effect.startTime) * state.pixelsPerSecond}px`,
- top: `${seq._yPosition + 20 + effectIndex * 30}px`,
+ top: `${seq._yPosition + SEQUENCE_TOP_PADDING + effectIndex * EFFECT_SPACING}px`,
width: `${(effect.endTime - effect.startTime) * state.pixelsPerSecond}px`,
- height: '26px'
+ height: `${EFFECT_HEIGHT}px`
});
effectDiv.innerHTML = `<div class="effect-handle left"></div><small>${effect.className}</small><div class="effect-handle right"></div>`;
const conflictWarning = conflicts.has(effectIndex) ?
@@ -616,7 +666,6 @@
}
});
dom.timeline.style.minHeight = `${Math.max(totalTimelineHeight, dom.timelineContent.offsetHeight)}px`;
- if (dom.playbackIndicator) dom.playbackIndicator.style.height = `${Math.max(totalTimelineHeight, dom.timelineContent.offsetHeight)}px`;
updateStats();
}
@@ -800,9 +849,11 @@
if (!response.ok) throw new Error(`HTTP ${response.status}`);
const content = await response.text(), parsed = parseSeqFile(content);
state.sequences = parsed.sequences; state.bpm = parsed.bpm;
- dom.currentBPM.textContent = state.bpm; dom.bpmSlider.value = state.bpm;
+ dom.currentBPM.value = state.bpm; dom.bpmSlider.value = state.bpm;
state.currentFile = seqURL.split('/').pop();
+ state.playbackOffset = 0;
renderTimeline(); dom.saveBtn.disabled = false; dom.addSequenceBtn.disabled = false; dom.reorderBtn.disabled = false;
+ updateIndicatorPosition(0, false);
showMessage(`Loaded ${state.currentFile} from URL`, 'success');
} catch (err) { showMessage(`Error loading seq file: ${err.message}`, 'error'); }
}
@@ -826,8 +877,10 @@
try {
const parsed = parseSeqFile(e.target.result);
state.sequences = parsed.sequences; state.bpm = parsed.bpm;
- dom.currentBPM.textContent = state.bpm; dom.bpmSlider.value = state.bpm;
+ dom.currentBPM.value = state.bpm; dom.bpmSlider.value = state.bpm;
+ state.playbackOffset = 0;
renderTimeline(); dom.saveBtn.disabled = false; dom.addSequenceBtn.disabled = false; dom.reorderBtn.disabled = false;
+ updateIndicatorPosition(0, false);
showMessage(`Loaded ${state.currentFile} - ${state.sequences.length} sequences`, 'success');
} catch (err) { showMessage(`Error parsing file: ${err.message}`, 'error'); }
};
@@ -845,7 +898,20 @@
dom.clearAudioBtn.addEventListener('click', () => { clearAudio(); dom.audioInput.value = ''; });
dom.playPauseBtn.addEventListener('click', async () => {
if (state.isPlaying) stopPlayback();
- else { if (state.playbackOffset >= state.audioDuration) state.playbackOffset = 0; await startPlayback(); }
+ else {
+ if (state.playbackOffset >= state.audioDuration) state.playbackOffset = 0;
+ state.playStartPosition = state.playbackOffset;
+ await startPlayback();
+ }
+ });
+
+ dom.replayBtn.addEventListener('click', async () => {
+ stopPlayback(false);
+ state.playbackOffset = state.playStartPosition;
+ const replayBeats = timeToBeats(state.playbackOffset);
+ dom.playbackTime.textContent = `${state.playbackOffset.toFixed(2)}s (${replayBeats.toFixed(2)}b)`;
+ updateIndicatorPosition(replayBeats, false);
+ await startPlayback();
});
dom.waveformContainer.addEventListener('click', async e => {
@@ -860,8 +926,7 @@
state.playbackOffset = Math.max(0, Math.min(clickTime, state.audioDuration));
const pausedBeats = timeToBeats(state.playbackOffset);
dom.playbackTime.textContent = `${state.playbackOffset.toFixed(2)}s (${pausedBeats.toFixed(2)}b)`;
- const indicatorX = pausedBeats * state.pixelsPerSecond;
- dom.playbackIndicator.style.left = dom.waveformPlaybackIndicator.style.left = `${indicatorX}px`;
+ updateIndicatorPosition(pausedBeats, false);
if (wasPlaying) await startPlayback();
});
@@ -899,13 +964,32 @@
});
dom.zoomSlider.addEventListener('input', e => {
- state.pixelsPerSecond = parseInt(e.target.value); dom.zoomLevel.textContent = `${state.pixelsPerSecond}%`;
- if (state.audioBuffer) renderWaveform(); renderTimeline();
+ state.pixelsPerSecond = parseInt(e.target.value);
+ dom.zoomLevel.textContent = `${state.pixelsPerSecond}%`;
+ if (state.audioBuffer) renderWaveform();
+ renderTimeline();
+ updateIndicatorPosition(timeToBeats(state.playbackOffset), false);
});
dom.bpmSlider.addEventListener('input', e => {
- state.bpm = parseInt(e.target.value); dom.currentBPM.textContent = state.bpm;
- if (state.audioBuffer) renderWaveform(); renderTimeline();
+ state.bpm = parseInt(e.target.value);
+ dom.currentBPM.value = state.bpm;
+ if (state.audioBuffer) renderWaveform();
+ renderTimeline();
+ updateIndicatorPosition(timeToBeats(state.playbackOffset), false);
+ });
+
+ dom.currentBPM.addEventListener('change', e => {
+ const bpm = parseInt(e.target.value);
+ if (!isNaN(bpm) && bpm >= 60 && bpm <= 200) {
+ state.bpm = bpm;
+ dom.bpmSlider.value = bpm;
+ if (state.audioBuffer) renderWaveform();
+ renderTimeline();
+ updateIndicatorPosition(timeToBeats(state.playbackOffset), false);
+ } else {
+ e.target.value = state.bpm;
+ }
});
dom.showBeatsCheckbox.addEventListener('change', e => { state.showBeats = e.target.checked; renderTimeline(); });
@@ -917,7 +1001,7 @@
dom.timeline.addEventListener('dblclick', async e => {
if (e.target !== dom.timeline) return;
const containerRect = dom.timelineContent.getBoundingClientRect();
- const clickX = e.clientX - containerRect.left + dom.timelineContent.scrollLeft;
+ const clickX = e.clientX - containerRect.left + dom.timelineContent.scrollLeft - TIMELINE_LEFT_PADDING;
const clickBeats = clickX / state.pixelsPerSecond;
const clickTime = beatsToTime(clickBeats);
if (state.audioBuffer) {
@@ -926,21 +1010,23 @@
state.playbackOffset = Math.max(0, Math.min(clickTime, state.audioDuration));
const pausedBeats = timeToBeats(state.playbackOffset);
dom.playbackTime.textContent = `${state.playbackOffset.toFixed(2)}s (${pausedBeats.toFixed(2)}b)`;
- const indicatorX = pausedBeats * state.pixelsPerSecond;
- dom.playbackIndicator.style.left = dom.waveformPlaybackIndicator.style.left = `${indicatorX}px`;
+ updateIndicatorPosition(pausedBeats, false);
if (wasPlaying) await startPlayback();
showMessage(`Seek to ${clickTime.toFixed(2)}s (${clickBeats.toFixed(2)}b)`, 'success');
}
});
document.addEventListener('keydown', e => {
- if (e.code === 'Space' && state.audioBuffer) { e.preventDefault(); dom.playPauseBtn.click(); }
+ const isTyping = document.activeElement.tagName === 'INPUT' || document.activeElement.tagName === 'TEXTAREA';
+ if (e.code === 'Space' && state.audioBuffer && !isTyping) { e.preventDefault(); dom.playPauseBtn.click(); }
// Quantize hotkeys: 0=Off, 1=1beat, 2=1/2, 3=1/4, 4=1/8, 5=1/16, 6=1/32
- const quantizeMap = { '0': '0', '1': '1', '2': '2', '3': '4', '4': '8', '5': '16', '6': '32' };
- if (quantizeMap[e.key]) {
- state.quantizeUnit = parseFloat(quantizeMap[e.key]);
- dom.quantizeSelect.value = quantizeMap[e.key];
- e.preventDefault();
+ if (!isTyping) {
+ const quantizeMap = { '0': '0', '1': '1', '2': '2', '3': '4', '4': '8', '5': '16', '6': '32' };
+ if (quantizeMap[e.key]) {
+ state.quantizeUnit = parseFloat(quantizeMap[e.key]);
+ dom.quantizeSelect.value = quantizeMap[e.key];
+ e.preventDefault();
+ }
}
});
@@ -948,20 +1034,25 @@
const scrollLeft = dom.timelineContent.scrollLeft;
dom.cpuLoadCanvas.style.left = `-${scrollLeft}px`;
dom.waveformCanvas.style.left = `-${scrollLeft}px`;
- dom.waveformPlaybackIndicator.style.transform = `translateX(-${scrollLeft}px)`;
+ document.getElementById('timeMarkers').style.transform = `translateX(-${scrollLeft}px)`;
+ updateIndicatorPosition(timeToBeats(state.playbackOffset), false);
});
- dom.timelineContent.addEventListener('wheel', e => {
+ const handleWheel = e => {
e.preventDefault();
if (e.ctrlKey || e.metaKey) {
const rect = dom.timelineContent.getBoundingClientRect(), mouseX = e.clientX - rect.left;
const scrollLeft = dom.timelineContent.scrollLeft, timeUnderCursor = (scrollLeft + mouseX) / state.pixelsPerSecond;
- const zoomDelta = e.deltaY > 0 ? -10 : 10, oldPixelsPerSecond = state.pixelsPerSecond;
+ const zoomDelta = e.deltaY > 0 ? -10 : 10;
const newPixelsPerSecond = Math.max(10, Math.min(500, state.pixelsPerSecond + zoomDelta));
- if (newPixelsPerSecond !== oldPixelsPerSecond) {
- state.pixelsPerSecond = newPixelsPerSecond; dom.zoomSlider.value = state.pixelsPerSecond; dom.zoomLevel.textContent = `${state.pixelsPerSecond}%`;
- if (state.audioBuffer) renderWaveform(); renderTimeline();
+ if (newPixelsPerSecond !== state.pixelsPerSecond) {
+ state.pixelsPerSecond = newPixelsPerSecond;
+ dom.zoomSlider.value = state.pixelsPerSecond;
+ dom.zoomLevel.textContent = `${state.pixelsPerSecond}%`;
+ if (state.audioBuffer) renderWaveform();
+ renderTimeline();
dom.timelineContent.scrollLeft = timeUnderCursor * newPixelsPerSecond - mouseX;
+ updateIndicatorPosition(timeToBeats(state.playbackOffset), false);
}
return;
}
@@ -982,8 +1073,17 @@
}
const targetScrollTop = state.sequences[targetSeqIndex]?._yPosition || 0;
const currentScrollTop = dom.timelineContent.scrollTop, scrollDiff = targetScrollTop - currentScrollTop;
- if (Math.abs(scrollDiff) > 5) dom.timelineContent.scrollTop += scrollDiff * 0.3;
- }, { passive: false });
+ if (Math.abs(scrollDiff) > 5) dom.timelineContent.scrollTop += scrollDiff * VERTICAL_SCROLL_SPEED;
+ };
+
+ dom.timelineContent.addEventListener('wheel', handleWheel, { passive: false });
+ dom.waveformContainer.addEventListener('wheel', handleWheel, { passive: false });
+
+ // Prevent wheel events from bubbling up from UI containers
+ document.querySelector('header').addEventListener('wheel', e => e.stopPropagation());
+ dom.propertiesPanel.addEventListener('wheel', e => e.stopPropagation());
+ document.querySelector('.zoom-controls').addEventListener('wheel', e => e.stopPropagation());
+ document.querySelector('.stats').addEventListener('wheel', e => e.stopPropagation());
window.addEventListener('resize', renderTimeline);
renderTimeline(); loadFromURLParams();
diff --git a/workspaces/main/beat_test.track b/workspaces/main/beat_test.track
new file mode 100644
index 0000000..1b7f9b1
--- /dev/null
+++ b/workspaces/main/beat_test.track
@@ -0,0 +1,44 @@
+# Beat-test drum track (kick/snare/crash groove)
+# Converted from track.md drum sequence
+# 4/4 time signature, 16th note resolution
+
+BPM 90
+
+# Drum samples (General MIDI mapping)
+SAMPLE ASSET_KICK_1
+SAMPLE ASSET_SNARE_1
+SAMPLE ASSET_CRASH_1
+# Pattern A: Main Driving Groove (bars 1-3)
+# 1 unit = 4 beats, 16th notes = 0.0625 units apart
+PATTERN main_groove_crash LENGTH 1.0
+ # Snare: beats 2 and 4 (strong)
+ 0.2500, ASSET_SNARE_1, 1.0, 0.0
+ 0.7500, ASSET_SNARE_1, 1.0, 0.0
+  # Kick: beats 1 and 3
+ 0.0000, ASSET_KICK_1, 1.0, 0.0
+ 0.5000, ASSET_KICK_1, 1.0, 0.0
+ # Crash on beat 1
+ 0.0000, ASSET_CRASH_1, 0.9, 0.0
+
+
+# Score
+SCORE
+ 0.0, main_groove_crash
+ 1.0, main_groove_crash
+ 2.0, main_groove_crash
+ 3.0, main_groove_crash
+
+ 4.0, main_groove_crash
+ 5.0, main_groove_crash
+ 6.0, main_groove_crash
+ 7.0, main_groove_crash
+
+ 8.0, main_groove_crash
+ 9.0, main_groove_crash
+ 10.0, main_groove_crash
+ 11.0, main_groove_crash
+
+ 12.0, main_groove_crash
+ 13.0, main_groove_crash
+ 14.0, main_groove_crash
+ 15.0, main_groove_crash
diff --git a/workspaces/main/pop_punk_drums.track b/workspaces/main/pop_punk_drums.track
index f54bf9d..236b79f 100644
--- a/workspaces/main/pop_punk_drums.track
+++ b/workspaces/main/pop_punk_drums.track
@@ -1,6 +1,6 @@
# Pop-Punk High-Energy Drum Track
# Converted from track.md drum sequence
-# 165 BPM, 4/4 time signature, 16th note resolution
+# 4/4 time signature, 16th note resolution
BPM 90
diff --git a/workspaces/main/shaders/scene1.wgsl b/workspaces/main/shaders/scene1.wgsl
index 2723b66..8d5d5db 100644
--- a/workspaces/main/shaders/scene1.wgsl
+++ b/workspaces/main/shaders/scene1.wgsl
@@ -50,19 +50,22 @@ fn render0(ro: vec3<f32>, rd: vec3<f32>) -> vec3<f32> {
return clamp(col, vec3<f32>(0.0), vec3<f32>(10.0));
}
+const OBJ_BACKGROUND: f32 = 0.0;
+const OBJ_CUBE: f32 = 1.0;
+const OBJ_SPHERE: f32 = 2.0;
+const OBJ_PLANE: f32 = 3.0;
+
fn df(p_in: vec3<f32>) -> f32 {
var p = p_in;
p.x = p_in.x * g_rot0[0][0] + p_in.z * g_rot0[0][1];
p.z = p_in.x * g_rot0[1][0] + p_in.z * g_rot0[1][1];
// Cube
- var pc = p;
- pc -= vec3<f32>(-1.9, 0.0, 0.0);
+ var pc = p - vec3<f32>(-1.9, 0.0, 0.0);
let dCube = sdBox(pc, vec3<f32>(1.6));
// Sphere
- var ps = p;
- ps -= vec3<f32>(1.3, 0.0, 0.0);
+ var ps = p - vec3<f32>(1.3, 0.0, 0.0);
let dSphere = sdSphere(ps, 1.2);
// Ground plane
@@ -75,6 +78,41 @@ fn df(p_in: vec3<f32>) -> f32 {
return d;
}
+fn dfWithID(p_in: vec3<f32>) -> RayMarchResult {
+ var p = p_in;
+ p.x = p_in.x * g_rot0[0][0] + p_in.z * g_rot0[0][1];
+ p.z = p_in.x * g_rot0[1][0] + p_in.z * g_rot0[1][1];
+
+ // Cube
+ var pc = p - vec3<f32>(-1.9, 0.0, 0.0);
+ let dCube = sdBox(pc, vec3<f32>(1.6));
+
+ // Sphere
+ var ps = p - vec3<f32>(1.3, 0.0, 0.0);
+ let dSphere = sdSphere(ps, 1.2);
+
+ // Ground plane
+ let dPlane = p.y + 1.0;
+
+ // Find closest object
+ var result: RayMarchResult;
+ result.distance = dCube;
+ result.object_id = OBJ_CUBE;
+
+ if (dSphere < result.distance) {
+ result.distance = dSphere;
+ result.object_id = OBJ_SPHERE;
+ }
+
+ if (dPlane < result.distance) {
+ result.distance = dPlane;
+ result.object_id = OBJ_PLANE;
+ }
+
+ result.distance_max = result.distance;
+ return result;
+}
+
fn boxCol(col: vec3<f32>, nsp: vec3<f32>, rd: vec3<f32>, nnor: vec3<f32>, nrcol: vec3<f32>, nshd1: f32, nshd2: f32) -> vec3<f32> {
var nfre = 1.0 + dot(rd, nnor);
nfre *= nfre;
@@ -102,24 +140,35 @@ fn render1(ro: vec3<f32>, rd: vec3<f32>) -> vec3<f32> {
let skyCol_local = render0(ro, rd);
var col = skyCol_local;
- let nt = rayMarch(ro, rd, 0.0);
- if (nt < MAX_RAY_LENGTH) {
- let nsp = ro + rd * nt;
- let nnor = normal(nsp);
+ var init: RayMarchResult;
+ init.distance = 0.0;
+ init.distance_max = 0.0;
+ init.object_id = OBJ_BACKGROUND;
+
+ let result = rayMarchWithID(ro, rd, init);
+ if (result.distance < MAX_RAY_LENGTH) {
+ let nsp = reconstructPosition(ro, rd, result);
+ let nnor = normalWithID(nsp);
let nref = reflect(rd, nnor);
- let nrt = rayMarch(nsp, nref, 0.2);
+ var refl_init: RayMarchResult;
+ refl_init.distance = 0.2;
+ refl_init.distance_max = 0.2;
+ refl_init.object_id = OBJ_BACKGROUND;
+ let nrt_result = rayMarchWithID(nsp, nref, refl_init);
var nrcol = render0(nsp, nref);
- if (nrt < MAX_RAY_LENGTH) {
- let nrsp = nsp + nref * nrt;
- let nrnor = normal(nrsp);
+ if (nrt_result.distance < MAX_RAY_LENGTH) {
+ let nrsp = reconstructPosition(nsp, nref, nrt_result);
+ let nrnor = normalWithID(nrsp);
let nrref = reflect(nref, nrnor);
nrcol = boxCol(nrcol, nrsp, nref, nrnor, render0(nrsp, nrref), 1.0, 1.0);
}
- let nshd1 = mix(0.0, 1.0, shadow(nsp, normalize(lightPos1 - nsp), 0.1, distance(lightPos1, nsp)));
- let nshd2 = mix(0.0, 1.0, shadow(nsp, normalize(lightPos2 - nsp), 0.1, distance(lightPos2, nsp)));
+ let light_dist1 = distance(lightPos1, nsp);
+ let light_dist2 = distance(lightPos2, nsp);
+ let nshd1 = mix(0.0, 1.0, shadowWithStoredDistance(nsp, normalize(lightPos1 - nsp), light_dist1));
+ let nshd2 = mix(0.0, 1.0, shadowWithStoredDistance(nsp, normalize(lightPos2 - nsp), light_dist2));
col = boxCol(col, nsp, rd, nnor, nrcol, nshd1, nshd2);
}
@@ -146,9 +195,7 @@ fn effect(p: vec2<f32>) -> vec3<f32> {
#include "render/fullscreen_vs"
@fragment fn fs_main(@builtin(position) p: vec4<f32>) -> @location(0) vec4<f32> {
- // Flip Y to match ShaderToy convention (origin at bottom-left)
- let flipped = vec2<f32>(p.x, uniforms.resolution.y - p.y);
- let q = flipped / uniforms.resolution;
+ let q = p.xy / uniforms.resolution;
var coord = -1.0 + 2.0 * q;
coord.x *= uniforms.resolution.x / uniforms.resolution.y;
var col = effect(coord);
diff --git a/workspaces/main/shaders/sdf_test.wgsl b/workspaces/main/shaders/sdf_test.wgsl
index 3c97613..71310f2 100644
--- a/workspaces/main/shaders/sdf_test.wgsl
+++ b/workspaces/main/shaders/sdf_test.wgsl
@@ -22,6 +22,15 @@ fn df(p: vec3<f32>) -> f32 {
return min(d_sphere, d_box);
}
+// Two-pass distance function (required by raymarching.wgsl)
+fn dfWithID(p: vec3<f32>) -> RayMarchResult {
+ var result: RayMarchResult;
+ result.distance = df(p);
+ result.distance_max = result.distance;
+ result.object_id = 0.0;
+ return result;
+}
+
// Simple lighting
fn shade(pos: vec3<f32>, rd: vec3<f32>) -> vec3<f32> {
let n = normal(pos);
diff --git a/workspaces/main/workspace.cfg b/workspaces/main/workspace.cfg
index 1c2f4c0..5eff423 100644
--- a/workspaces/main/workspace.cfg
+++ b/workspaces/main/workspace.cfg
@@ -6,7 +6,8 @@ version = "1.0"
[build]
target = "demo64k"
timeline = "timeline.seq"
-music = "pop_punk_drums.track"
+# music = "pop_punk_drums.track"
+music = "beat_test.track"
assets = "assets.txt"
asset_dirs = ["music/", "weights/", "obj/"]
shader_dirs = ["shaders/"]