author    skal <pascal.massimino@gmail.com>    2026-02-05 22:56:04 +0100
committer skal <pascal.massimino@gmail.com>    2026-02-05 22:56:04 +0100
commit    b4d7e651caf7723a659e299ac76111cbc6c0462c (patch)
tree      f9683964c0957c0de187a92f6c986dbb688a4149
parent    dff825ecd7af9051b7f8bad420dace8165d2fdde (diff)
feat(timeline-editor): Add WAV file waveform visualization
Added an audio waveform feature for visual reference when placing sequences.

Features:
- Load Audio (WAV) button to upload audio files
- Waveform canvas displayed above the timeline with 80px height
- Waveform scales with zoom (pixels per second)
- Auto-extends the timeline to fit the audio duration
- Clear Audio button to remove the waveform
- Waveform uses the Web Audio API for decoding
- Min/max amplitude visualization for better clarity
- Semi-transparent background with center line

UI:
- Waveform positioned above the time markers
- Crosshair cursor for precision
- Cyan (#4ec9b0) waveform color matching the editor theme
- Scrolls horizontally with the timeline

No audio playback; the visualization exists only to assist sequence placement.
-rw-r--r--  tools/timeline_editor/index.html | 140
1 files changed, 140 insertions, 0 deletions
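
For reference before the diff: the heart of the change is per-pixel min/max peak extraction, which renderWaveform() below performs inline while drawing. A minimal standalone sketch of just that step (the helper name peaksPerPixel is illustrative, not an identifier from the commit):

function peaksPerPixel(channelData, pixelWidth) {
  // One [min, max] amplitude pair per horizontal pixel of the canvas.
  const samplesPerPixel = Math.ceil(channelData.length / pixelWidth);
  const peaks = [];
  for (let x = 0; x < pixelWidth; x++) {
    const start = x * samplesPerPixel;
    const end = Math.min(start + samplesPerPixel, channelData.length);
    let min = 1.0, max = -1.0;
    for (let i = start; i < end; i++) {
      const s = channelData[i];
      if (s < min) min = s;
      if (s > max) max = s;
    }
    peaks.push([min, max]);
  }
  return peaks;
}

Keeping both extremes per pixel, rather than one sample per pixel, preserves short transients that plain decimation would drop; that is the "min/max amplitude visualization" the commit message refers to.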
diff --git a/tools/timeline_editor/index.html b/tools/timeline_editor/index.html
index e85a31c..f85f914 100644
--- a/tools/timeline_editor/index.html
+++ b/tools/timeline_editor/index.html
@@ -121,6 +121,16 @@
     border-left: 2px solid #3c3c3c;
   }

+  #waveformCanvas {
+    position: relative;
+    height: 80px;
+    width: 100%;
+    margin-bottom: 10px;
+    background: rgba(0, 0, 0, 0.3);
+    border-radius: 4px;
+    cursor: crosshair;
+  }
+
   .time-markers {
     position: relative;
     height: 30px;
@@ -407,6 +417,11 @@
     <input type="file" id="fileInput" accept=".seq">
   </label>
   <button id="saveBtn" disabled>💾 Save demo.seq</button>
+  <label class="file-label">
+    🎵 Load Audio (WAV)
+    <input type="file" id="audioInput" accept=".wav">
+  </label>
+  <button id="clearAudioBtn" disabled>✖ Clear Audio</button>
   <button id="addSequenceBtn" disabled>➕ Add Sequence</button>
   <button id="deleteBtn" disabled>🗑️ Delete Selected</button>
   <button id="reorderBtn" disabled>🔄 Re-order by Time</button>
@@ -425,6 +440,7 @@
   <div id="messageArea"></div>

   <div class="timeline-container">
+    <canvas id="waveformCanvas" style="display: none;"></canvas>
     <div class="time-markers" id="timeMarkers"></div>
     <div class="timeline" id="timeline"></div>
   </div>
@@ -455,12 +471,17 @@
 let lastActiveSeqIndex = -1;
 let isDraggingHandle = false;
 let handleType = null; // 'left' or 'right'
+let audioBuffer = null; // Decoded audio data
+let audioDuration = 0; // Duration in seconds

 // DOM elements
 const timeline = document.getElementById('timeline');
 const timelineContainer = document.querySelector('.timeline-container');
 const fileInput = document.getElementById('fileInput');
 const saveBtn = document.getElementById('saveBtn');
+const audioInput = document.getElementById('audioInput');
+const clearAudioBtn = document.getElementById('clearAudioBtn');
+const waveformCanvas = document.getElementById('waveformCanvas');
 const addSequenceBtn = document.getElementById('addSequenceBtn');
 const deleteBtn = document.getElementById('deleteBtn');
 const reorderBtn = document.getElementById('reorderBtn');
@@ -590,6 +611,106 @@
   return output;
 }

+// Audio waveform visualization
+async function loadAudioFile(file) {
+  try {
+    const arrayBuffer = await file.arrayBuffer();
+    const audioContext = new (window.AudioContext || window.webkitAudioContext)();
+    audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
+    audioDuration = audioBuffer.duration;
+
+    renderWaveform();
+    waveformCanvas.style.display = 'block';
+    clearAudioBtn.disabled = false;
+    showMessage(`Audio loaded: ${audioDuration.toFixed(2)}s`, 'success');
+
+    // Extend timeline if audio is longer than current max time
+    renderTimeline();
+  } catch (err) {
+    showMessage(`Error loading audio: ${err.message}`, 'error');
+  }
+}
+
+function renderWaveform() {
+  if (!audioBuffer) return;
+
+  const canvas = waveformCanvas;
+  const ctx = canvas.getContext('2d');
+
+  // Set canvas size based on audio duration and zoom
+  const canvasWidth = audioDuration * pixelsPerSecond;
+  const canvasHeight = 80;
+
+  // Set actual canvas resolution (for sharp rendering)
+  canvas.width = canvasWidth;
+  canvas.height = canvasHeight;
+
+  // Set CSS size to match
+  canvas.style.width = `${canvasWidth}px`;
+  canvas.style.height = `${canvasHeight}px`;
+
+  // Clear canvas
+  ctx.fillStyle = 'rgba(0, 0, 0, 0.3)';
+  ctx.fillRect(0, 0, canvasWidth, canvasHeight);
+
+  // Get audio data (first channel only; stereo files are not mixed down)
+  const channelData = audioBuffer.getChannelData(0);
+  const sampleRate = audioBuffer.sampleRate;
+  const samplesPerPixel = Math.ceil(channelData.length / canvasWidth);
+
+  // Draw waveform
+  ctx.strokeStyle = '#4ec9b0';
+  ctx.lineWidth = 1;
+  ctx.beginPath();
+
+  const centerY = canvasHeight / 2;
+  const amplitudeScale = canvasHeight * 0.4; // 0.4 above and below center: peaks span 80% of height
+
+  for (let x = 0; x < canvasWidth; x++) {
+    const startSample = Math.floor(x * samplesPerPixel);
+    const endSample = Math.min(startSample + samplesPerPixel, channelData.length);
+
+    // Find min and max amplitude in this pixel range (for better visualization)
+    let min = 1.0;
+    let max = -1.0;
+    for (let i = startSample; i < endSample; i++) {
+      const sample = channelData[i];
+      if (sample < min) min = sample;
+      if (sample > max) max = sample;
+    }
+
+    // Draw vertical line from min to max
+    const yMin = centerY - min * amplitudeScale;
+    const yMax = centerY - max * amplitudeScale;
+
+    if (x === 0) {
+      ctx.moveTo(x, yMin);
+    } else {
+      ctx.lineTo(x, yMin);
+    }
+    ctx.lineTo(x, yMax);
+  }
+
+  ctx.stroke();
+
+  // Draw center line
+  ctx.strokeStyle = 'rgba(255, 255, 255, 0.1)';
+  ctx.lineWidth = 1;
+  ctx.beginPath();
+  ctx.moveTo(0, centerY);
+  ctx.lineTo(canvasWidth, centerY);
+  ctx.stroke();
+}
+
+function clearAudio() {
+  audioBuffer = null;
+  audioDuration = 0;
+  waveformCanvas.style.display = 'none';
+  clearAudioBtn.disabled = true;
+  renderTimeline();
+  showMessage('Audio cleared', 'success');
+}
+
 // Render timeline
 function renderTimeline() {
   timeline.innerHTML = '';
@@ -607,6 +728,11 @@
     }
   }

+  // Extend timeline to fit audio if loaded
+  if (audioDuration > 0) {
+    maxTime = Math.max(maxTime, audioDuration);
+  }
+
   // Render time markers
   const timelineWidth = maxTime * pixelsPerSecond;
   timeline.style.width = `${timelineWidth}px`;
@@ -1048,6 +1174,17 @@
   showMessage('File saved', 'success');
 });

+audioInput.addEventListener('change', (e) => {
+  const file = e.target.files[0];
+  if (!file) return;
+  loadAudioFile(file);
+});
+
+clearAudioBtn.addEventListener('click', () => {
+  clearAudio();
+  audioInput.value = ''; // Reset file input
+});
+
 addSequenceBtn.addEventListener('click', () => {
   sequences.push({
     type: 'sequence',
@@ -1105,6 +1242,9 @@
   pixelsPerSecond = zoom;
   zoomLevel.textContent = `${zoom}%`;
   pixelsPerSecLabel.textContent = zoom;
+  if (audioBuffer) {
+    renderWaveform(); // Re-render waveform at new zoom
+  }
   renderTimeline();
 });