// timeline-playback.js - Audio playback and waveform rendering
export class PlaybackController {
  constructor(state, dom, viewportController, renderCallback, showMessage) {
    this.state = state;
    this.dom = dom;
    this.viewport = viewportController;
    this.renderCallback = renderCallback;
    this.showMessage = showMessage;

    // Constants
    this.WAVEFORM_AMPLITUDE_SCALE = 0.4;
    this.SEQUENCE_DEFAULT_DURATION = 16;

    this.init();
  }

  // Wire up the file input, transport buttons, and click-to-seek on the waveform.
  init() {
    this.dom.audioInput.addEventListener('change', e => {
      const file = e.target.files[0];
      if (file) this.loadAudioFile(file);
    });

    this.dom.clearAudioBtn.addEventListener('click', () => {
      this.clearAudio();
      this.dom.audioInput.value = '';
    });

    this.dom.playPauseBtn.addEventListener('click', async () => {
      if (this.state.isPlaying) this.stopPlayback();
      else {
        if (this.state.playbackOffset >= this.state.audioDurationSeconds) {
          this.state.playbackOffset = 0;
        }
        this.state.playStartPosition = this.state.playbackOffset;
        await this.startPlayback();
      }
    });

    this.dom.replayBtn.addEventListener('click', async () => {
      this.stopPlayback(false);
      this.state.playbackOffset = this.state.playStartPosition;
      const replayBeats = this.timeToBeats(this.state.playbackOffset);
      this.dom.playbackTime.textContent = `${this.state.playbackOffset.toFixed(2)}s (${replayBeats.toFixed(2)}b)`;
      this.viewport.updateIndicatorPosition(replayBeats, false);
      await this.startPlayback();
    });

    // Click-to-seek on the waveform; mirrors seekTo() but awaits playback restart.
    this.dom.waveformContainer.addEventListener('click', async e => {
      if (!this.state.audioBuffer) return;
      const rect = this.dom.waveformContainer.getBoundingClientRect();
      const canvasOffset = parseFloat(this.dom.waveformCanvas.style.left) || 0;
      const clickX = e.clientX - rect.left - canvasOffset;
      const clickBeats = clickX / this.state.pixelsPerBeat;
      const clickTime = this.beatsToTime(clickBeats);
      const wasPlaying = this.state.isPlaying;
      if (wasPlaying) this.stopPlayback(false);
      this.state.playbackOffset = Math.max(0, Math.min(clickTime, this.state.audioDurationSeconds));
      const pausedBeats = this.timeToBeats(this.state.playbackOffset);
      this.dom.playbackTime.textContent = `${this.state.playbackOffset.toFixed(2)}s (${pausedBeats.toFixed(2)}b)`;
      this.viewport.updateIndicatorPosition(pausedBeats, false);
      if (wasPlaying) await this.startPlayback();
    });
  }

  // Decode an audio file, record its original WAV sample rate, and render the waveform.
  async loadAudioFile(file) {
    try {
      const arrayBuffer = await file.arrayBuffer();

      // Detect original WAV sample rate before decoding
      const dataView = new DataView(arrayBuffer);
      let originalSampleRate = 32000; // Default assumption

      // Parse WAV header to get original sample rate
      // "RIFF" at 0, "WAVE" at 8, "fmt " at 12, sample rate at 24
      if (dataView.getUint32(0, false) === 0x52494646 && // "RIFF"
          dataView.getUint32(8, false) === 0x57415645) { // "WAVE"
        originalSampleRate = dataView.getUint32(24, true); // Little-endian
        console.log(`Detected WAV sample rate: ${originalSampleRate}Hz`);
      }

      if (!this.state.audioContext) {
        this.state.audioContext = new (window.AudioContext || window.webkitAudioContext)();
      }

      this.state.audioBuffer = await this.state.audioContext.decodeAudioData(arrayBuffer);
      this.state.audioDurationSeconds = this.state.audioBuffer.duration;
      this.state.originalSampleRate = originalSampleRate;
      this.state.resampleRatio = this.state.audioContext.sampleRate / originalSampleRate;
      console.log(`AudioContext rate: ${this.state.audioContext.sampleRate}Hz, resample ratio: ${this.state.resampleRatio.toFixed(3)}x`);

      this.renderWaveform();
      this.dom.playbackControls.style.display = 'flex';
      this.dom.playbackIndicator.style.display = 'block';
      this.dom.clearAudioBtn.disabled = false;
      this.dom.replayBtn.disabled = false;
      this.showMessage(`Audio loaded: ${this.state.audioDurationSeconds.toFixed(2)}s @ ${originalSampleRate}Hz`, 'success');
      this.renderCallback('audioLoaded');
    } catch (err) {
      this.showMessage(`Error loading audio: ${err.message}`, 'error');
    }
  }

  // Draw the min/max waveform, center line, and beat grid onto the waveform canvas.
  renderWaveform() {
    if (!this.state.audioBuffer) return;
    const canvas = this.dom.waveformCanvas;
    const ctx = canvas.getContext('2d');

    // Calculate maxTimeBeats same as timeline
    let maxTimeBeats = 60;
    for (const seq of this.state.sequences) {
      maxTimeBeats = Math.max(maxTimeBeats, seq.startTime + this.SEQUENCE_DEFAULT_DURATION);
      for (const effect of seq.effects) {
        maxTimeBeats = Math.max(maxTimeBeats, seq.startTime + effect.endTime);
      }
    }
    if (this.state.audioDurationSeconds > 0) {
      maxTimeBeats = Math.max(maxTimeBeats, this.state.audioDurationSeconds * this.state.beatsPerSecond);
    }

    const w = maxTimeBeats * this.state.pixelsPerBeat;
    const h = 80;
    canvas.width = w;
    canvas.height = h;
    canvas.style.width = `${w}px`;
    canvas.style.height = `${h}px`;

    ctx.fillStyle = 'rgba(0, 0, 0, 0.3)';
    ctx.fillRect(0, 0, w, h);

    const channelData = this.state.audioBuffer.getChannelData(0);
    const audioBeats = this.timeToBeats(this.state.audioDurationSeconds);
    const audioPixelWidth = audioBeats * this.state.pixelsPerBeat;
    const samplesPerPixel = Math.ceil(channelData.length / audioPixelWidth);
    const centerY = h / 2;
    const amplitudeScale = h * this.WAVEFORM_AMPLITUDE_SCALE;

    // One vertical min/max stroke per pixel column.
    ctx.strokeStyle = '#4ec9b0';
    ctx.lineWidth = 1;
    ctx.beginPath();
    for (let x = 0; x < audioPixelWidth; x++) {
      const start = Math.floor(x * samplesPerPixel);
      const end = Math.min(start + samplesPerPixel, channelData.length);
      let min = 1.0, max = -1.0;
      for (let i = start; i < end; i++) {
        min = Math.min(min, channelData[i]);
        max = Math.max(max, channelData[i]);
      }
      const yMin = centerY - min * amplitudeScale;
      const yMax = centerY - max * amplitudeScale;
      if (x === 0) ctx.moveTo(x, yMin);
      else ctx.lineTo(x, yMin);
      ctx.lineTo(x, yMax);
    }
    ctx.stroke();

    // Center line
    ctx.strokeStyle = 'rgba(255, 255, 255, 0.1)';
    ctx.beginPath();
    ctx.moveTo(0, centerY);
    ctx.lineTo(audioPixelWidth, centerY);
    ctx.stroke();

    // Beat markers
    ctx.strokeStyle = 'rgba(255, 255, 255, 0.15)';
    ctx.lineWidth = 1;
    for (let beat = 0; beat <= maxTimeBeats; beat++) {
      const x = beat * this.state.pixelsPerBeat;
      ctx.beginPath();
      ctx.moveTo(x, 0);
      ctx.lineTo(x, h);
      ctx.stroke();
    }
  }

  // Stop playback, drop the decoded buffer, and reset the playback UI.
  clearAudio() {
    this.stopPlayback();
    this.state.audioBuffer = null;
    this.state.audioDurationSeconds = 0;
    this.state.playbackOffset = 0;
    this.state.playStartPosition = 0;
    this.dom.playbackControls.style.display = 'none';
    this.dom.playbackIndicator.style.display = 'none';
    this.dom.clearAudioBtn.disabled = true;
    this.dom.replayBtn.disabled = true;
    const ctx = this.dom.waveformCanvas.getContext('2d');
    ctx.clearRect(0, 0, this.dom.waveformCanvas.width, this.dom.waveformCanvas.height);
    this.renderCallback('audioClear');
    this.showMessage('Audio cleared', 'success');
  }

  // Create a fresh buffer source and start playing from playbackOffset.
  async startPlayback() {
    if (!this.state.audioBuffer || !this.state.audioContext) return;

    if (this.state.audioSource) {
      try { this.state.audioSource.stop(); } catch (e) {}
      this.state.audioSource = null;
    }

    if (this.state.audioContext.state === 'suspended') {
      await this.state.audioContext.resume();
    }

    try {
      this.state.audioSource = this.state.audioContext.createBufferSource();
      this.state.audioSource.buffer = this.state.audioBuffer;
      this.state.audioSource.connect(this.state.audioContext.destination);
      this.state.audioSource.start(0, this.state.playbackOffset);
      this.state.playbackStartTime = this.state.audioContext.currentTime;
      this.state.isPlaying = true;
      this.dom.playPauseBtn.textContent = '⏸ Pause';
      this.updatePlaybackPosition();
      this.state.audioSource.onended = () => {
        if (this.state.isPlaying) this.stopPlayback();
      };
    } catch (e) {
      console.error('Failed to start playback:', e);
      this.showMessage('Playback failed: ' + e.message, 'error');
      this.state.audioSource = null;
      this.state.isPlaying = false;
    }
  }

  // Stop the current source; optionally save the paused position.
  stopPlayback(savePosition = true) {
    if (this.state.audioSource) {
      try { this.state.audioSource.stop(); } catch (e) {}
      this.state.audioSource = null;
    }
    if (this.state.animationFrameId) {
      cancelAnimationFrame(this.state.animationFrameId);
      this.state.animationFrameId = null;
    }
    if (this.state.isPlaying && savePosition) {
      const elapsed = this.state.audioContext.currentTime - this.state.playbackStartTime;
      this.state.playbackOffset = Math.min(this.state.playbackOffset + elapsed, this.state.audioDurationSeconds);
    }
    this.state.isPlaying = false;
    this.dom.playPauseBtn.textContent = '▶ Play';
  }

  // rAF loop: update the time readout, playback indicator, and active sequence while playing.
  updatePlaybackPosition() {
    if (!this.state.isPlaying) return;
    const elapsed = this.state.audioContext.currentTime - this.state.playbackStartTime;
    const currentTime = this.state.playbackOffset + elapsed;
    const currentBeats = this.timeToBeats(currentTime);
    this.dom.playbackTime.textContent = `${currentTime.toFixed(2)}s (${currentBeats.toFixed(2)}b)`;
    this.viewport.updateIndicatorPosition(currentBeats, true);
    this.expandSequenceAtTime(currentBeats);
    this.state.animationFrameId = requestAnimationFrame(() => this.updatePlaybackPosition());
  }

  // Highlight the sequence under the playhead with the 'active-playing' class.
  expandSequenceAtTime(currentBeats) {
    let activeSeqIndex = -1;
    for (let i = 0; i < this.state.sequences.length; i++) {
      const seq = this.state.sequences[i];
      const seqEndBeats = seq.startTime + (seq.effects.length > 0 ? Math.max(...seq.effects.map(e => e.endTime)) : 0);
      if (currentBeats >= seq.startTime && currentBeats <= seqEndBeats) {
        activeSeqIndex = i;
        break;
      }
    }
    if (activeSeqIndex !== this.state.lastExpandedSeqIndex) {
      const seqDivs = this.dom.timeline.querySelectorAll('.sequence');
      if (this.state.lastExpandedSeqIndex >= 0 && seqDivs[this.state.lastExpandedSeqIndex]) {
        seqDivs[this.state.lastExpandedSeqIndex].classList.remove('active-playing');
      }
      if (activeSeqIndex >= 0 && seqDivs[activeSeqIndex]) {
        seqDivs[activeSeqIndex].classList.add('active-playing');
      }
      this.state.lastExpandedSeqIndex = activeSeqIndex;
    }
  }

  // Programmatic seek; mirrors the waveform click handler.
  seekTo(clickBeats, clickTime) {
    if (!this.state.audioBuffer) return;
    const wasPlaying = this.state.isPlaying;
    if (wasPlaying) this.stopPlayback(false);
    this.state.playbackOffset = Math.max(0, Math.min(clickTime, this.state.audioDurationSeconds));
    const pausedBeats = this.timeToBeats(this.state.playbackOffset);
    this.dom.playbackTime.textContent = `${this.state.playbackOffset.toFixed(2)}s (${pausedBeats.toFixed(2)}b)`;
    this.viewport.updateIndicatorPosition(pausedBeats, false);
    if (wasPlaying) this.startPlayback();
    return { clickTime, clickBeats };
  }

  // Helpers
  beatsToTime(beats) { return beats * this.state.secondsPerBeat; }
  timeToBeats(seconds) { return seconds * this.state.beatsPerSecond; }
}
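
// Example wiring (a minimal sketch, not part of this module's API). The state
// fields and dom keys below are the ones PlaybackController actually reads or
// writes; the element IDs, renderTimeline(), showToast(), and the viewport
// object with updateIndicatorPosition(beats, follow) are hypothetical names
// assumed for illustration.
//
//   import { PlaybackController } from './timeline-playback.js';
//
//   const state = {
//     audioContext: null, audioBuffer: null, audioSource: null,
//     audioDurationSeconds: 0, playbackOffset: 0, playStartPosition: 0,
//     playbackStartTime: 0, isPlaying: false, animationFrameId: null,
//     lastExpandedSeqIndex: -1, sequences: [],
//     pixelsPerBeat: 20, beatsPerSecond: 2, secondsPerBeat: 1 / 2,
//   };
//   const dom = {
//     audioInput: document.getElementById('audio-input'),
//     clearAudioBtn: document.getElementById('clear-audio'),
//     playPauseBtn: document.getElementById('play-pause'),
//     replayBtn: document.getElementById('replay'),
//     waveformContainer: document.getElementById('waveform-container'),
//     waveformCanvas: document.getElementById('waveform-canvas'),
//     playbackControls: document.getElementById('playback-controls'),
//     playbackIndicator: document.getElementById('playback-indicator'),
//     playbackTime: document.getElementById('playback-time'),
//     timeline: document.getElementById('timeline'),
//   };
//   const viewport = { updateIndicatorPosition(beats, follow) { /* move playhead */ } };
//   const playback = new PlaybackController(
//     state, dom, viewport,
//     reason => renderTimeline(reason),
//     (msg, kind) => showToast(msg, kind),
//   );
//
// Keep secondsPerBeat consistent with beatsPerSecond (secondsPerBeat = 1 / beatsPerSecond),
// since beatsToTime()/timeToBeats() rely on both.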