Diffstat (limited to 'tools/spectral_editor/script.js')
-rw-r--r--  tools/spectral_editor/script.js  1774
1 file changed, 1774 insertions, 0 deletions
diff --git a/tools/spectral_editor/script.js b/tools/spectral_editor/script.js
new file mode 100644
index 0000000..6c6dd49
--- /dev/null
+++ b/tools/spectral_editor/script.js
@@ -0,0 +1,1774 @@
+// Spectral Brush Editor - Main Script
+// Implements Bezier curve editing, spectrogram rendering, and audio playback
+
+// ============================================================================
+// State Management
+// ============================================================================
+
+const SAMPLE_RATE = 32000;
+const DCT_SIZE = 512;
+
+// Frequency range for log-scale display
+const FREQ_MIN = 20.0; // 20 Hz (lowest audible bass)
+const FREQ_MAX = 16000.0; // 16 kHz (Nyquist for 32kHz sample rate)
+const USE_LOG_SCALE = true; // Enable logarithmic frequency axis
+
+const state = {
+ // Reference audio data
+ referenceSpectrogram: null, // Float32Array or null
+ referenceDctSize: DCT_SIZE,
+ referenceNumFrames: 0,
+
+ // Procedural curves
+ curves: [], // Array of {id, controlPoints: [{frame, freqHz, amplitude}], profile: {type, param1, param2}}
+ nextCurveId: 0,
+ selectedCurveId: null,
+ selectedControlPointIdx: null,
+
+ // Canvas state
+ canvasWidth: 0,
+ canvasHeight: 0,
+ pixelsPerFrame: 2.0, // Zoom level (pixels per frame)
+ pixelsPerBin: 1.0, // Vertical scale (pixels per frequency bin)
+
+ // Audio playback
+ audioContext: null,
+ isPlaying: false,
+ currentSource: null,
+ currentGainNode: null, // Keep reference to gain node for live volume updates
+ playbackVolume: 1.0, // Global playback volume (0.0-1.0)
+ referenceOpacity: 0.5, // Opacity of the reference spectrogram overlay (0.0-1.0)
+
+ // Playhead indicator
+ playbackStartTime: null,
+ playbackDuration: 0,
+ playbackCurrentFrame: 0,
+
+ // Mouse hover state
+ mouseX: -1,
+ mouseY: -1,
+ mouseFrame: 0,
+ mouseFreq: 0,
+
+ // Undo/Redo
+ history: [],
+ historyIndex: -1,
+ maxHistorySize: 50
+};
+
+// ============================================================================
+// Initialization
+// ============================================================================
+
+document.addEventListener('DOMContentLoaded', () => {
+ initCanvas();
+ initUI();
+ initKeyboardShortcuts();
+ initAudioContext();
+
+ console.log('Spectral Brush Editor initialized');
+});
+
+function initCanvas() {
+ const canvas = document.getElementById('spectrogramCanvas');
+ const container = canvas.parentElement;
+
+ // Set canvas size to match container
+ const resizeCanvas = () => {
+ canvas.width = container.clientWidth;
+ canvas.height = container.clientHeight;
+ state.canvasWidth = canvas.width;
+ state.canvasHeight = canvas.height;
+ render();
+ };
+
+ window.addEventListener('resize', resizeCanvas);
+ resizeCanvas();
+
+ // Mouse event handlers
+ canvas.addEventListener('mousedown', onCanvasMouseDown);
+ canvas.addEventListener('mousemove', onCanvasMouseMove);
+ canvas.addEventListener('mouseup', onCanvasMouseUp);
+ canvas.addEventListener('contextmenu', onCanvasRightClick);
+
+ // Mouse hover handlers (for crosshair)
+ canvas.addEventListener('mousemove', onCanvasHover);
+ canvas.addEventListener('mouseleave', onCanvasLeave);
+}
+
+function initUI() {
+ // File loading
+ document.getElementById('loadWavBtn').addEventListener('click', () => {
+ document.getElementById('fileInput').click();
+ });
+
+ document.getElementById('fileInput').addEventListener('change', onFileSelected);
+
+ // Curve management
+ document.getElementById('addCurveBtn').addEventListener('click', addCurve);
+ document.getElementById('deleteCurveBtn').addEventListener('click', deleteSelectedCurve);
+ document.getElementById('curveSelect').addEventListener('change', onCurveSelected);
+
+ // Profile controls
+ document.getElementById('profileType').addEventListener('change', onProfileChanged);
+ document.getElementById('sigmaSlider').addEventListener('input', onSigmaChanged);
+ document.getElementById('sigmaValue').addEventListener('input', onSigmaValueChanged);
+ document.getElementById('curveVolumeSlider').addEventListener('input', onCurveVolumeChanged);
+ document.getElementById('curveVolumeValue').addEventListener('input', onCurveVolumeValueChanged);
+
+ // Display controls
+ document.getElementById('refOpacitySlider').addEventListener('input', onRefOpacityChanged);
+ document.getElementById('refOpacityValue').addEventListener('input', onRefOpacityValueChanged);
+
+ // Playback controls
+ document.getElementById('volumeSlider').addEventListener('input', onVolumeChanged);
+ document.getElementById('volumeValue').addEventListener('input', onVolumeValueChanged);
+ document.getElementById('playProceduralBtn').addEventListener('click', () => playAudio('procedural'));
+ document.getElementById('playOriginalBtn').addEventListener('click', () => playAudio('original'));
+ document.getElementById('stopBtn').addEventListener('click', stopAudio);
+
+ // Action buttons
+ document.getElementById('undoBtn').addEventListener('click', undo);
+ document.getElementById('redoBtn').addEventListener('click', redo);
+ document.getElementById('saveParamsBtn').addEventListener('click', saveProceduralParams);
+ document.getElementById('generateCodeBtn').addEventListener('click', generateCppCode);
+ document.getElementById('helpBtn').addEventListener('click', showHelp);
+
+ // Help modal
+ document.getElementById('closeHelpModal').addEventListener('click', hideHelp);
+ document.getElementById('helpModal').addEventListener('click', (e) => {
+ if (e.target.id === 'helpModal') hideHelp();
+ });
+}
+
+function initKeyboardShortcuts() {
+ document.addEventListener('keydown', (e) => {
+ // Ignore shortcuts while the user is typing in an input field
+ const tag = e.target.tagName;
+ if (tag === 'INPUT' || tag === 'TEXTAREA' || tag === 'SELECT') return;
+
+ // Playback shortcuts
+ if (e.key === '1') {
+ playAudio('procedural');
+ return;
+ }
+ if (e.key === '2') {
+ playAudio('original');
+ return;
+ }
+ if (e.key === ' ') {
+ e.preventDefault();
+ stopAudio();
+ return;
+ }
+
+ // Edit shortcuts
+ if (e.key === 'Delete') {
+ deleteSelectedControlPoint();
+ return;
+ }
+ if (e.key === 'Escape') {
+ deselectAll();
+ return;
+ }
+
+ // Undo/Redo
+ if (e.ctrlKey && e.shiftKey && e.key === 'Z') {
+ e.preventDefault();
+ redo();
+ return;
+ }
+ if (e.ctrlKey && e.key === 'z') {
+ e.preventDefault();
+ undo();
+ return;
+ }
+
+ // File operations
+ if (e.ctrlKey && e.shiftKey && e.key === 'S') {
+ e.preventDefault();
+ generateCppCode();
+ return;
+ }
+ if (e.ctrlKey && e.key === 's') {
+ e.preventDefault();
+ saveProceduralParams();
+ return;
+ }
+ if (e.ctrlKey && e.key === 'o') {
+ e.preventDefault();
+ document.getElementById('fileInput').click();
+ return;
+ }
+
+ // Help
+ if (e.key === '?') {
+ showHelp();
+ return;
+ }
+ });
+}
+
+function initAudioContext() {
+ try {
+ state.audioContext = new (window.AudioContext || window.webkitAudioContext)({
+ sampleRate: SAMPLE_RATE
+ });
+ console.log('Audio context initialized:', state.audioContext.sampleRate, 'Hz');
+ } catch (error) {
+ console.error('Failed to initialize audio context:', error);
+ alert('Audio playback unavailable. Your browser may not support Web Audio API.');
+ }
+}
+
+// ============================================================================
+// File Loading
+// ============================================================================
+
+function onFileSelected(e) {
+ const file = e.target.files[0];
+ if (!file) return;
+
+ // Check if there are unsaved curves
+ if (state.curves.length > 0) {
+ const confirmLoad = confirm(
+ 'You have unsaved curves. Loading a new file will reset all curves.\n\n' +
+ 'Do you want to save your work first?\n\n' +
+ 'Click "OK" to save, or "Cancel" to discard and continue loading.'
+ );
+
+ if (confirmLoad) {
+ // User wants to save first
+ saveProceduralParams();
+ // After saving, ask again if they want to proceed
+ const proceedLoad = confirm('File saved. Proceed with loading new file?');
+ if (!proceedLoad) {
+ // User changed their mind, reset file input
+ e.target.value = '';
+ return;
+ }
+ }
+ }
+
+ const fileName = file.name;
+ const fileExt = fileName.split('.').pop().toLowerCase();
+
+ if (fileExt === 'wav') {
+ loadWavFile(file);
+ } else if (fileExt === 'spec') {
+ loadSpecFile(file);
+ } else {
+ alert('Unsupported file format. Please load a .wav or .spec file.');
+ }
+}
+
+function loadWavFile(file) {
+ const reader = new FileReader();
+ reader.onload = (e) => {
+ const arrayBuffer = e.target.result;
+ state.audioContext.decodeAudioData(arrayBuffer, (audioBuffer) => {
+ console.log('Decoded WAV:', audioBuffer.length, 'samples,', audioBuffer.numberOfChannels, 'channels');
+
+ // Convert to spectrogram (simplified: just use first channel)
+ const audioData = audioBuffer.getChannelData(0);
+ const spectrogram = audioToSpectrogram(audioData);
+
+ state.referenceSpectrogram = spectrogram.data;
+ state.referenceDctSize = spectrogram.dctSize;
+ state.referenceNumFrames = spectrogram.numFrames;
+
+ onReferenceLoaded(file.name);
+ }, (error) => {
+ console.error('Failed to decode WAV:', error);
+ alert('Failed to decode WAV file. Make sure it is a valid audio file.');
+ });
+ };
+ reader.readAsArrayBuffer(file);
+}
+
+function loadSpecFile(file) {
+ const reader = new FileReader();
+ reader.onload = (e) => {
+ const arrayBuffer = e.target.result;
+ const spec = parseSpecFile(arrayBuffer);
+
+ if (!spec) {
+ alert('Failed to parse .spec file. Invalid format.');
+ return;
+ }
+
+ state.referenceSpectrogram = spec.data;
+ state.referenceDctSize = spec.dctSize;
+ state.referenceNumFrames = spec.numFrames;
+
+ onReferenceLoaded(file.name);
+ };
+ reader.readAsArrayBuffer(file);
+}
+
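+// .spec binary layout (little-endian), as parsed below:
+//   bytes 0..3   : "SPEC" magic
+//   bytes 4..7   : uint32 version
+//   bytes 8..11  : uint32 dct_size
+//   bytes 12..15 : uint32 num_frames
+//   bytes 16..   : float32[dct_size * num_frames] coefficients, frame-major
+//                  (frame 0 bins 0..dct_size-1, then frame 1, ...)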
+function parseSpecFile(arrayBuffer) {
+ const view = new DataView(arrayBuffer);
+ let offset = 0;
+
+ // Read header: "SPEC" magic (4 bytes)
+ const magic = String.fromCharCode(
+ view.getUint8(offset++),
+ view.getUint8(offset++),
+ view.getUint8(offset++),
+ view.getUint8(offset++)
+ );
+
+ if (magic !== 'SPEC') {
+ console.error('Invalid .spec file: wrong magic', magic);
+ return null;
+ }
+
+ // Read version (uint32)
+ const version = view.getUint32(offset, true);
+ offset += 4;
+
+ // Read dct_size (uint32)
+ const dctSize = view.getUint32(offset, true);
+ offset += 4;
+
+ // Read num_frames (uint32)
+ const numFrames = view.getUint32(offset, true);
+ offset += 4;
+
+ console.log('.spec header:', {version, dctSize, numFrames});
+
+ // Read spectral data (float32 array)
+ const dataLength = dctSize * numFrames;
+ const data = new Float32Array(dataLength);
+
+ for (let i = 0; i < dataLength; i++) {
+ data[i] = view.getFloat32(offset, true);
+ offset += 4;
+ }
+
+ return {dctSize, numFrames, data};
+}
+
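+// Analysis framing: Hann-windowed frames of DCT_SIZE samples with 50% overlap
+// (hop = DCT_SIZE / 2 = 256). Worked example: 1 s of audio at 32 kHz (32000 samples)
+// yields floor((32000 - 512) / 256) + 1 = 124 frames.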
+function audioToSpectrogram(audioData) {
+ // Simplified STFT: divide audio into frames and apply DCT
+ // Frame overlap: 50% (hop size = DCT_SIZE / 2)
+ const hopSize = DCT_SIZE / 2;
+ const numFrames = Math.floor((audioData.length - DCT_SIZE) / hopSize) + 1;
+
+ const spectrogram = new Float32Array(DCT_SIZE * numFrames);
+ const window = hanningWindowArray;
+
+ for (let frameIdx = 0; frameIdx < numFrames; frameIdx++) {
+ const frameStart = frameIdx * hopSize;
+ const frame = new Float32Array(DCT_SIZE);
+
+ // Extract windowed frame
+ for (let i = 0; i < DCT_SIZE; i++) {
+ if (frameStart + i < audioData.length) {
+ frame[i] = audioData[frameStart + i] * window[i];
+ }
+ }
+
+ // Compute DCT (forward transform)
+ const dctCoeffs = javascript_dct_512(frame);
+
+ // Store in spectrogram
+ for (let b = 0; b < DCT_SIZE; b++) {
+ spectrogram[frameIdx * DCT_SIZE + b] = dctCoeffs[b];
+ }
+ }
+
+ return {dctSize: DCT_SIZE, numFrames, data: spectrogram};
+}
+
+// Forward DCT: delegates to the fast O(N log N) FFT-based implementation
+// (javascript_dct_512_fft in dct.js)
+function javascript_dct_512(input) {
+ return javascript_dct_512_fft(input);
+}
+
+function onReferenceLoaded(fileName) {
+ console.log('Reference loaded:', fileName);
+ document.getElementById('fileInfo').textContent = fileName;
+ document.getElementById('canvasOverlay').classList.add('hidden');
+ document.getElementById('playOriginalBtn').disabled = false;
+
+ // Reset curves when loading new file
+ state.curves = [];
+ state.nextCurveId = 0;
+ state.selectedCurveId = null;
+ state.selectedControlPointIdx = null;
+
+ // Clear history
+ state.history = [];
+ state.historyIndex = -1;
+
+ // Reset mouse to frame 0
+ state.mouseFrame = 0;
+
+ // Adjust zoom to fit
+ state.pixelsPerFrame = Math.max(1.0, state.canvasWidth / state.referenceNumFrames);
+
+ updateCurveUI();
+ updateUndoRedoButtons();
+ render();
+ drawSpectrumViewer(); // Show initial spectrum
+}
+
+// ============================================================================
+// Curve Management
+// ============================================================================
+
+function addCurve() {
+ // Generate a unique color for this curve
+ const colors = [
+ '#0e639c', // Blue
+ '#00aa00', // Green
+ '#cc5500', // Orange
+ '#aa00aa', // Purple
+ '#00aaaa', // Cyan
+ '#aa5500', // Brown
+ '#ff69b4', // Pink
+ '#ffd700', // Gold
+ ];
+ const color = colors[state.curves.length % colors.length];
+
+ const curve = {
+ id: state.nextCurveId++,
+ controlPoints: [], // Empty initially, user will place points
+ profile: {
+ type: 'gaussian',
+ param1: 30.0, // sigma
+ param2: 0.0
+ },
+ color: color,
+ volume: 1.0 // Per-curve volume multiplier (0.0-1.0)
+ };
+
+ state.curves.push(curve);
+ state.selectedCurveId = curve.id;
+
+ saveHistoryState('Add curve');
+ updateCurveUI();
+ render();
+}
+
+function deleteSelectedCurve() {
+ if (state.selectedCurveId === null) return;
+
+ const idx = state.curves.findIndex(c => c.id === state.selectedCurveId);
+ if (idx >= 0) {
+ state.curves.splice(idx, 1);
+ state.selectedCurveId = null;
+ state.selectedControlPointIdx = null;
+
+ saveHistoryState('Delete curve');
+ updateCurveUI();
+ render();
+ }
+}
+
+function onCurveSelected(e) {
+ const curveId = parseInt(e.target.value);
+ state.selectedCurveId = curveId >= 0 ? curveId : null;
+ state.selectedControlPointIdx = null;
+
+ updateCurveUI();
+ render();
+}
+
+function updateCurveUI() {
+ // Update curve list (toolbar)
+ const curveList = document.getElementById('curveList');
+ curveList.innerHTML = '';
+
+ state.curves.forEach(curve => {
+ const div = document.createElement('div');
+ div.className = 'curve-item';
+ if (curve.id === state.selectedCurveId) {
+ div.classList.add('selected');
+ }
+
+ // Add color indicator
+ const colorDot = document.createElement('span');
+ colorDot.style.display = 'inline-block';
+ colorDot.style.width = '12px';
+ colorDot.style.height = '12px';
+ colorDot.style.borderRadius = '50%';
+ colorDot.style.backgroundColor = curve.color || '#0e639c';
+ colorDot.style.marginRight = '8px';
+ colorDot.style.verticalAlign = 'middle';
+
+ div.appendChild(colorDot);
+ div.appendChild(document.createTextNode(`Curve ${curve.id} (${curve.controlPoints.length} points)`));
+
+ div.addEventListener('click', () => {
+ state.selectedCurveId = curve.id;
+ state.selectedControlPointIdx = null;
+ updateCurveUI();
+ updatePointInfo();
+ render();
+ });
+ curveList.appendChild(div);
+ });
+
+ // Update curve select dropdown
+ const curveSelect = document.getElementById('curveSelect');
+ curveSelect.innerHTML = '';
+
+ if (state.curves.length === 0) {
+ const opt = document.createElement('option');
+ opt.value = -1;
+ opt.textContent = 'No curves';
+ curveSelect.appendChild(opt);
+ } else {
+ state.curves.forEach(curve => {
+ const opt = document.createElement('option');
+ opt.value = curve.id;
+ opt.textContent = `Curve ${curve.id}`;
+ opt.selected = curve.id === state.selectedCurveId;
+ curveSelect.appendChild(opt);
+ });
+ }
+
+ // Update delete button state
+ document.getElementById('deleteCurveBtn').disabled = state.selectedCurveId === null;
+
+ // Update profile controls
+ if (state.selectedCurveId !== null) {
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (curve) {
+ document.getElementById('profileType').value = curve.profile.type;
+ document.getElementById('sigmaSlider').value = curve.profile.param1;
+ document.getElementById('sigmaValue').value = curve.profile.param1;
+
+ // Update curve volume slider
+ const volumePercent = Math.round(curve.volume * 100);
+ document.getElementById('curveVolumeSlider').value = volumePercent;
+ document.getElementById('curveVolumeValue').value = volumePercent;
+ }
+ }
+
+ // Update point info panel
+ updatePointInfo();
+}
+
+function updatePointInfo() {
+ const frameEl = document.getElementById('pointFrame');
+ const freqEl = document.getElementById('pointFreq');
+ const ampEl = document.getElementById('pointAmp');
+
+ if (state.selectedCurveId === null || state.selectedControlPointIdx === null) {
+ // No point selected
+ frameEl.textContent = '-';
+ freqEl.textContent = '-';
+ ampEl.textContent = '-';
+ return;
+ }
+
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (!curve || state.selectedControlPointIdx >= curve.controlPoints.length) {
+ frameEl.textContent = '-';
+ freqEl.textContent = '-';
+ ampEl.textContent = '-';
+ return;
+ }
+
+ const point = curve.controlPoints[state.selectedControlPointIdx];
+ frameEl.textContent = point.frame.toFixed(0);
+ freqEl.textContent = point.freqHz.toFixed(1) + ' Hz';
+ ampEl.textContent = point.amplitude.toFixed(3);
+}
+
+// ============================================================================
+// Profile Controls
+// ============================================================================
+
+function onProfileChanged(e) {
+ if (state.selectedCurveId === null) return;
+
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (!curve) return;
+
+ curve.profile.type = e.target.value;
+
+ // Update label based on profile type
+ const label = document.getElementById('sigmaLabel');
+ if (curve.profile.type === 'gaussian') {
+ label.textContent = 'Sigma:';
+ } else if (curve.profile.type === 'decaying_sinusoid') {
+ label.textContent = 'Decay:';
+ } else if (curve.profile.type === 'noise') {
+ label.textContent = 'Decay:';
+ }
+
+ saveHistoryState('Change profile');
+ render();
+}
+
+function onSigmaChanged(e) {
+ if (state.selectedCurveId === null) return;
+
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (!curve) return;
+
+ curve.profile.param1 = parseFloat(e.target.value);
+ document.getElementById('sigmaValue').value = curve.profile.param1;
+
+ render();
+}
+
+function onSigmaValueChanged(e) {
+ if (state.selectedCurveId === null) return;
+
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (!curve) return;
+
+ curve.profile.param1 = parseFloat(e.target.value);
+ document.getElementById('sigmaSlider').value = curve.profile.param1;
+
+ render();
+}
+
+function onRefOpacityChanged(e) {
+ state.referenceOpacity = parseFloat(e.target.value) / 100.0; // Convert 0-100 to 0.0-1.0
+ document.getElementById('refOpacityValue').value = e.target.value;
+ render();
+}
+
+function onRefOpacityValueChanged(e) {
+ state.referenceOpacity = parseFloat(e.target.value) / 100.0;
+ document.getElementById('refOpacitySlider').value = e.target.value;
+ render();
+}
+
+function onVolumeChanged(e) {
+ state.playbackVolume = parseFloat(e.target.value) / 100.0; // Convert 0-100 to 0.0-1.0
+ document.getElementById('volumeValue').value = e.target.value;
+
+ // Update gain node if audio is currently playing
+ if (state.currentGainNode) {
+ state.currentGainNode.gain.value = state.playbackVolume;
+ }
+}
+
+function onVolumeValueChanged(e) {
+ state.playbackVolume = parseFloat(e.target.value) / 100.0;
+ document.getElementById('volumeSlider').value = e.target.value;
+
+ // Update gain node if audio is currently playing
+ if (state.currentGainNode) {
+ state.currentGainNode.gain.value = state.playbackVolume;
+ }
+}
+
+function onCurveVolumeChanged(e) {
+ if (state.selectedCurveId === null) return;
+
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (!curve) return;
+
+ curve.volume = parseFloat(e.target.value) / 100.0; // Convert 0-100 to 0.0-1.0
+ document.getElementById('curveVolumeValue').value = e.target.value;
+
+ render();
+}
+
+function onCurveVolumeValueChanged(e) {
+ if (state.selectedCurveId === null) return;
+
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (!curve) return;
+
+ curve.volume = parseFloat(e.target.value) / 100.0;
+ document.getElementById('curveVolumeSlider').value = e.target.value;
+
+ render();
+}
+
+// ============================================================================
+// Canvas Interaction
+// ============================================================================
+
+let isDragging = false;
+let dragStartX = 0;
+let dragStartY = 0;
+
+function onCanvasMouseDown(e) {
+ const rect = e.target.getBoundingClientRect();
+ const x = e.clientX - rect.left;
+ const y = e.clientY - rect.top;
+
+ // Check if clicking on existing control point
+ const clickedPoint = findControlPointAt(x, y);
+
+ if (clickedPoint) {
+ // Start dragging existing point
+ state.selectedCurveId = clickedPoint.curveId;
+ state.selectedControlPointIdx = clickedPoint.pointIdx;
+ isDragging = true;
+ dragStartX = x;
+ dragStartY = y;
+ updateCurveUI();
+ updatePointInfo();
+ render();
+ } else if (state.selectedCurveId !== null) {
+ // Place new control point
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (curve) {
+ const point = screenToSpectrogram(x, y);
+ curve.controlPoints.push(point);
+
+ // Sort by frame
+ curve.controlPoints.sort((a, b) => a.frame - b.frame);
+
+ saveHistoryState('Add control point');
+ updateCurveUI();
+ updatePointInfo();
+ render();
+ }
+ }
+}
+
+function onCanvasMouseMove(e) {
+ if (!isDragging) return;
+ if (state.selectedCurveId === null || state.selectedControlPointIdx === null) return;
+
+ const rect = e.target.getBoundingClientRect();
+ const x = e.clientX - rect.left;
+ const y = e.clientY - rect.top;
+
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (!curve) return;
+
+ const point = curve.controlPoints[state.selectedControlPointIdx];
+ if (!point) return;
+
+ // Update point position
+ const newPoint = screenToSpectrogram(x, y);
+ point.frame = newPoint.frame;
+ point.freqHz = newPoint.freqHz;
+ point.amplitude = newPoint.amplitude;
+
+ // Re-sort by frame
+ curve.controlPoints.sort((a, b) => a.frame - b.frame);
+
+ // Update point info panel in real-time
+ updatePointInfo();
+
+ render();
+}
+
+function onCanvasMouseUp(e) {
+ if (isDragging) {
+ isDragging = false;
+ saveHistoryState('Move control point');
+ }
+}
+
+function onCanvasRightClick(e) {
+ e.preventDefault();
+
+ const rect = e.target.getBoundingClientRect();
+ const x = e.clientX - rect.left;
+ const y = e.clientY - rect.top;
+
+ const clickedPoint = findControlPointAt(x, y);
+ if (clickedPoint) {
+ const curve = state.curves.find(c => c.id === clickedPoint.curveId);
+ if (curve) {
+ curve.controlPoints.splice(clickedPoint.pointIdx, 1);
+ state.selectedControlPointIdx = null;
+
+ saveHistoryState('Delete control point');
+ updateCurveUI();
+ render();
+ }
+ }
+}
+
+function findControlPointAt(screenX, screenY) {
+ const CLICK_RADIUS = 8; // pixels
+
+ for (const curve of state.curves) {
+ for (let i = 0; i < curve.controlPoints.length; i++) {
+ const point = curve.controlPoints[i];
+ const screenPos = spectrogramToScreen(point.frame, point.freqHz);
+
+ const dx = screenX - screenPos.x;
+ const dy = screenY - screenPos.y;
+ const dist = Math.sqrt(dx * dx + dy * dy);
+
+ if (dist <= CLICK_RADIUS) {
+ return {curveId: curve.id, pointIdx: i};
+ }
+ }
+ }
+
+ return null;
+}
+
+function deleteSelectedControlPoint() {
+ if (state.selectedCurveId === null || state.selectedControlPointIdx === null) return;
+
+ const curve = state.curves.find(c => c.id === state.selectedCurveId);
+ if (curve && state.selectedControlPointIdx < curve.controlPoints.length) {
+ curve.controlPoints.splice(state.selectedControlPointIdx, 1);
+ state.selectedControlPointIdx = null;
+
+ saveHistoryState('Delete control point');
+ updateCurveUI();
+ render();
+ }
+}
+
+function deselectAll() {
+ state.selectedCurveId = null;
+ state.selectedControlPointIdx = null;
+ updateCurveUI();
+ updatePointInfo();
+ render();
+}
+
+function onCanvasHover(e) {
+ const rect = e.target.getBoundingClientRect();
+ state.mouseX = e.clientX - rect.left;
+ state.mouseY = e.clientY - rect.top;
+
+ // Convert to spectrogram coordinates
+ const coords = screenToSpectrogram(state.mouseX, state.mouseY);
+ state.mouseFrame = Math.floor(coords.frame);
+ state.mouseFreq = coords.freqHz;
+
+ // Only redraw if not dragging (avoid slowdown during drag)
+ if (!isDragging) {
+ render();
+ drawSpectrumViewer(); // Update spectrum viewer with frame under mouse
+ }
+}
+
+function onCanvasLeave(e) {
+ state.mouseX = -1;
+ state.mouseY = -1;
+ render();
+}
+
+// ============================================================================
+// Coordinate Conversion
+// ============================================================================
+
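+// Log-scale mapping: the vertical axis spans FREQ_MIN..FREQ_MAX on a log10 scale,
+// i.e. y_norm = (log10(f) - log10(FREQ_MIN)) / (log10(FREQ_MAX) - log10(FREQ_MIN)).
+// Worked example: on a 600 px canvas, f = 1 kHz gives y_norm ≈ 0.585, so
+// y = canvasHeight * (1 - y_norm) ≈ 249 px (about 41% down from the top).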
+function screenToSpectrogram(screenX, screenY) {
+ const frame = Math.round(screenX / state.pixelsPerFrame);
+
+ let freqHz;
+ if (USE_LOG_SCALE) {
+ // Logarithmic frequency mapping
+ const logMin = Math.log10(FREQ_MIN);
+ const logMax = Math.log10(FREQ_MAX);
+ const normalizedY = 1.0 - (screenY / state.canvasHeight); // Flip Y (0 at bottom, 1 at top)
+ const logFreq = logMin + normalizedY * (logMax - logMin);
+ freqHz = Math.pow(10, logFreq);
+ } else {
+ // Linear frequency mapping (old behavior)
+ const bin = Math.round((state.canvasHeight - screenY) / state.pixelsPerBin);
+ freqHz = (bin / state.referenceDctSize) * (SAMPLE_RATE / 2);
+ }
+
+ // Amplitude from Y position (normalized 0-1, top = 1.0, bottom = 0.0)
+ const amplitude = 1.0 - (screenY / state.canvasHeight);
+
+ return {
+ frame: Math.max(0, frame),
+ freqHz: Math.max(FREQ_MIN, Math.min(FREQ_MAX, freqHz)),
+ amplitude: Math.max(0, Math.min(1, amplitude))
+ };
+}
+
+function spectrogramToScreen(frame, freqHz) {
+ const x = frame * state.pixelsPerFrame;
+
+ let y;
+ if (USE_LOG_SCALE) {
+ // Logarithmic frequency mapping
+ const logMin = Math.log10(FREQ_MIN);
+ const logMax = Math.log10(FREQ_MAX);
+ const clampedFreq = Math.max(FREQ_MIN, Math.min(FREQ_MAX, freqHz));
+ const logFreq = Math.log10(clampedFreq);
+ const normalizedY = (logFreq - logMin) / (logMax - logMin);
+ y = state.canvasHeight * (1.0 - normalizedY); // Flip Y back to screen coords
+ } else {
+ // Linear frequency mapping (old behavior)
+ const bin = (freqHz / (SAMPLE_RATE / 2)) * state.referenceDctSize;
+ y = state.canvasHeight - (bin * state.pixelsPerBin);
+ }
+
+ return {x, y};
+}
+
+// ============================================================================
+// Rendering
+// ============================================================================
+
+function render() {
+ const canvas = document.getElementById('spectrogramCanvas');
+ const ctx = canvas.getContext('2d');
+
+ // Clear canvas
+ ctx.fillStyle = '#1e1e1e';
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
+
+ // Draw reference spectrogram (background)
+ if (state.referenceSpectrogram) {
+ drawReferenceSpectrogram(ctx);
+ }
+
+ // Draw procedural spectrogram (foreground)
+ if (state.curves.length > 0) {
+ drawProceduralSpectrogram(ctx);
+ }
+
+ // Draw frequency axis (log-scale grid and labels)
+ drawFrequencyAxis(ctx);
+
+ // Draw playhead indicator
+ drawPlayhead(ctx);
+
+ // Draw mouse crosshair and tooltip
+ drawCrosshair(ctx);
+
+ // Draw control points
+ drawControlPoints(ctx);
+}
+
+function drawPlayhead(ctx) {
+ if (!state.isPlaying || state.playbackCurrentFrame < 0) return;
+
+ const x = state.playbackCurrentFrame * state.pixelsPerFrame;
+
+ // Draw vertical line
+ ctx.strokeStyle = '#ff3333'; // Bright red
+ ctx.lineWidth = 2;
+ ctx.setLineDash([5, 3]); // Dashed line
+ ctx.beginPath();
+ ctx.moveTo(x, 0);
+ ctx.lineTo(x, state.canvasHeight);
+ ctx.stroke();
+ ctx.setLineDash([]); // Reset to solid line
+}
+
+function drawCrosshair(ctx) {
+ if (state.mouseX < 0 || state.mouseY < 0) return;
+
+ // Draw vertical line
+ ctx.strokeStyle = 'rgba(255, 255, 255, 0.3)';
+ ctx.lineWidth = 1;
+ ctx.beginPath();
+ ctx.moveTo(state.mouseX, 0);
+ ctx.lineTo(state.mouseX, state.canvasHeight);
+ ctx.stroke();
+
+ // Draw tooltip
+ const frameText = `Frame: ${state.mouseFrame}`;
+ const freqText = `Freq: ${state.mouseFreq.toFixed(1)} Hz`;
+
+ ctx.font = '12px monospace';
+ const frameWidth = ctx.measureText(frameText).width;
+ const freqWidth = ctx.measureText(freqText).width;
+ const maxWidth = Math.max(frameWidth, freqWidth);
+
+ const tooltipX = state.mouseX + 10;
+ const tooltipY = state.mouseY - 40;
+ const tooltipWidth = maxWidth + 20;
+ const tooltipHeight = 40;
+
+ // Background
+ ctx.fillStyle = 'rgba(0, 0, 0, 0.8)';
+ ctx.fillRect(tooltipX, tooltipY, tooltipWidth, tooltipHeight);
+
+ // Border
+ ctx.strokeStyle = 'rgba(255, 255, 255, 0.3)';
+ ctx.lineWidth = 1;
+ ctx.strokeRect(tooltipX, tooltipY, tooltipWidth, tooltipHeight);
+
+ // Text
+ ctx.fillStyle = '#ffffff';
+ ctx.fillText(frameText, tooltipX + 10, tooltipY + 15);
+ ctx.fillText(freqText, tooltipX + 10, tooltipY + 30);
+}
+
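+// Reference layer: greyscale intensity from a dB mapping of |coefficient|,
+// intensity = clamp((20*log10(|c|) - dB_min) / (dB_max - dB_min)) * 255.
+// Worked example: |c| = 1.0 → 0 dB → (0 - (-60)) / (40 - (-60)) = 0.6 → intensity ≈ 153.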
+function drawReferenceSpectrogram(ctx) {
+ // Create offscreen canvas for reference layer
+ const offscreen = document.createElement('canvas');
+ offscreen.width = state.canvasWidth;
+ offscreen.height = state.canvasHeight;
+ const offscreenCtx = offscreen.getContext('2d');
+
+ const imgData = offscreenCtx.createImageData(state.canvasWidth, state.canvasHeight);
+
+ // CORRECT MAPPING: Iterate over destination pixels → sample source bins
+ // This prevents gaps and overlaps
+ for (let screenY = 0; screenY < state.canvasHeight; screenY++) {
+ for (let screenX = 0; screenX < state.canvasWidth; screenX++) {
+ // Convert screen coordinates to spectrogram coordinates
+ const spectroCoords = screenToSpectrogram(screenX, screenY);
+ const frameIdx = Math.floor(spectroCoords.frame);
+
+ // Convert freqHz back to bin
+ const bin = Math.round((spectroCoords.freqHz / (SAMPLE_RATE / 2)) * state.referenceDctSize);
+
+ // Bounds check
+ if (frameIdx < 0 || frameIdx >= state.referenceNumFrames) continue;
+ if (bin < 0 || bin >= state.referenceDctSize) continue;
+
+ // Sample spectrogram
+ const specValue = state.referenceSpectrogram[frameIdx * state.referenceDctSize + bin];
+
+ // Logarithmic intensity mapping (dB scale)
+ // Maps wide dynamic range to visible range
+ const amplitude = Math.abs(specValue);
+ let intensity = 0;
+ if (amplitude > 0.0001) { // Noise floor
+ const dB = 20.0 * Math.log10(amplitude);
+ const dB_min = -60.0; // Noise floor (-60 dB)
+ const dB_max = 40.0; // Peak (40 dB headroom)
+ const normalized = (dB - dB_min) / (dB_max - dB_min);
+ intensity = Math.floor(Math.max(0, Math.min(255, normalized * 255)));
+ }
+
+ // Write pixel
+ const pixelIdx = (screenY * state.canvasWidth + screenX) * 4;
+ imgData.data[pixelIdx + 0] = intensity; // R
+ imgData.data[pixelIdx + 1] = intensity; // G
+ imgData.data[pixelIdx + 2] = intensity; // B
+ imgData.data[pixelIdx + 3] = 255; // A
+ }
+ }
+
+ offscreenCtx.putImageData(imgData, 0, 0);
+
+ // Draw offscreen canvas with proper alpha blending
+ ctx.globalAlpha = state.referenceOpacity;
+ ctx.drawImage(offscreen, 0, 0);
+ ctx.globalAlpha = 1.0;
+}
+
+function drawProceduralSpectrogram(ctx) {
+ // Draw each curve separately with its own color and volume
+ const numFrames = state.referenceNumFrames || 100;
+
+ state.curves.forEach(curve => {
+ if (curve.controlPoints.length === 0) return;
+
+ // Create offscreen canvas for this curve
+ const offscreen = document.createElement('canvas');
+ offscreen.width = state.canvasWidth;
+ offscreen.height = state.canvasHeight;
+ const offscreenCtx = offscreen.getContext('2d');
+
+ // Generate spectrogram for this curve only
+ const curveSpec = new Float32Array(state.referenceDctSize * numFrames);
+ drawCurveToSpectrogram(curve, curveSpec, state.referenceDctSize, numFrames);
+
+ // Parse curve color (hex to RGB)
+ const color = hexToRgb(curve.color || '#0e639c');
+
+ const imgData = offscreenCtx.createImageData(state.canvasWidth, state.canvasHeight);
+
+ // CORRECT MAPPING: Iterate over destination pixels → sample source bins
+ for (let screenY = 0; screenY < state.canvasHeight; screenY++) {
+ for (let screenX = 0; screenX < state.canvasWidth; screenX++) {
+ // Convert screen coordinates to spectrogram coordinates
+ const spectroCoords = screenToSpectrogram(screenX, screenY);
+ const frameIdx = Math.floor(spectroCoords.frame);
+
+ // Convert freqHz back to bin
+ const bin = Math.round((spectroCoords.freqHz / (SAMPLE_RATE / 2)) * state.referenceDctSize);
+
+ // Bounds check
+ if (frameIdx < 0 || frameIdx >= numFrames) continue;
+ if (bin < 0 || bin >= state.referenceDctSize) continue;
+
+ // Sample spectrogram
+ const specValue = curveSpec[frameIdx * state.referenceDctSize + bin];
+
+ // Logarithmic intensity mapping with steeper falloff for procedural curves
+ const amplitude = Math.abs(specValue);
+ let intensity = 0.0;
+ if (amplitude > 0.001) { // Higher noise floor for cleaner visualization
+ const dB = 20.0 * Math.log10(amplitude);
+ const dB_min = -40.0; // Higher floor = steeper falloff (was -60)
+ const dB_max = 40.0; // Peak
+ const normalized = (dB - dB_min) / (dB_max - dB_min);
+ intensity = Math.max(0, Math.min(1.0, normalized)); // 0.0 to 1.0
+ }
+
+ if (intensity > 0.01) { // Only draw visible pixels
+ const pixelIdx = (screenY * state.canvasWidth + screenX) * 4;
+ // Use constant color with alpha for intensity (pure colors)
+ imgData.data[pixelIdx + 0] = color.r;
+ imgData.data[pixelIdx + 1] = color.g;
+ imgData.data[pixelIdx + 2] = color.b;
+ imgData.data[pixelIdx + 3] = Math.floor(intensity * 255); // Alpha = intensity
+ }
+ }
+ }
+
+ offscreenCtx.putImageData(imgData, 0, 0);
+
+ // Draw offscreen canvas with curve volume as opacity (blends properly)
+ const curveOpacity = 0.6 * curve.volume; // Base opacity × curve volume
+ ctx.globalAlpha = curveOpacity;
+ ctx.drawImage(offscreen, 0, 0);
+ });
+
+ ctx.globalAlpha = 1.0;
+}
+
+// Helper: Convert hex color to RGB
+function hexToRgb(hex) {
+ const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
+ return result ? {
+ r: parseInt(result[1], 16),
+ g: parseInt(result[2], 16),
+ b: parseInt(result[3], 16)
+ } : {r: 14, g: 99, b: 156}; // Default blue
+}
+
+function drawControlPoints(ctx) {
+ state.curves.forEach(curve => {
+ const isSelected = curve.id === state.selectedCurveId;
+ const curveColor = curve.color || '#0e639c';
+
+ // Draw Bezier curve path
+ if (curve.controlPoints.length >= 2) {
+ ctx.strokeStyle = isSelected ? curveColor : '#666666';
+ ctx.lineWidth = isSelected ? 3 : 2;
+ ctx.beginPath();
+
+ for (let i = 0; i < curve.controlPoints.length; i++) {
+ const point = curve.controlPoints[i];
+ const screenPos = spectrogramToScreen(point.frame, point.freqHz);
+
+ if (i === 0) {
+ ctx.moveTo(screenPos.x, screenPos.y);
+ } else {
+ ctx.lineTo(screenPos.x, screenPos.y);
+ }
+ }
+
+ ctx.stroke();
+ }
+
+ // Draw control points
+ curve.controlPoints.forEach((point, idx) => {
+ const screenPos = spectrogramToScreen(point.frame, point.freqHz);
+ const isPointSelected = isSelected && idx === state.selectedControlPointIdx;
+
+ ctx.fillStyle = isPointSelected ? '#ffaa00' : (isSelected ? curveColor : '#888888');
+ ctx.beginPath();
+ ctx.arc(screenPos.x, screenPos.y, 6, 0, 2 * Math.PI);
+ ctx.fill();
+
+ ctx.strokeStyle = '#ffffff';
+ ctx.lineWidth = 2;
+ ctx.stroke();
+
+ // Draw label
+ if (isSelected) {
+ ctx.fillStyle = '#ffffff';
+ ctx.font = '11px monospace';
+ ctx.fillText(`${Math.round(point.freqHz)}Hz`, screenPos.x + 10, screenPos.y - 5);
+ }
+ });
+ });
+}
+
+function drawFrequencyAxis(ctx) {
+ if (!USE_LOG_SCALE) return; // Only draw axis in log-scale mode
+
+ // Standard musical frequencies to display
+ const frequencies = [20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 16000];
+
+ ctx.fillStyle = '#cccccc';
+ ctx.font = '11px monospace';
+ ctx.textAlign = 'right';
+ ctx.textBaseline = 'middle';
+
+ frequencies.forEach(freq => {
+ const screenPos = spectrogramToScreen(0, freq);
+ const y = screenPos.y;
+
+ if (y >= 0 && y <= state.canvasHeight) {
+ // Draw frequency label
+ const label = freq >= 1000 ? `${freq / 1000}k` : `${freq}`;
+ ctx.fillText(label, state.canvasWidth - 5, y);
+
+ // Draw subtle grid line
+ ctx.strokeStyle = 'rgba(255, 255, 255, 0.1)';
+ ctx.lineWidth = 1;
+ ctx.beginPath();
+ ctx.moveTo(0, y);
+ ctx.lineTo(state.canvasWidth - 40, y); // Leave space for label
+ ctx.stroke();
+ }
+ });
+}
+
+// ============================================================================
+// Procedural Spectrogram Generation
+// ============================================================================
+
+function generateProceduralSpectrogram(numFrames) {
+ const spectrogram = new Float32Array(state.referenceDctSize * numFrames);
+
+ // For each curve, draw its contribution
+ state.curves.forEach(curve => {
+ drawCurveToSpectrogram(curve, spectrogram, state.referenceDctSize, numFrames);
+ });
+
+ return spectrogram;
+}
+
+function drawCurveToSpectrogram(curve, spectrogram, dctSize, numFrames) {
+ if (curve.controlPoints.length === 0) return;
+
+ // Find the frame range covered by control points
+ const frames = curve.controlPoints.map(p => p.frame);
+ const minFrame = Math.max(0, Math.min(...frames)); // Clamp to valid range
+ const maxFrame = Math.min(numFrames - 1, Math.max(...frames));
+
+ // Amplitude scaling factor to match typical DCT coefficient magnitudes
+ // Increased from 10.0 to 50.0 for better audibility
+ const AMPLITUDE_SCALE = 50.0;
+
+ // Apply curve volume to the amplitude
+ const curveVolume = curve.volume || 1.0;
+
+ // Only iterate over the range where control points exist
+ for (let frame = minFrame; frame <= maxFrame; frame++) {
+ // Evaluate Bezier curve at this frame
+ const freqHz = evaluateBezierLinear(curve.controlPoints, frame, 'freqHz');
+ const amplitude = evaluateBezierLinear(curve.controlPoints, frame, 'amplitude');
+
+ // Convert freq to bin
+ const freqBin0 = (freqHz / (SAMPLE_RATE / 2)) * dctSize;
+
+ // Apply vertical profile
+ for (let bin = 0; bin < dctSize; bin++) {
+ const dist = Math.abs(bin - freqBin0);
+ const profileValue = evaluateProfile(curve.profile, dist);
+
+ const idx = frame * dctSize + bin;
+ spectrogram[idx] += amplitude * profileValue * AMPLITUDE_SCALE * curveVolume;
+ }
+ }
+}
+
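+// Piecewise-linear interpolation of `property` between control points, keyed on
+// frame (points are kept sorted by frame). Despite the "Bezier" name, no Bezier
+// basis is used; each segment is a straight line, clamped outside the point range.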
+function evaluateBezierLinear(controlPoints, frame, property) {
+ if (controlPoints.length === 0) return 0;
+ if (controlPoints.length === 1) return controlPoints[0][property];
+
+ const frames = controlPoints.map(p => p.frame);
+ const values = controlPoints.map(p => p[property]);
+
+ // Clamp to range
+ if (frame <= frames[0]) return values[0];
+ if (frame >= frames[frames.length - 1]) return values[values.length - 1];
+
+ // Find segment
+ for (let i = 0; i < frames.length - 1; i++) {
+ if (frame >= frames[i] && frame <= frames[i + 1]) {
+ const t = (frame - frames[i]) / (frames[i + 1] - frames[i]);
+ return values[i] * (1 - t) + values[i + 1] * t;
+ }
+ }
+
+ return values[values.length - 1];
+}
+
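+// Vertical profiles, evaluated as a function of the distance d = |bin - center_bin|:
+//   gaussian          : exp(-d^2 / sigma^2)   (no factor of 2; falls to 1/e at d = sigma)
+//   decaying_sinusoid : exp(-decay * d) * cos(omega * d)
+//   noise             : amplitude * deterministic hash noise in [-1, 1] * exp(-d^2 / decay^2)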
+function evaluateProfile(profile, distance) {
+ switch (profile.type) {
+ case 'gaussian': {
+ const sigma = profile.param1;
+ return Math.exp(-(distance * distance) / (sigma * sigma));
+ }
+
+ case 'decaying_sinusoid': {
+ const decay = profile.param1;
+ const omega = profile.param2 || 0.5;
+ return Math.exp(-decay * distance) * Math.cos(omega * distance);
+ }
+
+ case 'noise': {
+ const amplitude = profile.param1;
+ const decay = profile.param2 || 30.0; // Decay rate (like sigma for Gaussian)
+
+ // Deterministic noise based on distance
+ const seed = 1234;
+ const hash = Math.floor((seed + distance * 17.13) * 1000) % 10000;
+ const noise = (hash / 10000) * 2.0 - 1.0; // Random value: -1 to +1
+
+ // Apply exponential decay (like Gaussian)
+ const decayFactor = Math.exp(-(distance * distance) / (decay * decay));
+
+ return amplitude * noise * decayFactor;
+ }
+
+ default:
+ return 0;
+ }
+}
+
+// ============================================================================
+// Audio Playback
+// ============================================================================
+
+function playAudio(source) {
+ if (!state.audioContext) {
+ alert('Audio context not available');
+ return;
+ }
+
+ stopAudio();
+
+ let spectrogram;
+ let numFrames;
+
+ if (source === 'original') {
+ if (!state.referenceSpectrogram) {
+ alert('No reference audio loaded');
+ return;
+ }
+ spectrogram = state.referenceSpectrogram;
+ numFrames = state.referenceNumFrames;
+ } else { // procedural
+ if (state.curves.length === 0) {
+ alert('No curves defined. Add a curve first.');
+ return;
+ }
+ numFrames = state.referenceNumFrames || 100;
+ spectrogram = generateProceduralSpectrogram(numFrames);
+ }
+
+ // Convert spectrogram to audio via IDCT
+ const audioData = spectrogramToAudio(spectrogram, state.referenceDctSize, numFrames);
+
+ // Create audio buffer
+ const audioBuffer = state.audioContext.createBuffer(1, audioData.length, SAMPLE_RATE);
+ audioBuffer.getChannelData(0).set(audioData);
+
+ // Create gain node for volume control
+ const gainNode = state.audioContext.createGain();
+ gainNode.gain.value = state.playbackVolume;
+
+ // Play
+ const bufferSource = state.audioContext.createBufferSource();
+ bufferSource.buffer = audioBuffer;
+ bufferSource.connect(gainNode);
+ gainNode.connect(state.audioContext.destination);
+ bufferSource.start();
+
+ state.currentSource = bufferSource;
+ state.currentGainNode = gainNode; // Store gain node for live volume updates
+ state.isPlaying = true;
+
+ // Start playhead animation
+ state.playbackStartTime = state.audioContext.currentTime;
+ state.playbackDuration = audioData.length / SAMPLE_RATE;
+ state.playbackCurrentFrame = 0;
+ updatePlayhead();
+
+ bufferSource.onended = () => {
+ state.isPlaying = false;
+ state.currentSource = null;
+ state.currentGainNode = null; // Clear gain node reference
+ state.playbackCurrentFrame = 0;
+ render(); // Clear playhead
+ };
+
+ console.log('Playing audio:', audioData.length, 'samples at volume', state.playbackVolume);
+}
+
+function updatePlayhead() {
+ if (!state.isPlaying) return;
+
+ // Calculate current playback position
+ const elapsed = state.audioContext.currentTime - state.playbackStartTime;
+ const progress = Math.min(1.0, elapsed / state.playbackDuration);
+ state.playbackCurrentFrame = progress * (state.referenceNumFrames || 100);
+
+ // Redraw with playhead
+ render();
+
+ // Update spectrum viewer
+ drawSpectrumViewer();
+
+ // Continue animation
+ requestAnimationFrame(updatePlayhead);
+}
+
+function drawSpectrumViewer() {
+ const viewer = document.getElementById('spectrumViewer');
+ const canvas = document.getElementById('spectrumCanvas');
+ const ctx = canvas.getContext('2d');
+
+ // Always show viewer (not just during playback)
+ viewer.classList.add('active');
+
+ // Determine which frame to display
+ let frameIdx;
+ if (state.isPlaying) {
+ frameIdx = Math.floor(state.playbackCurrentFrame);
+ } else {
+ // When not playing, show frame under mouse
+ frameIdx = state.mouseFrame;
+ }
+
+ if (frameIdx < 0 || frameIdx >= (state.referenceNumFrames || 100)) return;
+
+ // Clear canvas
+ ctx.fillStyle = '#1e1e1e';
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
+
+ const numBars = 100; // Downsample to 100 bars for performance
+ const barWidth = canvas.width / numBars;
+
+ // Get reference spectrum (if available)
+ let refSpectrum = null;
+ if (state.referenceSpectrogram && frameIdx < state.referenceNumFrames) {
+ refSpectrum = new Float32Array(state.referenceDctSize);
+ for (let bin = 0; bin < state.referenceDctSize; bin++) {
+ refSpectrum[bin] = state.referenceSpectrogram[frameIdx * state.referenceDctSize + bin];
+ }
+ }
+
+ // Get procedural spectrum (if curves exist)
+ let procSpectrum = null;
+ if (state.curves.length > 0) {
+ const numFrames = state.referenceNumFrames || 100;
+ const fullProcSpec = new Float32Array(state.referenceDctSize * numFrames);
+ state.curves.forEach(curve => {
+ drawCurveToSpectrogram(curve, fullProcSpec, state.referenceDctSize, numFrames);
+ });
+
+ // Extract just this frame
+ procSpectrum = new Float32Array(state.referenceDctSize);
+ for (let bin = 0; bin < state.referenceDctSize; bin++) {
+ procSpectrum[bin] = fullProcSpec[frameIdx * state.referenceDctSize + bin];
+ }
+ }
+
+ // Draw spectrum bars (both reference and procedural overlaid)
+ for (let i = 0; i < numBars; i++) {
+ const binIdx = Math.floor((i / numBars) * state.referenceDctSize);
+
+ // Draw reference spectrum (green, behind)
+ if (refSpectrum) {
+ const amplitude = Math.abs(refSpectrum[binIdx]);
+ let height = 0;
+ if (amplitude > 0.0001) {
+ const dB = 20.0 * Math.log10(amplitude);
+ const dB_min = -60.0;
+ const dB_max = 40.0;
+ const normalized = (dB - dB_min) / (dB_max - dB_min);
+ height = Math.max(0, Math.min(canvas.height, normalized * canvas.height));
+ }
+
+ if (height > 0) {
+ const gradient = ctx.createLinearGradient(0, canvas.height - height, 0, canvas.height);
+ gradient.addColorStop(0, '#00ff00');
+ gradient.addColorStop(1, '#004400');
+ ctx.fillStyle = gradient;
+ ctx.fillRect(i * barWidth, canvas.height - height, barWidth - 1, height);
+ }
+ }
+
+ // Draw procedural spectrum (red, overlaid)
+ if (procSpectrum) {
+ const amplitude = Math.abs(procSpectrum[binIdx]);
+ let height = 0;
+ if (amplitude > 0.001) {
+ const dB = 20.0 * Math.log10(amplitude);
+ const dB_min = -40.0; // Same as procedural spectrogram rendering
+ const dB_max = 40.0;
+ const normalized = (dB - dB_min) / (dB_max - dB_min);
+ height = Math.max(0, Math.min(canvas.height, normalized * canvas.height));
+ }
+
+ if (height > 0) {
+ const gradient = ctx.createLinearGradient(0, canvas.height - height, 0, canvas.height);
+ gradient.addColorStop(0, '#ff5555'); // Bright red
+ gradient.addColorStop(1, '#550000'); // Dark red
+ ctx.fillStyle = gradient;
+ // Make it slightly transparent to see overlap
+ ctx.globalAlpha = 0.7;
+ ctx.fillRect(i * barWidth, canvas.height - height, barWidth - 1, height);
+ ctx.globalAlpha = 1.0;
+ }
+ }
+ }
+
+ // Draw frequency labels
+ ctx.fillStyle = '#888888';
+ ctx.font = '9px monospace';
+ ctx.textAlign = 'left';
+ ctx.fillText('20 Hz', 2, canvas.height - 2);
+ ctx.textAlign = 'right';
+ ctx.fillText('16 kHz', canvas.width - 2, canvas.height - 2);
+
+ // Draw frame number label (top-left)
+ ctx.textAlign = 'left';
+ ctx.fillStyle = state.isPlaying ? '#ff3333' : '#aaaaaa';
+ ctx.fillText(`Frame ${frameIdx}`, 2, 10);
+}
+
+function stopAudio() {
+ if (state.currentSource) {
+ try {
+ state.currentSource.stop();
+ state.currentSource.disconnect();
+ } catch (e) {
+ // Source may have already stopped naturally
+ }
+ state.currentSource = null;
+ }
+ state.currentGainNode = null; // Clear gain node reference
+ state.isPlaying = false;
+}
+
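+// Synthesis: per-frame IDCT followed by Hann-windowed overlap-add with a 50% hop
+// (hopSize = dctSize / 2). Combined with the Hann analysis window applied in
+// audioToSpectrogram(), each sample is effectively shaped by window^2 across
+// overlapping frames.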
+function spectrogramToAudio(spectrogram, dctSize, numFrames) {
+ const hopSize = dctSize / 2;
+ const audioLength = numFrames * hopSize + dctSize;
+ const audioData = new Float32Array(audioLength);
+ const window = hanningWindowArray;
+
+ for (let frameIdx = 0; frameIdx < numFrames; frameIdx++) {
+ // Copy spectral coefficients for this frame (the synthesis window is applied after the IDCT below)
+ const frame = new Float32Array(dctSize);
+ for (let b = 0; b < dctSize; b++) {
+ frame[b] = spectrogram[frameIdx * dctSize + b];
+ }
+
+ // IDCT
+ const timeFrame = javascript_idct_512(frame);
+
+ // Apply synthesis window for overlap-add
+ const frameStart = frameIdx * hopSize;
+ for (let i = 0; i < dctSize; i++) {
+ if (frameStart + i < audioLength) {
+ audioData[frameStart + i] += timeFrame[i] * window[i];
+ }
+ }
+ }
+
+ return audioData;
+}
+
+// ============================================================================
+// Undo/Redo
+// ============================================================================
+
+function saveHistoryState(action) {
+ // Remove any states after current index
+ state.history = state.history.slice(0, state.historyIndex + 1);
+
+ // Save current state
+ const snapshot = {
+ action,
+ curves: JSON.parse(JSON.stringify(state.curves)),
+ selectedCurveId: state.selectedCurveId
+ };
+
+ state.history.push(snapshot);
+
+ // Limit history size
+ if (state.history.length > state.maxHistorySize) {
+ state.history.shift();
+ } else {
+ state.historyIndex++;
+ }
+
+ updateUndoRedoButtons();
+}
+
+function undo() {
+ if (state.historyIndex <= 0) return;
+
+ state.historyIndex--;
+ const snapshot = state.history[state.historyIndex];
+
+ state.curves = JSON.parse(JSON.stringify(snapshot.curves));
+ state.selectedCurveId = snapshot.selectedCurveId;
+ state.selectedControlPointIdx = null;
+
+ updateCurveUI();
+ updateUndoRedoButtons();
+ render();
+
+ console.log('Undo:', snapshot.action);
+}
+
+function redo() {
+ if (state.historyIndex >= state.history.length - 1) return;
+
+ state.historyIndex++;
+ const snapshot = state.history[state.historyIndex];
+
+ state.curves = JSON.parse(JSON.stringify(snapshot.curves));
+ state.selectedCurveId = snapshot.selectedCurveId;
+ state.selectedControlPointIdx = null;
+
+ updateCurveUI();
+ updateUndoRedoButtons();
+ render();
+
+ console.log('Redo:', snapshot.action);
+}
+
+function updateUndoRedoButtons() {
+ document.getElementById('undoBtn').disabled = state.historyIndex <= 0;
+ document.getElementById('redoBtn').disabled = state.historyIndex >= state.history.length - 1;
+}
+
+// ============================================================================
+// File Export
+// ============================================================================
+
+function saveProceduralParams() {
+ if (state.curves.length === 0) {
+ alert('No curves to save. Add at least one curve first.');
+ return;
+ }
+
+ const text = generateProceduralParamsText();
+ downloadTextFile('procedural_params.txt', text);
+}
+
+function generateProceduralParamsText() {
+ let text = '# Spectral Brush Procedural Parameters\n';
+ text += `METADATA dct_size=${state.referenceDctSize} num_frames=${state.referenceNumFrames || 100} sample_rate=${SAMPLE_RATE}\n\n`;
+
+ state.curves.forEach((curve, idx) => {
+ text += `CURVE bezier\n`;
+
+ curve.controlPoints.forEach(point => {
+ text += ` CONTROL_POINT ${point.frame} ${point.freqHz.toFixed(1)} ${point.amplitude.toFixed(3)}\n`;
+ });
+
+ text += ` PROFILE ${curve.profile.type}`;
+ if (curve.profile.type === 'gaussian') {
+ text += ` sigma=${curve.profile.param1.toFixed(1)}`;
+ } else if (curve.profile.type === 'decaying_sinusoid') {
+ text += ` decay=${curve.profile.param1.toFixed(2)} frequency=${curve.profile.param2.toFixed(2)}`;
+ } else if (curve.profile.type === 'noise') {
+ text += ` amplitude=${curve.profile.param1.toFixed(2)} seed=${curve.profile.param2.toFixed(0)}`;
+ }
+ text += '\n';
+
+ // Add curve volume
+ text += ` VOLUME ${curve.volume.toFixed(3)}\n`;
+
+ text += 'END_CURVE\n\n';
+ });
+
+ return text;
+}
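+
+// Illustrative output of generateProceduralParamsText() (values below are made up;
+// the real numbers come from the curves in the editor):
+//
+//   # Spectral Brush Procedural Parameters
+//   METADATA dct_size=512 num_frames=200 sample_rate=32000
+//
+//   CURVE bezier
+//    CONTROL_POINT 10 440.0 0.800
+//    CONTROL_POINT 80 880.0 0.400
+//    PROFILE gaussian sigma=30.0
+//    VOLUME 1.000
+//   END_CURVE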
+
+function generateCppCode() {
+ if (state.curves.length === 0) {
+ alert('No curves to export. Add at least one curve first.');
+ return;
+ }
+
+ const code = generateCppCodeText();
+ downloadTextFile('gen_procedural.cc', code);
+}
+
+function generateCppCodeText() {
+ let code = '// Generated by Spectral Brush Editor\n';
+ code += '// This code regenerates the procedural spectrogram at runtime\n\n';
+ code += '#include "audio/spectral_brush.h"\n\n';
+
+ code += 'void gen_procedural(float* spec, int dct_size, int num_frames) {\n';
+
+ state.curves.forEach((curve, curveIdx) => {
+ code += ` // Curve ${curveIdx} (volume=${curve.volume.toFixed(3)})\n`;
+ code += ' {\n';
+
+ // Control points arrays
+ const numPoints = curve.controlPoints.length;
+ code += ` const float frames[] = {`;
+ code += curve.controlPoints.map(p => `${p.frame}.0f`).join(', ');
+ code += '};\n';
+
+ code += ` const float freqs[] = {`;
+ code += curve.controlPoints.map(p => `${p.freqHz.toFixed(1)}f`).join(', ');
+ code += '};\n';
+
+ // Apply curve volume to amplitudes
+ const curveVolume = curve.volume || 1.0;
+ code += ` const float amps[] = {`;
+ code += curve.controlPoints.map(p => `${(p.amplitude * curveVolume).toFixed(3)}f`).join(', ');
+ code += '};\n\n';
+
+ // Profile type
+ let profileEnum;
+ if (curve.profile.type === 'gaussian') {
+ profileEnum = 'PROFILE_GAUSSIAN';
+ } else if (curve.profile.type === 'decaying_sinusoid') {
+ profileEnum = 'PROFILE_DECAYING_SINUSOID';
+ } else if (curve.profile.type === 'noise') {
+ profileEnum = 'PROFILE_NOISE';
+ }
+
+ // Function call
+ if (curveIdx === 0) {
+ code += ` draw_bezier_curve(spec, dct_size, num_frames,\n`;
+ } else {
+ code += ` draw_bezier_curve_add(spec, dct_size, num_frames,\n`;
+ }
+ code += ` frames, freqs, amps, ${numPoints},\n`;
+ code += ` ${profileEnum}, ${curve.profile.param1.toFixed(2)}f`;
+
+ if (curve.profile.type === 'decaying_sinusoid' || curve.profile.type === 'noise') {
+ code += `, ${curve.profile.param2.toFixed(2)}f`;
+ }
+
+ code += ');\n';
+ code += ' }\n\n';
+ });
+
+ code += '}\n\n';
+ code += '// Usage in demo_assets.txt:\n';
+ code += '// SOUND_PROC, PROC(gen_procedural), NONE, "Procedural sound"\n';
+
+ return code;
+}
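+
+// Illustrative shape of the generated C++ (values are made up; the real numbers come
+// from the curves in the editor):
+//
+//   void gen_procedural(float* spec, int dct_size, int num_frames) {
+//     // Curve 0 (volume=1.000)
+//     {
+//       const float frames[] = {10.0f, 80.0f};
+//       const float freqs[] = {440.0f, 880.0f};
+//       const float amps[] = {0.800f, 0.400f};
+//
+//       draw_bezier_curve(spec, dct_size, num_frames,
+//           frames, freqs, amps, 2,
+//           PROFILE_GAUSSIAN, 30.00f);
+//     }
+//   }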
+
+function downloadTextFile(filename, text) {
+ const blob = new Blob([text], {type: 'text/plain'});
+ const url = URL.createObjectURL(blob);
+
+ const a = document.createElement('a');
+ a.href = url;
+ a.download = filename;
+ document.body.appendChild(a);
+ a.click();
+ document.body.removeChild(a);
+
+ URL.revokeObjectURL(url);
+
+ console.log('Downloaded:', filename);
+}
+
+// ============================================================================
+// Help Modal
+// ============================================================================
+
+function showHelp() {
+ document.getElementById('helpModal').style.display = 'flex';
+}
+
+function hideHelp() {
+ document.getElementById('helpModal').style.display = 'none';
+}