| author | skal <pascal.massimino@gmail.com> | 2026-02-06 11:22:01 +0100 |
|---|---|---|
| committer | skal <pascal.massimino@gmail.com> | 2026-02-06 11:22:01 +0100 |
| commit | cf0775046c059fed1a4ed04d500f26397002667d (patch) | |
| tree | 00e643a38e601114e755ec77c8b4901b5d045ff8 /tools/spectral_editor/script.js | |
| parent | 5a1adde097e489c259bd052971546e95683c3596 (diff) | |
feat(tools): Add Spectral Brush Editor UI (Phase 2 of Task #5)
Implement web-based editor for procedural audio tracing.
New Files:
- tools/spectral_editor/index.html - Main UI structure
- tools/spectral_editor/style.css - VSCode-inspired dark theme
- tools/spectral_editor/script.js - Editor logic (~1200 lines)
- tools/spectral_editor/dct.js - IDCT/DCT implementation (reused)
- tools/spectral_editor/README.md - Complete user guide
Features:
- Dual-layer canvas (reference + procedural spectrograms)
- Bezier curve editor (click to place, drag to adjust, right-click to delete)
- Profile controls (Gaussian sigma slider)
- Real-time audio playback (Key 1=procedural, Key 2=original, Space=stop)
- Undo/Redo system (50-action history with snapshots)
- File I/O:
- Load .wav files (Web Audio decode + DCT-based STFT) or .spec files (binary parser)
- Save procedural_params.txt (human-readable, re-editable; sample after this list)
- Generate C++ code (copy-paste ready for runtime)
- Keyboard shortcuts (Ctrl+Z / Ctrl+Shift+Z, Ctrl+S / Ctrl+Shift+S, Ctrl+O, ?)
- Help modal with shortcut reference
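A procedural_params.txt file saved by the editor follows the format emitted by generateProceduralParamsText() in script.js; the values below are illustrative only:

    # Spectral Brush Procedural Parameters
    METADATA dct_size=512 num_frames=100 sample_rate=32000

    CURVE bezier
      CONTROL_POINT 0 440.0 0.800
      CONTROL_POINT 60 1200.0 0.350
      PROFILE gaussian sigma=30.0
    END_CURVE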
Technical:
- Pure HTML/CSS/JS (no dependencies)
- Web Audio API for playback (32 kHz sample rate)
- Canvas 2D for visualization (linear frequency/frame grid)
- Linear Bezier interpolation matching C++ runtime
- IDCT with overlap-add synthesis (frame timing sketched below)
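As a quick sanity check of the timing these defaults imply (a sketch based only on the constants in script.js, not additional tooling):

    // With DCT_SIZE = 512, 50% overlap and a 32 kHz sample rate:
    const SAMPLE_RATE = 32000;
    const DCT_SIZE = 512;
    const hopSize = DCT_SIZE / 2;                               // 256 samples
    console.log(1000 * DCT_SIZE / SAMPLE_RATE, 'ms per frame'); // 16 ms
    console.log(1000 * hopSize / SAMPLE_RATE, 'ms per hop');    // 8 ms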
Next: Phase 3 (largely folded into Phase 2 already)
- File loading already implemented
- Export already implemented
- Ready for user testing!
Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
Diffstat (limited to 'tools/spectral_editor/script.js')
| -rw-r--r-- | tools/spectral_editor/script.js | 1189 |
1 file changed, 1189 insertions, 0 deletions
diff --git a/tools/spectral_editor/script.js b/tools/spectral_editor/script.js
new file mode 100644
index 0000000..1518840
--- /dev/null
+++ b/tools/spectral_editor/script.js
@@ -0,0 +1,1189 @@
// Spectral Brush Editor - Main Script
// Implements Bezier curve editing, spectrogram rendering, and audio playback

// ============================================================================
// State Management
// ============================================================================

const SAMPLE_RATE = 32000;
const DCT_SIZE = 512;

const state = {
  // Reference audio data
  referenceSpectrogram: null,   // Float32Array or null
  referenceDctSize: DCT_SIZE,
  referenceNumFrames: 0,

  // Procedural curves
  curves: [],                   // Array of {id, controlPoints: [{frame, freqHz, amplitude}], profile: {type, param1, param2}}
  nextCurveId: 0,
  selectedCurveId: null,
  selectedControlPointIdx: null,

  // Canvas state
  canvasWidth: 0,
  canvasHeight: 0,
  pixelsPerFrame: 2.0,          // Zoom level (pixels per frame)
  pixelsPerBin: 1.0,            // Vertical scale (pixels per frequency bin)

  // Audio playback
  audioContext: null,
  isPlaying: false,
  currentSource: null,

  // Undo/Redo
  history: [],
  historyIndex: -1,
  maxHistorySize: 50
};

// ============================================================================
// Initialization
// ============================================================================

document.addEventListener('DOMContentLoaded', () => {
  initCanvas();
  initUI();
  initKeyboardShortcuts();
  initAudioContext();

  console.log('Spectral Brush Editor initialized');
});

function initCanvas() {
  const canvas = document.getElementById('spectrogramCanvas');
  const container = canvas.parentElement;

  // Set canvas size to match container
  const resizeCanvas = () => {
    canvas.width = container.clientWidth;
    canvas.height = container.clientHeight;
    state.canvasWidth = canvas.width;
    state.canvasHeight = canvas.height;
    render();
  };

  window.addEventListener('resize', resizeCanvas);
  resizeCanvas();

  // Mouse event handlers
  canvas.addEventListener('mousedown', onCanvasMouseDown);
  canvas.addEventListener('mousemove', onCanvasMouseMove);
  canvas.addEventListener('mouseup', onCanvasMouseUp);
  canvas.addEventListener('contextmenu', onCanvasRightClick);
}

function initUI() {
  // File loading
  document.getElementById('loadWavBtn').addEventListener('click', () => {
    document.getElementById('fileInput').click();
  });

  document.getElementById('fileInput').addEventListener('change', onFileSelected);

  // Curve management
  document.getElementById('addCurveBtn').addEventListener('click', addCurve);
  document.getElementById('deleteCurveBtn').addEventListener('click', deleteSelectedCurve);
  document.getElementById('curveSelect').addEventListener('change', onCurveSelected);

  // Profile controls
  document.getElementById('profileType').addEventListener('change', onProfileChanged);
  document.getElementById('sigmaSlider').addEventListener('input', onSigmaChanged);
  document.getElementById('sigmaValue').addEventListener('input', onSigmaValueChanged);

  // Playback controls
  document.getElementById('playProceduralBtn').addEventListener('click', () => playAudio('procedural'));
  document.getElementById('playOriginalBtn').addEventListener('click', () => playAudio('original'));
  document.getElementById('stopBtn').addEventListener('click', stopAudio);

  // Action buttons
  document.getElementById('undoBtn').addEventListener('click', undo);
  document.getElementById('redoBtn').addEventListener('click', redo);
  document.getElementById('saveParamsBtn').addEventListener('click', saveProceduralParams);
  document.getElementById('generateCodeBtn').addEventListener('click', generateCppCode);
  document.getElementById('helpBtn').addEventListener('click', showHelp);

  // Help modal
  document.getElementById('closeHelpModal').addEventListener('click', hideHelp);
  document.getElementById('helpModal').addEventListener('click', (e) => {
    if (e.target.id === 'helpModal') hideHelp();
  });
}

function initKeyboardShortcuts() {
  document.addEventListener('keydown', (e) => {
    // Playback shortcuts
    if (e.key === '1') {
      playAudio('procedural');
      return;
    }
    if (e.key === '2') {
      playAudio('original');
      return;
    }
    if (e.key === ' ') {
      e.preventDefault();
      stopAudio();
      return;
    }

    // Edit shortcuts
    if (e.key === 'Delete') {
      deleteSelectedControlPoint();
      return;
    }
    if (e.key === 'Escape') {
      deselectAll();
      return;
    }

    // Undo/Redo
    if (e.ctrlKey && e.shiftKey && e.key === 'Z') {
      e.preventDefault();
      redo();
      return;
    }
    if (e.ctrlKey && e.key === 'z') {
      e.preventDefault();
      undo();
      return;
    }

    // File operations
    if (e.ctrlKey && e.shiftKey && e.key === 'S') {
      e.preventDefault();
      generateCppCode();
      return;
    }
    if (e.ctrlKey && e.key === 's') {
      e.preventDefault();
      saveProceduralParams();
      return;
    }
    if (e.ctrlKey && e.key === 'o') {
      e.preventDefault();
      document.getElementById('fileInput').click();
      return;
    }

    // Help
    if (e.key === '?') {
      showHelp();
      return;
    }
  });
}

function initAudioContext() {
  try {
    state.audioContext = new (window.AudioContext || window.webkitAudioContext)({
      sampleRate: SAMPLE_RATE
    });
    console.log('Audio context initialized:', state.audioContext.sampleRate, 'Hz');
  } catch (error) {
    console.error('Failed to initialize audio context:', error);
    alert('Audio playback unavailable. Your browser may not support Web Audio API.');
  }
}

// ============================================================================
// File Loading
// ============================================================================

function onFileSelected(e) {
  const file = e.target.files[0];
  if (!file) return;

  const fileName = file.name;
  const fileExt = fileName.split('.').pop().toLowerCase();

  if (fileExt === 'wav') {
    loadWavFile(file);
  } else if (fileExt === 'spec') {
    loadSpecFile(file);
  } else {
    alert('Unsupported file format. Please load a .wav or .spec file.');
  }
}

function loadWavFile(file) {
  const reader = new FileReader();
  reader.onload = (e) => {
    const arrayBuffer = e.target.result;
    state.audioContext.decodeAudioData(arrayBuffer, (audioBuffer) => {
      console.log('Decoded WAV:', audioBuffer.length, 'samples,', audioBuffer.numberOfChannels, 'channels');

      // Convert to spectrogram (simplified: just use first channel)
      const audioData = audioBuffer.getChannelData(0);
      const spectrogram = audioToSpectrogram(audioData);

      state.referenceSpectrogram = spectrogram.data;
      state.referenceDctSize = spectrogram.dctSize;
      state.referenceNumFrames = spectrogram.numFrames;

      onReferenceLoaded(file.name);
    }, (error) => {
      console.error('Failed to decode WAV:', error);
      alert('Failed to decode WAV file. Make sure it is a valid audio file.');
    });
  };
  reader.readAsArrayBuffer(file);
}

function loadSpecFile(file) {
  const reader = new FileReader();
  reader.onload = (e) => {
    const arrayBuffer = e.target.result;
    const spec = parseSpecFile(arrayBuffer);

    if (!spec) {
      alert('Failed to parse .spec file. Invalid format.');
      return;
    }

    state.referenceSpectrogram = spec.data;
    state.referenceDctSize = spec.dctSize;
    state.referenceNumFrames = spec.numFrames;

    onReferenceLoaded(file.name);
  };
  reader.readAsArrayBuffer(file);
}

function parseSpecFile(arrayBuffer) {
  const view = new DataView(arrayBuffer);
  let offset = 0;

  // Read header: "SPEC" magic (4 bytes)
  const magic = String.fromCharCode(
    view.getUint8(offset++),
    view.getUint8(offset++),
    view.getUint8(offset++),
    view.getUint8(offset++)
  );

  if (magic !== 'SPEC') {
    console.error('Invalid .spec file: wrong magic', magic);
    return null;
  }

  // Read version (uint32)
  const version = view.getUint32(offset, true);
  offset += 4;

  // Read dct_size (uint32)
  const dctSize = view.getUint32(offset, true);
  offset += 4;

  // Read num_frames (uint32)
  const numFrames = view.getUint32(offset, true);
  offset += 4;

  console.log('.spec header:', {version, dctSize, numFrames});

  // Read spectral data (float32 array)
  const dataLength = dctSize * numFrames;
  const data = new Float32Array(dataLength);

  for (let i = 0; i < dataLength; i++) {
    data[i] = view.getFloat32(offset, true);
    offset += 4;
  }

  return {dctSize, numFrames, data};
}
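
// ----------------------------------------------------------------------------
// .spec binary layout as read by parseSpecFile() above (derived from the
// parser rather than a separate format spec; all integers little-endian):
//   bytes  0..3   ASCII magic "SPEC"
//   bytes  4..7   uint32 version
//   bytes  8..11  uint32 dct_size
//   bytes 12..15  uint32 num_frames
//   bytes 16..    float32[dct_size * num_frames] spectral data, frame-major
// ----------------------------------------------------------------------------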

function audioToSpectrogram(audioData) {
  // Simplified STFT: divide audio into frames and apply DCT
  // Frame overlap: 50% (hop size = DCT_SIZE / 2)
  const hopSize = DCT_SIZE / 2;
  const numFrames = Math.floor((audioData.length - DCT_SIZE) / hopSize) + 1;

  const spectrogram = new Float32Array(DCT_SIZE * numFrames);
  const window = hanningWindowArray;

  for (let frameIdx = 0; frameIdx < numFrames; frameIdx++) {
    const frameStart = frameIdx * hopSize;
    const frame = new Float32Array(DCT_SIZE);

    // Extract windowed frame
    for (let i = 0; i < DCT_SIZE; i++) {
      if (frameStart + i < audioData.length) {
        frame[i] = audioData[frameStart + i] * window[i];
      }
    }

    // Compute DCT (forward transform)
    const dctCoeffs = javascript_dct_512(frame);

    // Store in spectrogram
    for (let b = 0; b < DCT_SIZE; b++) {
      spectrogram[frameIdx * DCT_SIZE + b] = dctCoeffs[b];
    }
  }

  return {dctSize: DCT_SIZE, numFrames, data: spectrogram};
}

// Forward DCT (not in dct.js, add here)
function javascript_dct_512(input) {
  const output = new Float32Array(DCT_SIZE);
  const PI = Math.PI;
  const N = DCT_SIZE;

  for (let k = 0; k < N; k++) {
    let sum = 0;
    for (let n = 0; n < N; n++) {
      sum += input[n] * Math.cos((PI / N) * k * (n + 0.5));
    }
    output[k] = sum * (k === 0 ? Math.sqrt(1 / N) : Math.sqrt(2 / N));
  }
  return output;
}

function onReferenceLoaded(fileName) {
  console.log('Reference loaded:', fileName);
  document.getElementById('fileInfo').textContent = fileName;
  document.getElementById('canvasOverlay').classList.add('hidden');
  document.getElementById('playOriginalBtn').disabled = false;

  // Adjust zoom to fit
  state.pixelsPerFrame = Math.max(1.0, state.canvasWidth / state.referenceNumFrames);

  render();
}

// ============================================================================
// Curve Management
// ============================================================================

function addCurve() {
  const curve = {
    id: state.nextCurveId++,
    controlPoints: [], // Empty initially, user will place points
    profile: {
      type: 'gaussian',
      param1: 30.0, // sigma
      param2: 0.0
    }
  };

  state.curves.push(curve);
  state.selectedCurveId = curve.id;

  saveHistoryState('Add curve');
  updateCurveUI();
  render();
}

function deleteSelectedCurve() {
  if (state.selectedCurveId === null) return;

  const idx = state.curves.findIndex(c => c.id === state.selectedCurveId);
  if (idx >= 0) {
    state.curves.splice(idx, 1);
    state.selectedCurveId = null;
    state.selectedControlPointIdx = null;

    saveHistoryState('Delete curve');
    updateCurveUI();
    render();
  }
}

function onCurveSelected(e) {
  const curveId = parseInt(e.target.value);
  state.selectedCurveId = curveId >= 0 ? curveId : null;
  state.selectedControlPointIdx = null;

  updateCurveUI();
  render();
}

function updateCurveUI() {
  // Update curve list (toolbar)
  const curveList = document.getElementById('curveList');
  curveList.innerHTML = '';

  state.curves.forEach(curve => {
    const div = document.createElement('div');
    div.className = 'curve-item';
    if (curve.id === state.selectedCurveId) {
      div.classList.add('selected');
    }
    div.textContent = `Curve ${curve.id} (${curve.controlPoints.length} points)`;
    div.addEventListener('click', () => {
      state.selectedCurveId = curve.id;
      state.selectedControlPointIdx = null;
      updateCurveUI();
      render();
    });
    curveList.appendChild(div);
  });

  // Update curve select dropdown
  const curveSelect = document.getElementById('curveSelect');
  curveSelect.innerHTML = '';

  if (state.curves.length === 0) {
    const opt = document.createElement('option');
    opt.value = -1;
    opt.textContent = 'No curves';
    curveSelect.appendChild(opt);
  } else {
    state.curves.forEach(curve => {
      const opt = document.createElement('option');
      opt.value = curve.id;
      opt.textContent = `Curve ${curve.id}`;
      opt.selected = curve.id === state.selectedCurveId;
      curveSelect.appendChild(opt);
    });
  }

  // Update delete button state
  document.getElementById('deleteCurveBtn').disabled = state.selectedCurveId === null;

  // Update profile controls
  if (state.selectedCurveId !== null) {
    const curve = state.curves.find(c => c.id === state.selectedCurveId);
    if (curve) {
      document.getElementById('profileType').value = curve.profile.type;
      document.getElementById('sigmaSlider').value = curve.profile.param1;
      document.getElementById('sigmaValue').value = curve.profile.param1;
    }
  }
}

// ============================================================================
// Profile Controls
// ============================================================================

function onProfileChanged(e) {
  if (state.selectedCurveId === null) return;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  curve.profile.type = e.target.value;

  // Update label based on profile type
  const label = document.getElementById('sigmaLabel');
  if (curve.profile.type === 'gaussian') {
    label.textContent = 'Sigma:';
  } else if (curve.profile.type === 'decaying_sinusoid') {
    label.textContent = 'Decay:';
  } else if (curve.profile.type === 'noise') {
    label.textContent = 'Amplitude:';
  }

  saveHistoryState('Change profile');
  render();
}

function onSigmaChanged(e) {
  if (state.selectedCurveId === null) return;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  curve.profile.param1 = parseFloat(e.target.value);
  document.getElementById('sigmaValue').value = curve.profile.param1;

  render();
}

function onSigmaValueChanged(e) {
  if (state.selectedCurveId === null) return;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  curve.profile.param1 = parseFloat(e.target.value);
  document.getElementById('sigmaSlider').value = curve.profile.param1;

  render();
}

// ============================================================================
// Canvas Interaction
// ============================================================================

let isDragging = false;
let dragStartX = 0;
let dragStartY = 0;

function onCanvasMouseDown(e) {
  const rect = e.target.getBoundingClientRect();
  const x = e.clientX - rect.left;
  const y = e.clientY - rect.top;

  // Check if clicking on existing control point
  const clickedPoint = findControlPointAt(x, y);

  if (clickedPoint) {
    // Start dragging existing point
    state.selectedCurveId = clickedPoint.curveId;
    state.selectedControlPointIdx = clickedPoint.pointIdx;
    isDragging = true;
    dragStartX = x;
    dragStartY = y;
    updateCurveUI();
    render();
  } else if (state.selectedCurveId !== null) {
    // Place new control point
    const curve = state.curves.find(c => c.id === state.selectedCurveId);
    if (curve) {
      const point = screenToSpectrogram(x, y);
      curve.controlPoints.push(point);

      // Sort by frame
      curve.controlPoints.sort((a, b) => a.frame - b.frame);

      saveHistoryState('Add control point');
      updateCurveUI();
      render();
    }
  }
}

function onCanvasMouseMove(e) {
  if (!isDragging) return;
  if (state.selectedCurveId === null || state.selectedControlPointIdx === null) return;

  const rect = e.target.getBoundingClientRect();
  const x = e.clientX - rect.left;
  const y = e.clientY - rect.top;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  const point = curve.controlPoints[state.selectedControlPointIdx];
  if (!point) return;

  // Update point position
  const newPoint = screenToSpectrogram(x, y);
  point.frame = newPoint.frame;
  point.freqHz = newPoint.freqHz;
  point.amplitude = newPoint.amplitude;

  // Re-sort by frame
  curve.controlPoints.sort((a, b) => a.frame - b.frame);

  render();
}

function onCanvasMouseUp(e) {
  if (isDragging) {
    isDragging = false;
    saveHistoryState('Move control point');
  }
}

function onCanvasRightClick(e) {
  e.preventDefault();

  const rect = e.target.getBoundingClientRect();
  const x = e.clientX - rect.left;
  const y = e.clientY - rect.top;

  const clickedPoint = findControlPointAt(x, y);
  if (clickedPoint) {
    const curve = state.curves.find(c => c.id === clickedPoint.curveId);
    if (curve) {
      curve.controlPoints.splice(clickedPoint.pointIdx, 1);
      state.selectedControlPointIdx = null;

      saveHistoryState('Delete control point');
      updateCurveUI();
      render();
    }
  }
}

function findControlPointAt(screenX, screenY) {
  const CLICK_RADIUS = 8; // pixels

  for (const curve of state.curves) {
    for (let i = 0; i < curve.controlPoints.length; i++) {
      const point = curve.controlPoints[i];
      const screenPos = spectrogramToScreen(point.frame, point.freqHz);

      const dx = screenX - screenPos.x;
      const dy = screenY - screenPos.y;
      const dist = Math.sqrt(dx * dx + dy * dy);

      if (dist <= CLICK_RADIUS) {
        return {curveId: curve.id, pointIdx: i};
      }
    }
  }

  return null;
}

function deleteSelectedControlPoint() {
  if (state.selectedCurveId === null || state.selectedControlPointIdx === null) return;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (curve && state.selectedControlPointIdx < curve.controlPoints.length) {
    curve.controlPoints.splice(state.selectedControlPointIdx, 1);
    state.selectedControlPointIdx = null;

    saveHistoryState('Delete control point');
    updateCurveUI();
    render();
  }
}

function deselectAll() {
  state.selectedCurveId = null;
  state.selectedControlPointIdx = null;
  updateCurveUI();
  render();
}

// ============================================================================
// Coordinate Conversion
// ============================================================================

function screenToSpectrogram(screenX, screenY) {
  const frame = Math.round(screenX / state.pixelsPerFrame);
  const bin = Math.round((state.canvasHeight - screenY) / state.pixelsPerBin);
  const freqHz = (bin / state.referenceDctSize) * (SAMPLE_RATE / 2);

  // Amplitude from Y position (normalized 0-1, top = 1.0, bottom = 0.0)
  const amplitude = 1.0 - (screenY / state.canvasHeight);

  return {
    frame: Math.max(0, frame),
    freqHz: Math.max(0, freqHz),
    amplitude: Math.max(0, Math.min(1, amplitude))
  };
}

function spectrogramToScreen(frame, freqHz) {
  const bin = (freqHz / (SAMPLE_RATE / 2)) * state.referenceDctSize;
  const x = frame * state.pixelsPerFrame;
  const y = state.canvasHeight - (bin * state.pixelsPerBin);
  return {x, y};
}

// ============================================================================
// Rendering
// ============================================================================

function render() {
  const canvas = document.getElementById('spectrogramCanvas');
  const ctx = canvas.getContext('2d');

  // Clear canvas
  ctx.fillStyle = '#1e1e1e';
  ctx.fillRect(0, 0, canvas.width, canvas.height);

  // Draw reference spectrogram (background)
  if (state.referenceSpectrogram) {
    drawReferenceSpectrogram(ctx);
  }

  // Draw procedural spectrogram (foreground)
  if (state.curves.length > 0) {
    drawProceduralSpectrogram(ctx);
  }

  // Draw control points
  drawControlPoints(ctx);
}

function drawReferenceSpectrogram(ctx) {
  // Draw semi-transparent reference
  const imgData = ctx.createImageData(state.canvasWidth, state.canvasHeight);

  for (let frameIdx = 0; frameIdx < state.referenceNumFrames; frameIdx++) {
    const x = Math.floor(frameIdx * state.pixelsPerFrame);
    if (x >= state.canvasWidth) break;

    for (let bin = 0; bin < state.referenceDctSize; bin++) {
      const y = state.canvasHeight - Math.floor(bin * state.pixelsPerBin);
      if (y < 0 || y >= state.canvasHeight) continue;

      const specValue = state.referenceSpectrogram[frameIdx * state.referenceDctSize + bin];
      const intensity = Math.min(255, Math.abs(specValue) * 50); // Scale for visibility

      const pixelIdx = (y * state.canvasWidth + x) * 4;
      imgData.data[pixelIdx + 0] = intensity; // R
      imgData.data[pixelIdx + 1] = intensity; // G
      imgData.data[pixelIdx + 2] = intensity; // B
      imgData.data[pixelIdx + 3] = 255;       // A
    }
  }

  // putImageData() ignores globalAlpha, so composite the layer through an
  // offscreen canvas to keep the reference semi-transparent.
  const layer = document.createElement('canvas');
  layer.width = state.canvasWidth;
  layer.height = state.canvasHeight;
  layer.getContext('2d').putImageData(imgData, 0, 0);

  ctx.globalAlpha = 0.3;
  ctx.drawImage(layer, 0, 0);
  ctx.globalAlpha = 1.0;
}

function drawProceduralSpectrogram(ctx) {
  // Generate procedural spectrogram
  const numFrames = state.referenceNumFrames || 100;
  const procedural = generateProceduralSpectrogram(numFrames);

  // Draw as colored overlay
  const imgData = ctx.createImageData(state.canvasWidth, state.canvasHeight);

  for (let frameIdx = 0; frameIdx < numFrames; frameIdx++) {
    const x = Math.floor(frameIdx * state.pixelsPerFrame);
    if (x >= state.canvasWidth) break;

    for (let bin = 0; bin < state.referenceDctSize; bin++) {
      const y = state.canvasHeight - Math.floor(bin * state.pixelsPerBin);
      if (y < 0 || y >= state.canvasHeight) continue;

      const specValue = procedural[frameIdx * state.referenceDctSize + bin];
      const intensity = Math.min(255, Math.abs(specValue) * 50);

      const pixelIdx = (y * state.canvasWidth + x) * 4;
      imgData.data[pixelIdx + 0] = 100;       // R (blue-ish)
      imgData.data[pixelIdx + 1] = 150;       // G
      imgData.data[pixelIdx + 2] = intensity; // B
      imgData.data[pixelIdx + 3] = intensity; // A: follows intensity so empty bins stay transparent
    }
  }

  // Composite the overlay via an offscreen canvas (putImageData() would
  // replace the reference layer instead of blending over it).
  const layer = document.createElement('canvas');
  layer.width = state.canvasWidth;
  layer.height = state.canvasHeight;
  layer.getContext('2d').putImageData(imgData, 0, 0);

  ctx.globalAlpha = 0.7;
  ctx.drawImage(layer, 0, 0);
  ctx.globalAlpha = 1.0;
}

function drawControlPoints(ctx) {
  state.curves.forEach(curve => {
    const isSelected = curve.id === state.selectedCurveId;

    // Draw Bezier curve path
    if (curve.controlPoints.length >= 2) {
      ctx.strokeStyle = isSelected ? '#0e639c' : '#666666';
      ctx.lineWidth = 2;
      ctx.beginPath();

      for (let i = 0; i < curve.controlPoints.length; i++) {
        const point = curve.controlPoints[i];
        const screenPos = spectrogramToScreen(point.frame, point.freqHz);

        if (i === 0) {
          ctx.moveTo(screenPos.x, screenPos.y);
        } else {
          ctx.lineTo(screenPos.x, screenPos.y);
        }
      }

      ctx.stroke();
    }

    // Draw control points
    curve.controlPoints.forEach((point, idx) => {
      const screenPos = spectrogramToScreen(point.frame, point.freqHz);
      const isPointSelected = isSelected && idx === state.selectedControlPointIdx;

      ctx.fillStyle = isPointSelected ? '#ffaa00' : (isSelected ? '#0e639c' : '#888888');
      ctx.beginPath();
      ctx.arc(screenPos.x, screenPos.y, 6, 0, 2 * Math.PI);
      ctx.fill();

      ctx.strokeStyle = '#ffffff';
      ctx.lineWidth = 2;
      ctx.stroke();

      // Draw label
      if (isSelected) {
        ctx.fillStyle = '#ffffff';
        ctx.font = '11px monospace';
        ctx.fillText(`${Math.round(point.freqHz)}Hz`, screenPos.x + 10, screenPos.y - 5);
      }
    });
  });
}

// ============================================================================
// Procedural Spectrogram Generation
// ============================================================================

function generateProceduralSpectrogram(numFrames) {
  const spectrogram = new Float32Array(state.referenceDctSize * numFrames);

  // For each curve, draw its contribution
  state.curves.forEach(curve => {
    drawCurveToSpectrogram(curve, spectrogram, state.referenceDctSize, numFrames);
  });

  return spectrogram;
}

function drawCurveToSpectrogram(curve, spectrogram, dctSize, numFrames) {
  if (curve.controlPoints.length === 0) return;

  for (let frame = 0; frame < numFrames; frame++) {
    // Evaluate Bezier curve at this frame
    const freqHz = evaluateBezierLinear(curve.controlPoints, frame, 'freqHz');
    const amplitude = evaluateBezierLinear(curve.controlPoints, frame, 'amplitude');

    // Convert freq to bin
    const freqBin0 = (freqHz / (SAMPLE_RATE / 2)) * dctSize;

    // Apply vertical profile
    for (let bin = 0; bin < dctSize; bin++) {
      const dist = Math.abs(bin - freqBin0);
      const profileValue = evaluateProfile(curve.profile, dist);

      const idx = frame * dctSize + bin;
      spectrogram[idx] += amplitude * profileValue;
    }
  }
}

function evaluateBezierLinear(controlPoints, frame, property) {
  if (controlPoints.length === 0) return 0;
  if (controlPoints.length === 1) return controlPoints[0][property];

  const frames = controlPoints.map(p => p.frame);
  const values = controlPoints.map(p => p[property]);

  // Clamp to range
  if (frame <= frames[0]) return values[0];
  if (frame >= frames[frames.length - 1]) return values[values.length - 1];

  // Find segment
  for (let i = 0; i < frames.length - 1; i++) {
    if (frame >= frames[i] && frame <= frames[i + 1]) {
      const t = (frame - frames[i]) / (frames[i + 1] - frames[i]);
      return values[i] * (1 - t) + values[i + 1] * t;
    }
  }

  return values[values.length - 1];
}

function evaluateProfile(profile, distance) {
  switch (profile.type) {
    case 'gaussian': {
      const sigma = profile.param1;
      return Math.exp(-(distance * distance) / (sigma * sigma));
    }

    case 'decaying_sinusoid': {
      const decay = profile.param1;
      const omega = profile.param2 || 0.5;
      return Math.exp(-decay * distance) * Math.cos(omega * distance);
    }

    case 'noise': {
      const amplitude = profile.param1;
      const seed = profile.param2 || 42;
      // Simple deterministic noise
      const hash = (seed + Math.floor(distance * 1000)) % 10000;
      return amplitude * (hash / 10000);
    }

    default:
      return 0;
  }
}
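
// ----------------------------------------------------------------------------
// Worked example for the helpers above (illustrative numbers only): with
// control points {frame: 0, freqHz: 1000, amplitude: 1.0} and
// {frame: 100, freqHz: 3000, amplitude: 0.5}, frame 50 gives t = 0.5, so
// evaluateBezierLinear() returns 2000 Hz and amplitude 0.75. A 'gaussian'
// profile with sigma = 30 then weights a bin 30 bins away from the curve by
// exp(-(30 * 30) / (30 * 30)) = exp(-1) ~= 0.37.
// ----------------------------------------------------------------------------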

// ============================================================================
// Audio Playback
// ============================================================================

function playAudio(source) {
  if (!state.audioContext) {
    alert('Audio context not available');
    return;
  }

  stopAudio();

  let spectrogram;
  let numFrames;

  if (source === 'original') {
    if (!state.referenceSpectrogram) {
      alert('No reference audio loaded');
      return;
    }
    spectrogram = state.referenceSpectrogram;
    numFrames = state.referenceNumFrames;
  } else { // procedural
    if (state.curves.length === 0) {
      alert('No curves defined. Add a curve first.');
      return;
    }
    numFrames = state.referenceNumFrames || 100;
    spectrogram = generateProceduralSpectrogram(numFrames);
  }

  // Convert spectrogram to audio via IDCT
  const audioData = spectrogramToAudio(spectrogram, state.referenceDctSize, numFrames);

  // Create audio buffer
  const audioBuffer = state.audioContext.createBuffer(1, audioData.length, SAMPLE_RATE);
  audioBuffer.getChannelData(0).set(audioData);

  // Play
  const bufferSource = state.audioContext.createBufferSource();
  bufferSource.buffer = audioBuffer;
  bufferSource.connect(state.audioContext.destination);
  bufferSource.start();

  state.currentSource = bufferSource;
  state.isPlaying = true;

  bufferSource.onended = () => {
    state.isPlaying = false;
    state.currentSource = null;
  };

  console.log('Playing audio:', audioData.length, 'samples');
}

function stopAudio() {
  if (state.currentSource) {
    state.currentSource.stop();
    state.currentSource = null;
  }
  state.isPlaying = false;
}

function spectrogramToAudio(spectrogram, dctSize, numFrames) {
  const hopSize = dctSize / 2;
  const audioLength = numFrames * hopSize + dctSize;
  const audioData = new Float32Array(audioLength);
  const window = hanningWindowArray;

  for (let frameIdx = 0; frameIdx < numFrames; frameIdx++) {
    // Extract frame
    const frame = new Float32Array(dctSize);
    for (let b = 0; b < dctSize; b++) {
      frame[b] = spectrogram[frameIdx * dctSize + b];
    }

    // IDCT
    const timeFrame = javascript_idct_512(frame);

    // Apply window and overlap-add
    const frameStart = frameIdx * hopSize;
    for (let i = 0; i < dctSize; i++) {
      if (frameStart + i < audioLength) {
        audioData[frameStart + i] += timeFrame[i] * window[i];
      }
    }
  }

  return audioData;
}

// ============================================================================
// Undo/Redo
// ============================================================================

function saveHistoryState(action) {
  // Remove any states after current index
  state.history = state.history.slice(0, state.historyIndex + 1);

  // Save current state
  const snapshot = {
    action,
    curves: JSON.parse(JSON.stringify(state.curves)),
    selectedCurveId: state.selectedCurveId
  };

  state.history.push(snapshot);

  // Limit history size
  if (state.history.length > state.maxHistorySize) {
    state.history.shift();
  } else {
    state.historyIndex++;
  }

  updateUndoRedoButtons();
}

function undo() {
  if (state.historyIndex <= 0) return;

  state.historyIndex--;
  const snapshot = state.history[state.historyIndex];

  state.curves = JSON.parse(JSON.stringify(snapshot.curves));
  state.selectedCurveId = snapshot.selectedCurveId;
  state.selectedControlPointIdx = null;

  updateCurveUI();
  updateUndoRedoButtons();
  render();

  console.log('Undo:', snapshot.action);
}

function redo() {
  if (state.historyIndex >= state.history.length - 1) return;

  state.historyIndex++;
  const snapshot = state.history[state.historyIndex];

  state.curves = JSON.parse(JSON.stringify(snapshot.curves));
  state.selectedCurveId = snapshot.selectedCurveId;
  state.selectedControlPointIdx = null;

  updateCurveUI();
  updateUndoRedoButtons();
  render();

  console.log('Redo:', snapshot.action);
}

function updateUndoRedoButtons() {
  document.getElementById('undoBtn').disabled = state.historyIndex <= 0;
  document.getElementById('redoBtn').disabled = state.historyIndex >= state.history.length - 1;
}

// ============================================================================
// File Export
// ============================================================================

function saveProceduralParams() {
  if (state.curves.length === 0) {
    alert('No curves to save. Add at least one curve first.');
    return;
  }

  const text = generateProceduralParamsText();
  downloadTextFile('procedural_params.txt', text);
}

function generateProceduralParamsText() {
  let text = '# Spectral Brush Procedural Parameters\n';
  text += `METADATA dct_size=${state.referenceDctSize} num_frames=${state.referenceNumFrames || 100} sample_rate=${SAMPLE_RATE}\n\n`;

  state.curves.forEach((curve, idx) => {
    text += `CURVE bezier\n`;

    curve.controlPoints.forEach(point => {
      text += `  CONTROL_POINT ${point.frame} ${point.freqHz.toFixed(1)} ${point.amplitude.toFixed(3)}\n`;
    });

    text += `  PROFILE ${curve.profile.type}`;
    if (curve.profile.type === 'gaussian') {
      text += ` sigma=${curve.profile.param1.toFixed(1)}`;
    } else if (curve.profile.type === 'decaying_sinusoid') {
      text += ` decay=${curve.profile.param1.toFixed(2)} frequency=${curve.profile.param2.toFixed(2)}`;
    } else if (curve.profile.type === 'noise') {
      text += ` amplitude=${curve.profile.param1.toFixed(2)} seed=${curve.profile.param2.toFixed(0)}`;
    }
    text += '\n';

    text += 'END_CURVE\n\n';
  });

  return text;
}

function generateCppCode() {
  if (state.curves.length === 0) {
    alert('No curves to export. Add at least one curve first.');
    return;
  }

  const code = generateCppCodeText();
  downloadTextFile('gen_procedural.cc', code);
}

function generateCppCodeText() {
  let code = '// Generated by Spectral Brush Editor\n';
  code += '// Reproduces the procedural audio at runtime\n\n';
  code += '#include "audio/spectral_brush.h"\n\n';

  code += 'void gen_procedural(float* spec, int dct_size, int num_frames) {\n';

  state.curves.forEach((curve, curveIdx) => {
    code += `  // Curve ${curveIdx}\n`;
    code += '  {\n';

    // Control points arrays
    const numPoints = curve.controlPoints.length;
    code += `    const float frames[] = {`;
    code += curve.controlPoints.map(p => `${p.frame}.0f`).join(', ');
    code += '};\n';

    code += `    const float freqs[] = {`;
    code += curve.controlPoints.map(p => `${p.freqHz.toFixed(1)}f`).join(', ');
    code += '};\n';

    code += `    const float amps[] = {`;
    code += curve.controlPoints.map(p => `${p.amplitude.toFixed(3)}f`).join(', ');
    code += '};\n\n';

    // Profile type
    let profileEnum;
    if (curve.profile.type === 'gaussian') {
      profileEnum = 'PROFILE_GAUSSIAN';
    } else if (curve.profile.type === 'decaying_sinusoid') {
      profileEnum = 'PROFILE_DECAYING_SINUSOID';
    } else if (curve.profile.type === 'noise') {
      profileEnum = 'PROFILE_NOISE';
    }

    // Function call
    if (curveIdx === 0) {
      code += `    draw_bezier_curve(spec, dct_size, num_frames,\n`;
    } else {
      code += `    draw_bezier_curve_add(spec, dct_size, num_frames,\n`;
    }
    code += `        frames, freqs, amps, ${numPoints},\n`;
    code += `        ${profileEnum}, ${curve.profile.param1.toFixed(2)}f`;

    if (curve.profile.type === 'decaying_sinusoid' || curve.profile.type === 'noise') {
      code += `, ${curve.profile.param2.toFixed(2)}f`;
    }

    code += ');\n';
    code += '  }\n\n';
  });

  code += '}\n\n';
  code += '// Usage in demo_assets.txt:\n';
  code += '// SOUND_PROC, PROC(gen_procedural), NONE, "Procedural sound"\n';

  return code;
}
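
// ----------------------------------------------------------------------------
// For reference, generateCppCodeText() above emits C++ of roughly this shape
// (single-curve example with made-up values; draw_bezier_curve* and the
// PROFILE_* enums are expected to come from audio/spectral_brush.h, which the
// generated file includes):
//
//   void gen_procedural(float* spec, int dct_size, int num_frames) {
//     // Curve 0
//     {
//       const float frames[] = {0.0f, 60.0f};
//       const float freqs[] = {440.0f, 1200.0f};
//       const float amps[] = {0.800f, 0.350f};
//
//       draw_bezier_curve(spec, dct_size, num_frames,
//           frames, freqs, amps, 2,
//           PROFILE_GAUSSIAN, 30.00f);
//     }
//   }
// ----------------------------------------------------------------------------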

function downloadTextFile(filename, text) {
  const blob = new Blob([text], {type: 'text/plain'});
  const url = URL.createObjectURL(blob);

  const a = document.createElement('a');
  a.href = url;
  a.download = filename;
  document.body.appendChild(a);
  a.click();
  document.body.removeChild(a);

  URL.revokeObjectURL(url);

  console.log('Downloaded:', filename);
}

// ============================================================================
// Help Modal
// ============================================================================

function showHelp() {
  document.getElementById('helpModal').style.display = 'flex';
}

function hideHelp() {
  document.getElementById('helpModal').style.display = 'none';
}
