summaryrefslogtreecommitdiff
path: root/tools
diff options
context:
space:
mode:
Diffstat (limited to 'tools')
-rw-r--r--tools/asset_packer.cc185
-rw-r--r--tools/editor/dct.js167
-rw-r--r--tools/editor/script.js4
-rw-r--r--tools/spectral_editor/FEATURES.md151
-rw-r--r--tools/spectral_editor/README.md268
-rw-r--r--tools/spectral_editor/dct.js217
-rw-r--r--tools/spectral_editor/index.html180
-rw-r--r--tools/spectral_editor/script.js1774
-rw-r--r--tools/spectral_editor/style.css508
9 files changed, 3438 insertions, 16 deletions
diff --git a/tools/asset_packer.cc b/tools/asset_packer.cc
index 04b74a4..79a6ce6 100644
--- a/tools/asset_packer.cc
+++ b/tools/asset_packer.cc
@@ -5,10 +5,13 @@
#include <cstdio> // for simplicity, use fprintf() for output generation
#include <fstream>
#include <map>
+#include <algorithm> // For std::count
+#include <cstring> // For std::memcpy
#include <regex> // For std::regex
#include <stdexcept> // For std::stof exceptions
#include <string>
#include <vector>
+#include <cmath>
#define STB_IMAGE_IMPLEMENTATION
#define STBI_NO_LINEAR // Don't apply gamma correction, we want raw bytes
@@ -39,6 +42,11 @@ static bool HasImageExtension(const std::string& filename) {
return false;
}
+static bool HasMeshExtension(const std::string& filename) {
+ std::string ext = filename.substr(filename.find_last_of(".") + 1);
+ return ext == "obj";
+}
+
// Helper struct to hold all information about an asset during parsing
struct AssetBuildInfo {
std::string name;
@@ -53,6 +61,22 @@ struct AssetBuildInfo {
std::string func_name_str_name; // ASSET_PROC_FUNC_STR_xxx for procedural
};
+struct Vec3 {
+ float x, y, z;
+ Vec3 operator+(const Vec3& o) const { return {x + o.x, y + o.y, z + o.z}; }
+ Vec3 operator+=(const Vec3& o) { x += o.x; y += o.y; z += o.z; return *this; }
+ Vec3 operator-(const Vec3& o) const { return {x - o.x, y - o.y, z - o.z}; }
+ Vec3 operator*(float s) const { return {x * s, y * s, z * s}; }
+ static Vec3 cross(const Vec3& a, const Vec3& b) {
+ return {a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y - a.y * b.x};
+ }
+ Vec3 normalize() const {
+ float len = std::sqrt(x * x + y * y + z * z);
+ if (len > 1e-6f) return {x / len, y / len, z / len};
+ return {0, 0, 0};
+ }
+};
+
int main(int argc, char* argv[]) {
if (argc != 4) {
fprintf(stderr,
@@ -230,6 +254,7 @@ int main(int argc, char* argv[]) {
std::vector<uint8_t> buffer;
bool is_image = HasImageExtension(info.filename);
+ bool is_mesh = HasMeshExtension(info.filename);
if (is_image) {
int w, h, channels;
@@ -251,6 +276,166 @@ int main(int argc, char* argv[]) {
stbi_image_free(img_data);
printf("Processed image asset %s: %dx%d RGBA\n", info.name.c_str(), w,
h);
+ } else if (is_mesh) {
+ std::ifstream obj_file(full_path);
+ if (!obj_file.is_open()) {
+ fprintf(stderr, "Error: Could not open mesh file: %s\n",
+ full_path.c_str());
+ return 1;
+ }
+
+ std::vector<float> v_pos;
+ std::vector<float> v_norm;
+ std::vector<float> v_uv;
+
+ struct RawFace {
+ int v[3];
+ int vt[3];
+ int vn[3];
+ };
+ std::vector<RawFace> raw_faces;
+
+ struct Vertex {
+ float p[3], n[3], u[2];
+ };
+ std::vector<Vertex> final_vertices;
+ std::vector<uint32_t> final_indices;
+ std::map<std::string, uint32_t> vertex_map;
+
+ std::string obj_line;
+ while (std::getline(obj_file, obj_line)) {
+ if (obj_line.compare(0, 2, "v ") == 0) {
+ float x, y, z;
+ std::sscanf(obj_line.c_str(), "v %f %f %f", &x, &y, &z);
+ v_pos.push_back(x);
+ v_pos.push_back(y);
+ v_pos.push_back(z);
+ } else if (obj_line.compare(0, 3, "vn ") == 0) {
+ float x, y, z;
+ std::sscanf(obj_line.c_str(), "vn %f %f %f", &x, &y, &z);
+ v_norm.push_back(x);
+ v_norm.push_back(y);
+ v_norm.push_back(z);
+ } else if (obj_line.compare(0, 3, "vt ") == 0) {
+ float u, v;
+ std::sscanf(obj_line.c_str(), "vt %f %f", &u, &v);
+ v_uv.push_back(u);
+ v_uv.push_back(v);
+ } else if (obj_line.compare(0, 2, "f ") == 0) {
+ char s1[64], s2[64], s3[64];
+ if (std::sscanf(obj_line.c_str(), "f %s %s %s", s1, s2, s3) == 3) {
+ std::string parts[3] = {s1, s2, s3};
+ RawFace face = {};
+ for (int i = 0; i < 3; ++i) {
+ int v_idx = 0, vt_idx = 0, vn_idx = 0;
+ if (parts[i].find("//") != std::string::npos) {
+ std::sscanf(parts[i].c_str(), "%d//%d", &v_idx, &vn_idx);
+ } else if (std::count(parts[i].begin(), parts[i].end(), '/') ==
+ 2) {
+ std::sscanf(parts[i].c_str(), "%d/%d/%d", &v_idx, &vt_idx,
+ &vn_idx);
+ } else if (std::count(parts[i].begin(), parts[i].end(), '/') ==
+ 1) {
+ std::sscanf(parts[i].c_str(), "%d/%d", &v_idx, &vt_idx);
+ } else {
+ std::sscanf(parts[i].c_str(), "%d", &v_idx);
+ }
+ face.v[i] = v_idx;
+ face.vt[i] = vt_idx;
+ face.vn[i] = vn_idx;
+ }
+ raw_faces.push_back(face);
+ }
+ }
+ }
+
+ // Generate normals if missing
+ if (v_norm.empty() && !v_pos.empty()) {
+ printf("Generating normals for %s...\n", info.name.c_str());
+ std::vector<Vec3> temp_normals(v_pos.size() / 3, {0, 0, 0});
+ for (auto& face : raw_faces) {
+ // Indices are 1-based in OBJ
+ int idx0 = face.v[0] - 1;
+ int idx1 = face.v[1] - 1;
+ int idx2 = face.v[2] - 1;
+
+ if (idx0 >= 0 && idx1 >= 0 && idx2 >= 0) {
+ Vec3 p0 = {v_pos[idx0 * 3], v_pos[idx0 * 3 + 1], v_pos[idx0 * 3 + 2]};
+ Vec3 p1 = {v_pos[idx1 * 3], v_pos[idx1 * 3 + 1], v_pos[idx1 * 3 + 2]};
+ Vec3 p2 = {v_pos[idx2 * 3], v_pos[idx2 * 3 + 1], v_pos[idx2 * 3 + 2]};
+
+ Vec3 normal = Vec3::cross(p1 - p0, p2 - p0).normalize();
+ temp_normals[idx0] += normal;
+ temp_normals[idx1] += normal;
+ temp_normals[idx2] += normal;
+ }
+ }
+
+ for (const auto& n : temp_normals) {
+ Vec3 normalized = n.normalize();
+ v_norm.push_back(normalized.x);
+ v_norm.push_back(normalized.y);
+ v_norm.push_back(normalized.z);
+ }
+
+ // Assign generated normals to faces
+ for (auto& face : raw_faces) {
+ face.vn[0] = face.v[0];
+ face.vn[1] = face.v[1];
+ face.vn[2] = face.v[2];
+ }
+ }
+
+ // Build final vertices
+ for (const auto& face : raw_faces) {
+ for (int i = 0; i < 3; ++i) {
+ // Reconstruct key string for uniqueness check
+ char key_buf[128];
+ std::sprintf(key_buf, "%d/%d/%d", face.v[i], face.vt[i], face.vn[i]);
+ std::string key = key_buf;
+
+ if (vertex_map.find(key) == vertex_map.end()) {
+ vertex_map[key] = (uint32_t)final_vertices.size();
+
+ Vertex v = {};
+ if (face.v[i] > 0) {
+ v.p[0] = v_pos[(face.v[i] - 1) * 3];
+ v.p[1] = v_pos[(face.v[i] - 1) * 3 + 1];
+ v.p[2] = v_pos[(face.v[i] - 1) * 3 + 2];
+ }
+ if (face.vn[i] > 0) {
+ v.n[0] = v_norm[(face.vn[i] - 1) * 3];
+ v.n[1] = v_norm[(face.vn[i] - 1) * 3 + 1];
+ v.n[2] = v_norm[(face.vn[i] - 1) * 3 + 2];
+ }
+ if (face.vt[i] > 0) {
+ v.u[0] = v_uv[(face.vt[i] - 1) * 2];
+ v.u[1] = v_uv[(face.vt[i] - 1) * 2 + 1];
+ }
+ final_vertices.push_back(v);
+ }
+ final_indices.push_back(vertex_map[key]);
+ }
+ }
+
+ // Format: [num_vertices(u32)][Vertex * num_vertices][num_indices(u32)][uint32_t
+ // * num_indices]
+ buffer.resize(sizeof(uint32_t) + final_vertices.size() * sizeof(Vertex) +
+ sizeof(uint32_t) +
+ final_indices.size() * sizeof(uint32_t));
+ uint8_t* out_ptr = buffer.data();
+ *reinterpret_cast<uint32_t*>(out_ptr) = (uint32_t)final_vertices.size();
+ out_ptr += sizeof(uint32_t);
+ std::memcpy(out_ptr, final_vertices.data(),
+ final_vertices.size() * sizeof(Vertex));
+ out_ptr += final_vertices.size() * sizeof(Vertex);
+ *reinterpret_cast<uint32_t*>(out_ptr) = (uint32_t)final_indices.size();
+ out_ptr += sizeof(uint32_t);
+ std::memcpy(out_ptr, final_indices.data(),
+ final_indices.size() * sizeof(uint32_t));
+
+ printf("Processed mesh asset %s: %zu vertices, %zu indices\n",
+ info.name.c_str(), final_vertices.size(), final_indices.size());
} else {
std::ifstream asset_file(full_path, std::ios::binary);
if (!asset_file.is_open()) {
diff --git a/tools/editor/dct.js b/tools/editor/dct.js
index e48ce2b..c081473 100644
--- a/tools/editor/dct.js
+++ b/tools/editor/dct.js
@@ -1,21 +1,6 @@
const dctSize = 512; // Default DCT size, read from header
// --- Utility Functions for Audio Processing ---
-// JavaScript equivalent of C++ idct_512
-function javascript_idct_512(input) {
- const output = new Float32Array(dctSize);
- const PI = Math.PI;
- const N = dctSize;
-
- for (let n = 0; n < N; ++n) {
- let sum = input[0] / 2.0;
- for (let k = 1; k < N; ++k) {
- sum += input[k] * Math.cos((PI / N) * k * (n + 0.5));
- }
- output[n] = sum * (2.0 / N);
- }
- return output;
-}
// Hanning window for smooth audio transitions (JavaScript equivalent)
function hanningWindow(size) {
@@ -29,3 +14,155 @@ function hanningWindow(size) {
const hanningWindowArray = hanningWindow(dctSize); // Pre-calculate window
+// ============================================================================
+// FFT-based DCT/IDCT Implementation
+// ============================================================================
+
+// Bit-reversal permutation (in-place)
+function bitReversePermute(real, imag, N) {
+ let temp_bits = N;
+ let num_bits = 0;
+ while (temp_bits > 1) {
+ temp_bits >>= 1;
+ num_bits++;
+ }
+
+ for (let i = 0; i < N; i++) {
+ let j = 0;
+ let temp = i;
+ for (let b = 0; b < num_bits; b++) {
+ j = (j << 1) | (temp & 1);
+ temp >>= 1;
+ }
+
+ if (j > i) {
+ const tmp_real = real[i];
+ const tmp_imag = imag[i];
+ real[i] = real[j];
+ imag[i] = imag[j];
+ real[j] = tmp_real;
+ imag[j] = tmp_imag;
+ }
+ }
+}
+
+// In-place radix-2 FFT
+function fftRadix2(real, imag, N, direction) {
+ const PI = Math.PI;
+
+ for (let stage_size = 2; stage_size <= N; stage_size *= 2) {
+ const half_stage = stage_size / 2;
+ const angle = direction * 2.0 * PI / stage_size;
+
+ let wr = 1.0;
+ let wi = 0.0;
+ const wr_delta = Math.cos(angle);
+ const wi_delta = Math.sin(angle);
+
+ for (let k = 0; k < half_stage; k++) {
+ for (let group_start = k; group_start < N; group_start += stage_size) {
+ const i = group_start;
+ const j = group_start + half_stage;
+
+ const temp_real = real[j] * wr - imag[j] * wi;
+ const temp_imag = real[j] * wi + imag[j] * wr;
+
+ real[j] = real[i] - temp_real;
+ imag[j] = imag[i] - temp_imag;
+ real[i] = real[i] + temp_real;
+ imag[i] = imag[i] + temp_imag;
+ }
+
+ const wr_old = wr;
+ wr = wr_old * wr_delta - wi * wi_delta;
+ wi = wr_old * wi_delta + wi * wr_delta;
+ }
+ }
+}
+
+function fftForward(real, imag, N) {
+ bitReversePermute(real, imag, N);
+ fftRadix2(real, imag, N, +1);
+}
+
+function fftInverse(real, imag, N) {
+ bitReversePermute(real, imag, N);
+ fftRadix2(real, imag, N, -1);
+
+ const scale = 1.0 / N;
+ for (let i = 0; i < N; i++) {
+ real[i] *= scale;
+ imag[i] *= scale;
+ }
+}
+
+// DCT-II via FFT using reordering method
+function javascript_dct_fft(input, N) {
+ const PI = Math.PI;
+
+ const real = new Float32Array(N);
+ const imag = new Float32Array(N);
+
+ for (let i = 0; i < N / 2; i++) {
+ real[i] = input[2 * i];
+ real[N - 1 - i] = input[2 * i + 1];
+ }
+
+ fftForward(real, imag, N);
+
+ const output = new Float32Array(N);
+ for (let k = 0; k < N; k++) {
+ const angle = -PI * k / (2.0 * N);
+ const wr = Math.cos(angle);
+ const wi = Math.sin(angle);
+
+ const dct_value = real[k] * wr - imag[k] * wi;
+
+ if (k === 0) {
+ output[k] = dct_value * Math.sqrt(1.0 / N);
+ } else {
+ output[k] = dct_value * Math.sqrt(2.0 / N);
+ }
+ }
+
+ return output;
+}
+
+// IDCT (DCT-III) via FFT using reordering method
+function javascript_idct_fft(input, N) {
+ const PI = Math.PI;
+
+ const real = new Float32Array(N);
+ const imag = new Float32Array(N);
+
+ for (let k = 0; k < N; k++) {
+ const angle = PI * k / (2.0 * N);
+ const wr = Math.cos(angle);
+ const wi = Math.sin(angle);
+
+ let scaled;
+ if (k === 0) {
+ scaled = input[k] / Math.sqrt(1.0 / N);
+ } else {
+ scaled = input[k] / Math.sqrt(2.0 / N) * 2.0;
+ }
+
+ real[k] = scaled * wr;
+ imag[k] = scaled * wi;
+ }
+
+ fftInverse(real, imag, N);
+
+ const output = new Float32Array(N);
+ for (let i = 0; i < N / 2; i++) {
+ output[2 * i] = real[i];
+ output[2 * i + 1] = real[N - 1 - i];
+ }
+
+ return output;
+}
+
+// Fast O(N log N) IDCT using FFT
+function javascript_idct_512(input) {
+ return javascript_idct_fft(input, dctSize);
+}
diff --git a/tools/editor/script.js b/tools/editor/script.js
index abfd4f4..06c9bef 100644
--- a/tools/editor/script.js
+++ b/tools/editor/script.js
@@ -631,9 +631,11 @@ async function playSpectrogramData(specData) {
// Convert spectrogram frames (frequency domain) to audio samples (time domain)
for (let frameIndex = 0; frameIndex < numFrames; frameIndex++) {
const spectralFrame = specData.data.slice(frameIndex * dctSize, (frameIndex + 1) * dctSize);
+
+ // IDCT (no windowing - window is only for analysis, not synthesis)
const timeDomainFrame = javascript_idct_512(spectralFrame);
- // Apply Hanning window for smooth transitions
+ // Apply Hanning window for smooth transitions between frames
for (let i = 0; i < dctSize; i++) {
audioData[frameIndex * dctSize + i] = timeDomainFrame[i] * hanningWindowArray[i];
}
diff --git a/tools/spectral_editor/FEATURES.md b/tools/spectral_editor/FEATURES.md
new file mode 100644
index 0000000..6c36cc2
--- /dev/null
+++ b/tools/spectral_editor/FEATURES.md
@@ -0,0 +1,151 @@
+# Spectral Editor - Feature Roadmap
+
+This document tracks planned enhancements for the spectral editor.
+
+## Priority: High
+
+### A. Curve Translation (Shift+Click+Drag)
+**Description**: Shift+click on a control point + mouse-move should translate the entire curve as a unit (all control points move together by the same delta).
+
+**Implementation Notes**:
+- Detect shift key state during control point click
+- Store initial positions of all control points in the curve
+- Apply uniform translation delta to all points during drag
+- Maintain curve shape while moving
+
+**Complexity**: Medium
+**Estimated Effort**: 2-3 hours
+
+---
+
+### B. Viewport Zoom (Mouse Wheel)
+**Description**: Mouse-wheel should allow zooming in/out on the view for fine placement of curves.
+
+**Implementation Notes**:
+- Implement zoom scale factor (e.g., 0.5x to 4.0x)
+- Center zoom around mouse cursor position
+- Update rendering to use scaled coordinates
+- Add visual zoom indicator (e.g., "Zoom: 2.0x")
+- Consider pan functionality (drag with middle mouse or space+drag)
+
+**Complexity**: High (coordinate transformation, pan interaction)
+**Estimated Effort**: 6-8 hours
+
+---
+
+## Priority: Medium
+
+### C. Enhanced Sinusoid Pattern
+**Description**: The 'sinusoid' pattern is quite interesting and should have more variations.
+
+**Proposed Variations**:
+- **Asymmetric Decay**: Different decay rates above and below the curve center
+ - `decay_top` parameter (controls upper harmonics falloff)
+ - `decay_bottom` parameter (controls lower harmonics falloff)
+- **Temporal Modulation**: Per-frame amplitude/frequency modulation along timeline
+ - `amplitude_envelope` (fade in/out over time)
+ - `frequency_drift` (vibrato/wobble effect)
+ - `phase_offset` (shift pattern over time)
+- **Harmonic Series**: Option to generate harmonic overtones
+ - `num_harmonics` parameter
+ - `harmonic_decay` parameter
+
+**Implementation Notes**:
+- Extend `SinusoidProfile` class with additional parameters
+- Add UI controls for new parameters (sliders, dropdowns)
+- Render preview showing modulation over time
+
+**Complexity**: Medium-High
+**Estimated Effort**: 8-12 hours
+
+---
+
+### D. Per-Control-Point Modulation
+**Description**: Each control point should be assigned individually controllable volume, decay params, etc. for fine modulation along time.
+
+**Proposed Parameters (per control point)**:
+- `volume`: Local amplitude multiplier (0.0 - 2.0)
+- `decay`: Local decay rate override
+- `width`: Gaussian width override (for profile spreading)
+- `phase`: Phase offset for sinusoid patterns
+- `color`: Visual indicator for parameter variations
+
+**Implementation Notes**:
+- Extend control point data structure with parameter fields
+- Add per-point property panel (show on control point selection)
+- Render visual hints (color-coded points, size variations)
+- Interpolate parameters between control points for smooth transitions
+
+**Complexity**: High (UI/UX design, parameter interpolation)
+**Estimated Effort**: 10-15 hours
+
+---
+
+### E. Composable Profiles
+**Description**: Profiles should be composable along a curve (e.g., apply Gaussian curve to sinusoid pattern).
+
+**Proposed Syntax**:
+```cpp
+// Example: Gaussian-modulated sinusoid
+CompositeProfile {
+ base: SinusoidProfile { frequency: 100.0, decay: 0.5 },
+ envelope: GaussianProfile { center: 256, width: 50 }
+}
+```
+
+**Implementation Notes**:
+- Define profile composition operators:
+ - `multiply`: Envelope modulation (amplitude × profile)
+ - `add`: Additive blending (profile1 + profile2)
+ - `max`: Take maximum value at each bin
+- Add UI for profile layering (drag-and-drop profile stack)
+- Render composite preview with layer visualization
+
+**Complexity**: High (requires profile abstraction refactor)
+**Estimated Effort**: 12-16 hours
+
+---
+
+## Priority: Low (Polish)
+
+### F. Improved Parameter Sliders
+**Description**: Adjust slider ranges for better usability (Decay, Width, Frequency, etc.).
+
+**Issues to Address**:
+- Decay slider: Non-linear scaling (logarithmic?) for finer control at low values
+- Frequency slider: Snap to musical notes (optional A440-based grid)
+- Width slider: Preview visualization (show affected frequency range)
+- General: Add numeric input fields next to sliders for precise values
+
+**Implementation Notes**:
+- Implement logarithmic slider interpolation for decay/width
+- Add slider tick marks at useful intervals
+- Display current value and units (Hz, bins, dB, etc.)
+- Add reset-to-default buttons
+
+**Complexity**: Low-Medium
+**Estimated Effort**: 3-4 hours
+
+---
+
+## Future Ideas (Backlog)
+
+- **Undo/Redo System**: Track edit history for curve modifications
+- **Preset Library**: Save/load common curve patterns (kick drum, snare, bass, etc.)
+- **Curve Smoothing**: Apply smoothing filters to jittery control points
+- **Copy/Paste**: Duplicate curves or control point selections
+- **Multi-Selection**: Select and edit multiple control points simultaneously
+- **Grid Snapping**: Snap control points to frequency/time grid
+- **Export Options**: Export to different formats (JSON, binary, C++ code)
+
+---
+
+## Total Estimated Effort
+- **High Priority**: 8-11 hours
+- **Medium Priority**: 30-43 hours
+- **Low Priority**: 3-4 hours
+- **Grand Total**: 41-58 hours (roughly 1-1.5 weeks of focused work)
+
+---
+
+*Last Updated: February 6, 2026*
diff --git a/tools/spectral_editor/README.md b/tools/spectral_editor/README.md
new file mode 100644
index 0000000..6bb3681
--- /dev/null
+++ b/tools/spectral_editor/README.md
@@ -0,0 +1,268 @@
+# Spectral Brush Editor
+
+A web-based tool for creating procedural audio by tracing spectrograms with parametric Bezier curves.
+
+## Purpose
+
+Replace large `.spec` binary assets with tiny procedural C++ code:
+- **Before:** 5 KB binary `.spec` file
+- **After:** ~100 bytes of C++ code calling `draw_bezier_curve()`
+
+**Compression ratio:** 50-100×
+
+## Features
+
+### Core Functionality
+- Load `.wav` or `.spec` files as reference
+- Trace spectrograms with Bezier curves + vertical profiles
+- Real-time audio preview (procedural vs. original)
+- Undo/Redo support (50-action history)
+- Export to `procedural_params.txt` (re-editable)
+- Generate C++ code (copy-paste ready)
+- **Live volume control** during playback
+- **Reference opacity slider** for mixing original/procedural views
+- **Per-curve volume control** for fine-tuning
+
+### Profiles
+- **Gaussian:** Smooth harmonic falloff (fully implemented)
+- **Decaying Sinusoid:** Resonant/metallic texture (implemented)
+- **Noise:** Random texture/grit with decay envelope (implemented)
+
+### Visualization
+- **Log-scale frequency axis** (20 Hz to 16 kHz) for better bass visibility
+- **Logarithmic dB-scale intensity** for proper dynamic range display
+- **Playhead indicator** showing current playback position
+- **Mouse crosshair with tooltip** displaying frame and frequency
+- **Real-time spectrum viewer** (bottom-right):
+ - Shows frequency spectrum for frame under mouse (hover mode)
+ - Shows frequency spectrum for current playback frame (playback mode)
+ - Dual display: Reference (green) and Procedural (red) overlaid
+ - Always visible for instant feedback
+
+## Quick Start
+
+1. **Open the editor:**
+ ```bash
+ open tools/spectral_editor/index.html
+ ```
+ (Or open in your browser via file:// protocol)
+
+2. **Load a reference sound:**
+ - Click "Load .wav/.spec" or press `Ctrl+O`
+ - Select a `.wav` or `.spec` file
+
+3. **Add a curve:**
+ - Click "Add Curve" button
+ - Click on canvas to place control points
+ - Drag control points to adjust frequency and amplitude
+
+4. **Adjust profile:**
+ - Use "Sigma" slider to control width
+ - Higher sigma = wider frequency spread
+
+5. **Preview audio:**
+ - Press `1` to play procedural sound
+ - Press `2` to play original .wav
+ - Press `Space` to stop
+
+6. **Export:**
+ - `Ctrl+S` → Save `procedural_params.txt` (re-editable)
+ - `Ctrl+Shift+S` → Generate C++ code
+
+## Keyboard Shortcuts
+
+| Key | Action |
+|-----|--------|
+| **1** | Play procedural sound |
+| **2** | Play original .wav |
+| **Space** | Stop playback |
+| **Delete** | Delete selected control point |
+| **Esc** | Deselect all |
+| **Ctrl+Z** | Undo |
+| **Ctrl+Shift+Z** | Redo |
+| **Ctrl+S** | Save procedural_params.txt |
+| **Ctrl+Shift+S** | Generate C++ code |
+| **Ctrl+O** | Open file |
+| **?** | Show help |
+
+## Mouse Controls
+
+- **Click** on canvas: Place control point
+- **Drag** control point: Adjust position (frame, frequency, amplitude)
+- **Right-click** control point: Delete
+
+## Workflow
+
+### Example: Create a Kick Drum
+
+1. Load a reference kick drum (e.g., `kick.wav`)
+2. Add a curve
+3. Place control points to trace the low-frequency punch:
+ - Point 1: Frame 0, ~200 Hz, amplitude 0.9
+ - Point 2: Frame 20, ~80 Hz, amplitude 0.7
+ - Point 3: Frame 100, ~50 Hz, amplitude 0.0
+4. Adjust sigma to ~30 (smooth falloff)
+5. Press `1` to preview
+6. Fine-tune control points
+7. Export C++ code
+
+### Generated C++ Code Example
+
+```cpp
+// Generated by Spectral Brush Editor
+#include "audio/spectral_brush.h"
+
+void gen_kick_procedural(float* spec, int dct_size, int num_frames) {
+ // Curve 0
+ {
+ const float frames[] = {0.0f, 20.0f, 100.0f};
+ const float freqs[] = {200.0f, 80.0f, 50.0f};
+ const float amps[] = {0.900f, 0.700f, 0.000f};
+
+ draw_bezier_curve(spec, dct_size, num_frames,
+ frames, freqs, amps, 3,
+ PROFILE_GAUSSIAN, 30.00f);
+ }
+}
+
+// Usage in demo_assets.txt:
+// KICK_PROC, PROC(gen_kick_procedural), NONE, "Procedural kick drum"
+```
+
+## File Formats
+
+### procedural_params.txt (Re-editable)
+
+Human-readable text format that can be loaded back into the editor:
+
+```
+# Spectral Brush Procedural Parameters
+METADATA dct_size=512 num_frames=100 sample_rate=32000
+
+CURVE bezier
+ CONTROL_POINT 0 200.0 0.900
+ CONTROL_POINT 20 80.0 0.700
+ CONTROL_POINT 100 50.0 0.000
+ PROFILE gaussian sigma=30.0
+END_CURVE
+```
+
+### C++ Code (Ready to Compile)
+
+Generated code using the spectral_brush runtime API. Copy-paste into `src/audio/procedural_samples.cc`.
+
+## Technical Details
+
+### Spectral Brush Primitive
+
+A spectral brush consists of:
+
+1. **Central Curve** (Bezier): Traces a path through time-frequency space
+ - `{freq_bin, amplitude} = bezier(frame_number)`
+ - Control points: `(frame, freq_hz, amplitude)`
+
+2. **Vertical Profile**: Shapes the "brush stroke" around the central curve
+ - Gaussian: `exp(-(dist² / σ²))`
+ - Applied vertically at each frame
+
+### Coordinate System
+
+- **X-axis (Time):** Frame number (0 → num_frames)
+- **Y-axis (Frequency):** Frequency in Hz (0 → 16 kHz for 32 kHz sample rate)
+- **Amplitude:** Stored per control point as a value in 0.0-1.0 (note: the Y-axis itself maps to frequency, not amplitude)
+
+### Audio Synthesis
+
+1. Generate procedural spectrogram (DCT coefficients)
+2. Apply IDCT to convert to time-domain audio
+3. Use overlap-add with Hanning window
+4. Play via Web Audio API (32 kHz sample rate)
+
+## Development Status
+
+### Phase 1: C++ Runtime ✅ COMPLETE
+- Spectral brush primitive (Bezier + profiles)
+- C++ implementation in `src/audio/spectral_brush.{h,cc}`
+- Integration with asset system
+
+### Phase 2: Web Editor ✅ COMPLETE (Milestone: February 6, 2026)
+- Full-featured web-based editor
+- Real-time audio preview and visualization
+- Log-scale frequency display with dB-scale intensity
+- All three profile types (Gaussian, Decaying Sinusoid, Noise)
+- Live volume control and reference opacity mixing
+- Real-time dual-spectrum viewer (reference + procedural)
+- Export to `procedural_params.txt` and C++ code
+
+### Phase 3: Advanced Features (TODO)
+
+**High Priority:**
+- **Effect Combination System:** How to combine effects (e.g., noise + Gaussian modulation)?
+ - Layer-based compositing (add/multiply/subtract)
+ - Profile modulation (noise modulated by Gaussian envelope)
+ - Multi-pass rendering pipeline
+
+- **Improved C++ Code Testing:**
+ - Verify generated code compiles correctly
+ - Test parameter ranges and edge cases
+ - Add validation warnings in editor
+
+- **Better Frequency Scale:**
+ - Current log-scale is too bass-heavy
+ - Investigate mu-law or similar perceptual scales
+ - Allow user-configurable frequency mapping
+
+- **Pre-defined Shape Library:**
+ - Template curves for common sounds (kick, snare, hi-hat, bass)
+ - One-click insertion with adjustable parameters
+ - Save/load custom shape presets
+
+**Future Enhancements:**
+- Cubic Bezier interpolation (smoother curves)
+- Multi-dimensional Bezier (vary decay, oscillation over time)
+- Frequency snapping (snap to musical notes/scales)
+- Load `procedural_params.txt` back into editor (re-editing)
+- Frame cache optimization for faster rendering
+- FFT-based DCT optimization (O(N log N) vs O(N²))
+
+## Troubleshooting
+
+**Q: Audio doesn't play**
+- Check browser console for errors
+- Ensure audio context initialized (some browsers require user interaction first)
+- Try clicking canvas before pressing `1` or `2`
+
+**Q: Canvas is blank**
+- Make sure you loaded a reference file (`.wav` or `.spec`)
+- Check console for file loading errors
+
+**Q: Exported code doesn't compile**
+- Ensure `spectral_brush.h/cc` is built and linked
+- Verify `draw_bezier_curve()` function is available
+- Check include paths in your build system
+
+**Q: Generated sound doesn't match original**
+- Adjust sigma (profile width)
+- Add more control points for finer detail
+- Use multiple curves for complex sounds
+
+## Integration with Demo
+
+1. Generate C++ code from editor
+2. Copy code into `src/audio/procedural_samples.cc`
+3. Add entry to `assets/final/demo_assets.txt`:
+ ```
+ SOUND_PROC, PROC(gen_procedural), NONE, "Procedural sound"
+ ```
+4. Rebuild demo
+5. Use `AssetId::SOUND_PROC` in your code
+
+## Browser Compatibility
+
+- **Tested:** Chrome 90+, Firefox 88+, Edge 90+, Safari 14+
+- **Requirements:** Web Audio API support
+- **Recommended:** Desktop browser (mobile support limited)
+
+## License
+
+Part of the 64k demo project. See project LICENSE.
diff --git a/tools/spectral_editor/dct.js b/tools/spectral_editor/dct.js
new file mode 100644
index 0000000..435a7e8
--- /dev/null
+++ b/tools/spectral_editor/dct.js
@@ -0,0 +1,217 @@
+const dctSize = 512; // Default DCT size, read from header
+
+// --- Utility Functions for Audio Processing ---
+// Fast O(N log N) IDCT using FFT
+// JavaScript equivalent of C++ idct_512
+function javascript_idct_512(input) {
+ return javascript_idct_512_fft(input);
+}
+
+// Hanning window for smooth audio transitions (JavaScript equivalent)
+function hanningWindow(size) {
+ const window = new Float32Array(size);
+ const PI = Math.PI;
+ for (let i = 0; i < size; i++) {
+ window[i] = 0.5 * (1 - Math.cos((2 * PI * i) / (size - 1)));
+ }
+ return window;
+}
+
+const hanningWindowArray = hanningWindow(dctSize); // Pre-calculate window
+
+// ============================================================================
+// FFT-based DCT/IDCT Implementation
+// ============================================================================
+// Fast Fourier Transform using Radix-2 Cooley-Tukey algorithm
+// This implementation MUST match the C++ version in src/audio/fft.cc exactly
+
+// Bit-reversal permutation (in-place)
+// Reorders array elements by reversing their binary indices
+function bitReversePermute(real, imag, N) {
+ // Calculate number of bits needed
+ let temp_bits = N;
+ let num_bits = 0;
+ while (temp_bits > 1) {
+ temp_bits >>= 1;
+ num_bits++;
+ }
+
+ for (let i = 0; i < N; i++) {
+ // Compute bit-reversed index
+ let j = 0;
+ let temp = i;
+ for (let b = 0; b < num_bits; b++) {
+ j = (j << 1) | (temp & 1);
+ temp >>= 1;
+ }
+
+ // Swap if j > i (to avoid swapping twice)
+ if (j > i) {
+ const tmp_real = real[i];
+ const tmp_imag = imag[i];
+ real[i] = real[j];
+ imag[i] = imag[j];
+ real[j] = tmp_real;
+ imag[j] = tmp_imag;
+ }
+ }
+}
+
+// In-place radix-2 FFT (after bit-reversal)
+// direction: +1 for forward FFT, -1 for inverse FFT
+function fftRadix2(real, imag, N, direction) {
+ const PI = Math.PI;
+
+ // Butterfly operations
+ for (let stage_size = 2; stage_size <= N; stage_size *= 2) {
+ const half_stage = stage_size / 2;
+ const angle = direction * 2.0 * PI / stage_size;
+
+ // Precompute twiddle factors for this stage
+ let wr = 1.0;
+ let wi = 0.0;
+ const wr_delta = Math.cos(angle);
+ const wi_delta = Math.sin(angle);
+
+ for (let k = 0; k < half_stage; k++) {
+ // Apply butterfly to all groups at this stage
+ for (let group_start = k; group_start < N; group_start += stage_size) {
+ const i = group_start;
+ const j = group_start + half_stage;
+
+ // Complex multiplication: (real[j] + i*imag[j]) * (wr + i*wi)
+ const temp_real = real[j] * wr - imag[j] * wi;
+ const temp_imag = real[j] * wi + imag[j] * wr;
+
+ // Butterfly operation
+ real[j] = real[i] - temp_real;
+ imag[j] = imag[i] - temp_imag;
+ real[i] = real[i] + temp_real;
+ imag[i] = imag[i] + temp_imag;
+ }
+
+ // Update twiddle factor for next k (rotation)
+ const wr_old = wr;
+ wr = wr_old * wr_delta - wi * wi_delta;
+ wi = wr_old * wi_delta + wi * wr_delta;
+ }
+ }
+}
+
+// Forward FFT: Time domain → Frequency domain
+function fftForward(real, imag, N) {
+ bitReversePermute(real, imag, N);
+ fftRadix2(real, imag, N, +1);
+}
+
+// Inverse FFT: Frequency domain → Time domain
+function fftInverse(real, imag, N) {
+ bitReversePermute(real, imag, N);
+ fftRadix2(real, imag, N, -1);
+
+ // Scale by 1/N
+ const scale = 1.0 / N;
+ for (let i = 0; i < N; i++) {
+ real[i] *= scale;
+ imag[i] *= scale;
+ }
+}
+
+// DCT-II via FFT using reordering method (matches C++ dct_fft)
+// Reference: Numerical Recipes Chapter 12.3
+function javascript_dct_fft(input, N) {
+ const PI = Math.PI;
+
+ // Allocate arrays for N-point FFT
+ const real = new Float32Array(N);
+ const imag = new Float32Array(N);
+
+ // Reorder input: even indices first, then odd indices reversed
+ // [x[0], x[2], x[4], ...] followed by [x[N-1], x[N-3], x[N-5], ...]
+ for (let i = 0; i < N / 2; i++) {
+ real[i] = input[2 * i]; // Even indices: 0, 2, 4, ...
+ real[N - 1 - i] = input[2 * i + 1]; // Odd indices reversed: N-1, N-3, ...
+ }
+ // imag is already zeros (Float32Array default)
+
+ // Apply N-point FFT
+ fftForward(real, imag, N);
+
+ // Extract DCT coefficients with phase correction
+ // DCT[k] = Re{FFT[k] * exp(-j*pi*k/(2*N))} * normalization
+ const output = new Float32Array(N);
+ for (let k = 0; k < N; k++) {
+ const angle = -PI * k / (2.0 * N);
+ const wr = Math.cos(angle);
+ const wi = Math.sin(angle);
+
+ // Complex multiplication: (real[k] + j*imag[k]) * (wr + j*wi)
+ // Real part: real*wr - imag*wi
+ const dct_value = real[k] * wr - imag[k] * wi;
+
+ // Apply DCT-II normalization
+ if (k === 0) {
+ output[k] = dct_value * Math.sqrt(1.0 / N);
+ } else {
+ output[k] = dct_value * Math.sqrt(2.0 / N);
+ }
+ }
+
+ return output;
+}
+
+// IDCT (DCT-III) via FFT using reordering method (matches C++ idct_fft)
+// Reference: Numerical Recipes Chapter 12.3
+function javascript_idct_fft(input, N) {
+ const PI = Math.PI;
+
+ // Allocate arrays for N-point FFT
+ const real = new Float32Array(N);
+ const imag = new Float32Array(N);
+
+ // Prepare FFT input with inverse phase correction
+ // FFT[k] = DCT[k] * exp(+j*pi*k/(2*N)) / normalization
+ // Note: DCT-III needs factor of 2 for AC terms
+ for (let k = 0; k < N; k++) {
+ const angle = PI * k / (2.0 * N); // Positive angle for inverse
+ const wr = Math.cos(angle);
+ const wi = Math.sin(angle);
+
+ // Inverse of DCT-II normalization with correct DCT-III scaling
+ let scaled;
+ if (k === 0) {
+ scaled = input[k] / Math.sqrt(1.0 / N);
+ } else {
+ // DCT-III needs factor of 2 for AC terms
+ scaled = input[k] / Math.sqrt(2.0 / N) * 2.0;
+ }
+
+ // Complex multiplication: scaled * (wr + j*wi)
+ real[k] = scaled * wr;
+ imag[k] = scaled * wi;
+ }
+
+ // Apply inverse FFT
+ fftInverse(real, imag, N);
+
+ // Unpack: reverse the reordering from DCT
+ // Even output indices come from first half of FFT output
+ // Odd output indices come from second half (reversed)
+ const output = new Float32Array(N);
+ for (let i = 0; i < N / 2; i++) {
+ output[2 * i] = real[i]; // Even positions
+ output[2 * i + 1] = real[N - 1 - i]; // Odd positions (reversed)
+ }
+
+ return output;
+}
+
+// Convenience wrappers for dctSize = 512 (backward compatible)
+function javascript_dct_512_fft(input) {
+ return javascript_dct_fft(input, dctSize);
+}
+
+function javascript_idct_512_fft(input) {
+ return javascript_idct_fft(input, dctSize);
+}
+
diff --git a/tools/spectral_editor/index.html b/tools/spectral_editor/index.html
new file mode 100644
index 0000000..a9391dd
--- /dev/null
+++ b/tools/spectral_editor/index.html
@@ -0,0 +1,180 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Spectral Brush Editor</title>
    <link rel="stylesheet" href="style.css">
</head>
<body>
    <!-- Element IDs below are wired up by script.js (initUI/initCanvas);
         keep them stable when editing this markup. -->
    <div id="app">
        <!-- Header -->
        <header>
            <h1>Spectral Brush Editor</h1>
            <div class="header-controls">
                <button id="loadWavBtn" class="btn-primary">Load .wav/.spec</button>
                <!-- Hidden file input; opened by the Load button and Ctrl+O -->
                <input type="file" id="fileInput" accept=".wav,.spec" style="display:none">
                <span id="fileInfo" class="file-info"></span>
            </div>
        </header>

        <!-- Main content area -->
        <div class="main-content">
            <!-- Canvas area (left side, 80% width) -->
            <div class="canvas-container">
                <canvas id="spectrogramCanvas"></canvas>
                <!-- Shown until a reference file is loaded -->
                <div id="canvasOverlay" class="canvas-overlay">
                    <p>Load a .wav or .spec file to begin</p>
                    <p class="hint">Click "Load .wav/.spec" button or press Ctrl+O</p>
                </div>
                <!-- Mini spectrum viewer (bottom-right overlay) -->
                <div id="spectrumViewer" class="spectrum-viewer">
                    <canvas id="spectrumCanvas" width="200" height="100"></canvas>
                </div>
            </div>

            <!-- Toolbar (right side, 20% width) -->
            <div class="toolbar">
                <h3>Curves</h3>
                <button id="addCurveBtn" class="btn-toolbar" title="Add new curve">
                    <span class="icon">+</span> Add Curve
                </button>
                <button id="deleteCurveBtn" class="btn-toolbar btn-danger" title="Delete selected curve" disabled>
                    <span class="icon">×</span> Delete
                </button>
                <div id="curveList" class="curve-list"></div>

                <h3>Selected Point</h3>
                <!-- Populated by updatePointInfo() in script.js -->
                <div id="pointInfo" class="point-info">
                    <div class="info-row">
                        <span class="info-label">Frame:</span>
                        <span id="pointFrame" class="info-value">-</span>
                    </div>
                    <div class="info-row">
                        <span class="info-label">Frequency:</span>
                        <span id="pointFreq" class="info-value">-</span>
                    </div>
                    <div class="info-row">
                        <span class="info-label">Amplitude:</span>
                        <span id="pointAmp" class="info-value">-</span>
                    </div>
                </div>
            </div>
        </div>

        <!-- Control panel (bottom) -->
        <div class="control-panel">
            <!-- Left section: Profile controls -->
            <div class="control-section">
                <label for="profileType">Profile:</label>
                <select id="profileType" class="select-input">
                    <option value="gaussian">Gaussian</option>
                    <option value="decaying_sinusoid">Decaying Sinusoid</option>
                    <option value="noise">Noise</option>
                </select>

                <!-- Label text is swapped by script.js depending on profile type -->
                <label for="sigmaSlider" id="sigmaLabel">Sigma:</label>
                <input type="range" id="sigmaSlider" class="slider" min="1" max="100" value="30" step="0.1">
                <input type="number" id="sigmaValue" class="number-input" min="1" max="100" value="30" step="0.1">

                <label for="curveVolumeSlider">Curve Vol:</label>
                <input type="range" id="curveVolumeSlider" class="slider" min="0" max="100" value="100" step="1">
                <input type="number" id="curveVolumeValue" class="number-input" min="0" max="100" value="100" step="1">
            </div>

            <!-- Middle section: Display controls -->
            <div class="control-section">
                <label for="refOpacitySlider">Ref Opacity:</label>
                <input type="range" id="refOpacitySlider" class="slider" min="0" max="100" value="50" step="1">
                <input type="number" id="refOpacityValue" class="number-input" min="0" max="100" value="50" step="1">
            </div>

            <!-- Curve selection -->
            <div class="control-section">
                <label for="curveSelect">Active Curve:</label>
                <select id="curveSelect" class="select-input">
                    <option value="-1">No curves</option>
                </select>
            </div>

            <!-- Right section: Playback controls -->
            <div class="control-section playback-controls">
                <label for="volumeSlider">Volume:</label>
                <input type="range" id="volumeSlider" class="slider" min="0" max="100" value="100" step="1">
                <input type="number" id="volumeValue" class="number-input" min="0" max="100" value="100" step="1">
            </div>

            <div class="control-section playback-controls">
                <button id="playProceduralBtn" class="btn-playback" title="Play procedural sound (Key 1)">
                    <span class="icon">▶</span> <kbd>1</kbd> Procedural
                </button>
                <button id="playOriginalBtn" class="btn-playback" title="Play original .wav (Key 2)" disabled>
                    <span class="icon">▶</span> <kbd>2</kbd> Original
                </button>
                <button id="stopBtn" class="btn-playback" title="Stop playback (Space)">
                    <span class="icon">■</span> <kbd>Space</kbd> Stop
                </button>
            </div>
        </div>

        <!-- Bottom action bar -->
        <div class="action-bar">
            <div class="action-group">
                <button id="undoBtn" class="btn-action" title="Undo (Ctrl+Z)" disabled>
                    <span class="icon">↶</span> Undo
                </button>
                <button id="redoBtn" class="btn-action" title="Redo (Ctrl+Shift+Z)" disabled>
                    <span class="icon">↷</span> Redo
                </button>
            </div>

            <div class="action-group">
                <button id="saveParamsBtn" class="btn-action" title="Save procedural_params.txt (Ctrl+S)">
                    <span class="icon">💾</span> Save Params
                </button>
                <button id="generateCodeBtn" class="btn-action" title="Generate C++ code (Ctrl+Shift+S)">
                    <span class="icon">📝</span> Generate C++
                </button>
            </div>

            <div class="action-group">
                <button id="helpBtn" class="btn-action" title="Show keyboard shortcuts (?)">
                    <span class="icon">?</span> Help
                </button>
            </div>
        </div>
    </div>

    <!-- Help modal (hidden by default) -->
    <div id="helpModal" class="modal" style="display:none">
        <div class="modal-content">
            <span class="modal-close" id="closeHelpModal">&times;</span>
            <h2>Keyboard Shortcuts</h2>
            <table class="shortcuts-table">
                <tr><th>Key</th><th>Action</th></tr>
                <tr><td><kbd>1</kbd></td><td>Play procedural sound</td></tr>
                <tr><td><kbd>2</kbd></td><td>Play original .wav</td></tr>
                <tr><td><kbd>Space</kbd></td><td>Stop playback</td></tr>
                <tr><td><kbd>Delete</kbd></td><td>Delete selected control point</td></tr>
                <tr><td><kbd>Esc</kbd></td><td>Deselect all</td></tr>
                <tr><td><kbd>Ctrl+Z</kbd></td><td>Undo</td></tr>
                <tr><td><kbd>Ctrl+Shift+Z</kbd></td><td>Redo</td></tr>
                <tr><td><kbd>Ctrl+S</kbd></td><td>Save procedural_params.txt</td></tr>
                <tr><td><kbd>Ctrl+Shift+S</kbd></td><td>Generate C++ code</td></tr>
                <tr><td><kbd>Ctrl+O</kbd></td><td>Open file</td></tr>
                <tr><td><kbd>?</kbd></td><td>Show this help</td></tr>
            </table>
            <h3>Mouse Controls</h3>
            <ul>
                <li><strong>Click</strong> on canvas: Place control point</li>
                <li><strong>Drag</strong> control point: Adjust position (frame, frequency, amplitude)</li>
                <li><strong>Right-click</strong> control point: Delete</li>
            </ul>
        </div>
    </div>

    <!-- Scripts: dct.js must load first — script.js calls its DCT/FFT helpers -->
    <script src="dct.js"></script>
    <script src="script.js"></script>
</body>
</html>
diff --git a/tools/spectral_editor/script.js b/tools/spectral_editor/script.js
new file mode 100644
index 0000000..6c6dd49
--- /dev/null
+++ b/tools/spectral_editor/script.js
@@ -0,0 +1,1774 @@
// Spectral Brush Editor - Main Script
// Implements Bezier curve editing, spectrogram rendering, and audio playback

// ============================================================================
// State Management
// ============================================================================

// Fixed analysis parameters. SAMPLE_RATE is also the rate requested from the
// AudioContext; DCT_SIZE is the per-frame transform length used by the
// dct.js helpers.
const SAMPLE_RATE = 32000;
const DCT_SIZE = 512;

// Frequency range for log-scale display
const FREQ_MIN = 20.0; // 20 Hz (lowest audible bass)
const FREQ_MAX = 16000.0; // 16 kHz (Nyquist for 32kHz sample rate)
const USE_LOG_SCALE = true; // Enable logarithmic frequency axis
+
// Single mutable application-state object shared by every function in this
// module. Grouped by concern; see the section comments below.
const state = {
  // Reference audio data
  referenceSpectrogram: null, // Float32Array or null
  referenceDctSize: DCT_SIZE,
  referenceNumFrames: 0,

  // Procedural curves
  curves: [], // Array of {id, controlPoints: [{frame, freqHz, amplitude}], profile: {type, param1, param2}}
  nextCurveId: 0, // monotonically increasing id source for new curves
  selectedCurveId: null,
  selectedControlPointIdx: null, // index into the selected curve's controlPoints

  // Canvas state
  canvasWidth: 0,
  canvasHeight: 0,
  pixelsPerFrame: 2.0, // Zoom level (pixels per frame)
  pixelsPerBin: 1.0, // Vertical scale (pixels per frequency bin)

  // Audio playback
  audioContext: null,
  isPlaying: false,
  currentSource: null,
  currentGainNode: null, // Keep reference to gain node for live volume updates
  playbackVolume: 1.0, // Global volume for playback (0.0-1.0, increased from 0.7)
  referenceOpacity: 0.5, // Opacity for reference spectrogram (0.0-1.0, increased from 0.3)

  // Playhead indicator
  playbackStartTime: null,
  playbackDuration: 0,
  playbackCurrentFrame: 0,

  // Mouse hover state (-1 for mouseX/mouseY means the cursor is off-canvas)
  mouseX: -1,
  mouseY: -1,
  mouseFrame: 0,
  mouseFreq: 0,

  // Undo/Redo
  history: [],
  historyIndex: -1,
  maxHistorySize: 50
};
+
+// ============================================================================
+// Initialization
+// ============================================================================
+
// Entry point: wire up the canvas, UI controls, keyboard shortcuts, and the
// Web Audio context once the DOM is ready.
document.addEventListener('DOMContentLoaded', () => {
  initCanvas();
  initUI();
  initKeyboardShortcuts();
  initAudioContext();

  console.log('Spectral Brush Editor initialized');
});
+
// Sets up the spectrogram canvas: keeps its backing-store size in sync with
// the container and attaches all mouse interaction handlers (point editing
// plus hover crosshair / spectrum preview).
function initCanvas() {
  const canvas = document.getElementById('spectrogramCanvas');
  const container = canvas.parentElement;

  // Resize the canvas to fill its container, mirror the size into state,
  // and repaint.
  function fitToContainer() {
    canvas.width = container.clientWidth;
    canvas.height = container.clientHeight;
    state.canvasWidth = canvas.width;
    state.canvasHeight = canvas.height;
    render();
  }

  window.addEventListener('resize', fitToContainer);
  fitToContainer();

  // Editing interactions: place, drag, and delete control points.
  canvas.addEventListener('mousedown', onCanvasMouseDown);
  canvas.addEventListener('mousemove', onCanvasMouseMove);
  canvas.addEventListener('mouseup', onCanvasMouseUp);
  canvas.addEventListener('contextmenu', onCanvasRightClick);

  // Hover tracking for the crosshair and the mini spectrum viewer.
  canvas.addEventListener('mousemove', onCanvasHover);
  canvas.addEventListener('mouseleave', onCanvasLeave);
}
+
// Wires every toolbar / control-panel widget to its handler function.
function initUI() {
  // Small helper to cut down on getElementById/addEventListener noise.
  const on = (id, event, handler) =>
    document.getElementById(id).addEventListener(event, handler);

  // File loading
  on('loadWavBtn', 'click', () => document.getElementById('fileInput').click());
  on('fileInput', 'change', onFileSelected);

  // Curve management
  on('addCurveBtn', 'click', addCurve);
  on('deleteCurveBtn', 'click', deleteSelectedCurve);
  on('curveSelect', 'change', onCurveSelected);

  // Profile controls
  on('profileType', 'change', onProfileChanged);
  on('sigmaSlider', 'input', onSigmaChanged);
  on('sigmaValue', 'input', onSigmaValueChanged);
  on('curveVolumeSlider', 'input', onCurveVolumeChanged);
  on('curveVolumeValue', 'input', onCurveVolumeValueChanged);

  // Display controls
  on('refOpacitySlider', 'input', onRefOpacityChanged);
  on('refOpacityValue', 'input', onRefOpacityValueChanged);

  // Playback controls
  on('volumeSlider', 'input', onVolumeChanged);
  on('volumeValue', 'input', onVolumeValueChanged);
  on('playProceduralBtn', 'click', () => playAudio('procedural'));
  on('playOriginalBtn', 'click', () => playAudio('original'));
  on('stopBtn', 'click', stopAudio);

  // Action buttons
  on('undoBtn', 'click', undo);
  on('redoBtn', 'click', redo);
  on('saveParamsBtn', 'click', saveProceduralParams);
  on('generateCodeBtn', 'click', generateCppCode);
  on('helpBtn', 'click', showHelp);

  // Help modal: close button, plus clicking on the backdrop itself.
  on('closeHelpModal', 'click', hideHelp);
  on('helpModal', 'click', (e) => {
    if (e.target.id === 'helpModal') hideHelp();
  });
}
+
// Registers the global keyboard shortcuts listed in the Help modal.
//
// BUG FIX: the original handler fired single-key shortcuts unconditionally,
// so typing "1", "2", Space, or Delete inside the sigma/volume number inputs
// triggered playback or point deletion. Plain-key shortcuts are now ignored
// while an input, select, textarea, or contenteditable element has focus;
// Ctrl-based shortcuts still always apply (with preventDefault to override
// the browser's own Ctrl+S / Ctrl+O behavior).
function initKeyboardShortcuts() {
  document.addEventListener('keydown', (e) => {
    const t = e.target;
    const isTyping = !!t && (t.tagName === 'INPUT' || t.tagName === 'TEXTAREA' ||
                             t.tagName === 'SELECT' || t.isContentEditable);

    // Undo/Redo (check the Shift variant first so Ctrl+Shift+Z is not
    // swallowed by the plain Ctrl+Z branch)
    if (e.ctrlKey && e.shiftKey && e.key === 'Z') {
      e.preventDefault();
      redo();
      return;
    }
    if (e.ctrlKey && e.key === 'z') {
      e.preventDefault();
      undo();
      return;
    }

    // File operations
    if (e.ctrlKey && e.shiftKey && e.key === 'S') {
      e.preventDefault();
      generateCppCode();
      return;
    }
    if (e.ctrlKey && e.key === 's') {
      e.preventDefault();
      saveProceduralParams();
      return;
    }
    if (e.ctrlKey && e.key === 'o') {
      e.preventDefault();
      document.getElementById('fileInput').click();
      return;
    }

    // Everything below is a plain keypress: suppress while typing in a field.
    if (isTyping) return;

    // Playback shortcuts
    if (e.key === '1') {
      playAudio('procedural');
      return;
    }
    if (e.key === '2') {
      playAudio('original');
      return;
    }
    if (e.key === ' ') {
      e.preventDefault(); // stop Space from scrolling / clicking focused button
      stopAudio();
      return;
    }

    // Edit shortcuts
    if (e.key === 'Delete') {
      deleteSelectedControlPoint();
      return;
    }
    if (e.key === 'Escape') {
      deselectAll();
      return;
    }

    // Help
    if (e.key === '?') {
      showHelp();
      return;
    }
  });
}
+
// Creates the Web Audio context at the editor's fixed sample rate.
// On failure the editor remains usable for editing; only playback is lost.
function initAudioContext() {
  const ContextCtor = window.AudioContext || window.webkitAudioContext;
  try {
    state.audioContext = new ContextCtor({sampleRate: SAMPLE_RATE});
    console.log('Audio context initialized:', state.audioContext.sampleRate, 'Hz');
  } catch (error) {
    console.error('Failed to initialize audio context:', error);
    alert('Audio playback unavailable. Your browser may not support Web Audio API.');
  }
}
+
+// ============================================================================
+// File Loading
+// ============================================================================
+
// Change handler for the hidden file input: offers to save unsaved curves,
// then dispatches to the loader matching the selected file's extension.
function onFileSelected(e) {
  const file = e.target.files[0];
  if (!file) return;

  // Warn before blowing away existing curves.
  if (state.curves.length > 0) {
    const wantsSave = confirm(
      'You have unsaved curves. Loading a new file will reset all curves.\n\n' +
      'Do you want to save your work first?\n\n' +
      'Click "OK" to save, or "Cancel" to discard and continue loading.'
    );

    if (wantsSave) {
      saveProceduralParams();
      // After saving, confirm the user still wants to replace the session.
      if (!confirm('File saved. Proceed with loading new file?')) {
        e.target.value = ''; // reset so re-choosing the same file fires change
        return;
      }
    }
  }

  const ext = file.name.split('.').pop().toLowerCase();
  switch (ext) {
    case 'wav':
      loadWavFile(file);
      break;
    case 'spec':
      loadSpecFile(file);
      break;
    default:
      alert('Unsupported file format. Please load a .wav or .spec file.');
  }
}
+
// Reads a .wav file, decodes it through Web Audio, converts channel 0 into a
// DCT spectrogram, and installs the result as the reference layer.
function loadWavFile(file) {
  const reader = new FileReader();

  reader.onload = (ev) => {
    const onDecoded = (audioBuffer) => {
      console.log('Decoded WAV:', audioBuffer.length, 'samples,', audioBuffer.numberOfChannels, 'channels');

      // Only the first channel is analyzed.
      const spec = audioToSpectrogram(audioBuffer.getChannelData(0));
      state.referenceSpectrogram = spec.data;
      state.referenceDctSize = spec.dctSize;
      state.referenceNumFrames = spec.numFrames;

      onReferenceLoaded(file.name);
    };

    const onDecodeError = (error) => {
      console.error('Failed to decode WAV:', error);
      alert('Failed to decode WAV file. Make sure it is a valid audio file.');
    };

    state.audioContext.decodeAudioData(ev.target.result, onDecoded, onDecodeError);
  };

  reader.readAsArrayBuffer(file);
}
+
// Reads a binary .spec file and installs its spectrogram as the reference.
function loadSpecFile(file) {
  const reader = new FileReader();

  reader.onload = (ev) => {
    const spec = parseSpecFile(ev.target.result);
    if (!spec) {
      alert('Failed to parse .spec file. Invalid format.');
      return;
    }

    state.referenceSpectrogram = spec.data;
    state.referenceDctSize = spec.dctSize;
    state.referenceNumFrames = spec.numFrames;
    onReferenceLoaded(file.name);
  };

  reader.readAsArrayBuffer(file);
}
+
// Parses the binary .spec format:
//   "SPEC" magic (4 bytes) | version u32 | dct_size u32 | num_frames u32 |
//   dct_size * num_frames little-endian float32 coefficients.
//
// Returns {dctSize, numFrames, data: Float32Array} or null on any format
// error.
//
// BUG FIX: the original read the header and payload without any length
// checks, so a truncated or garbage file made DataView throw an uncaught
// RangeError instead of returning null (callers only handle null → alert).
function parseSpecFile(arrayBuffer) {
  const HEADER_BYTES = 16; // magic(4) + version(4) + dct_size(4) + num_frames(4)

  if (arrayBuffer.byteLength < HEADER_BYTES) {
    console.error('Invalid .spec file: too short for header', arrayBuffer.byteLength);
    return null;
  }

  const view = new DataView(arrayBuffer);
  let offset = 0;

  // Read header: "SPEC" magic (4 bytes)
  const magic = String.fromCharCode(
    view.getUint8(offset++),
    view.getUint8(offset++),
    view.getUint8(offset++),
    view.getUint8(offset++)
  );

  if (magic !== 'SPEC') {
    console.error('Invalid .spec file: wrong magic', magic);
    return null;
  }

  // Read version (uint32)
  const version = view.getUint32(offset, true);
  offset += 4;

  // Read dct_size (uint32)
  const dctSize = view.getUint32(offset, true);
  offset += 4;

  // Read num_frames (uint32)
  const numFrames = view.getUint32(offset, true);
  offset += 4;

  console.log('.spec header:', {version, dctSize, numFrames});

  // Validate the payload length before reading the floats.
  const dataLength = dctSize * numFrames;
  if (arrayBuffer.byteLength < HEADER_BYTES + dataLength * 4) {
    console.error('Invalid .spec file: truncated payload',
                  arrayBuffer.byteLength, 'bytes for', dataLength, 'floats');
    return null;
  }

  // Read spectral data (little-endian float32 array)
  const data = new Float32Array(dataLength);
  for (let i = 0; i < dataLength; i++) {
    data[i] = view.getFloat32(offset, true);
    offset += 4;
  }

  return {dctSize, numFrames, data};
}
+
// Simplified STFT: slices the audio into 50%-overlapping frames
// (hop = DCT_SIZE / 2), applies the Hann window from dct.js, and runs a
// forward DCT on each frame. Returns {dctSize, numFrames, data} with the
// coefficients stored frame-major.
function audioToSpectrogram(audioData) {
  const hopSize = DCT_SIZE / 2;
  const numFrames = Math.floor((audioData.length - DCT_SIZE) / hopSize) + 1;

  const spectrogram = new Float32Array(DCT_SIZE * numFrames);
  const win = hanningWindowArray; // avoid shadowing the global `window`

  for (let frameIdx = 0; frameIdx < numFrames; frameIdx++) {
    const start = frameIdx * hopSize;

    // Windowed frame; samples past the end of the audio stay zero.
    const windowed = new Float32Array(DCT_SIZE);
    const limit = Math.min(DCT_SIZE, audioData.length - start);
    for (let i = 0; i < limit; i++) {
      windowed[i] = audioData[start + i] * win[i];
    }

    // Forward DCT, written directly into the frame's slot.
    const coeffs = javascript_dct_512(windowed);
    spectrogram.set(coeffs, frameIdx * DCT_SIZE);
  }

  return {dctSize: DCT_SIZE, numFrames, data: spectrogram};
}
+
// Forward DCT over a 512-sample frame.
// Thin alias for the O(N log N) FFT-based implementation in dct.js.
function javascript_dct_512(input) {
  const coefficients = javascript_dct_512_fft(input);
  return coefficients;
}
+
// Runs after a reference spectrogram has been installed in state: updates
// the header/overlay UI, clears all editing state, and re-fits the zoom.
function onReferenceLoaded(fileName) {
  console.log('Reference loaded:', fileName);
  document.getElementById('fileInfo').textContent = fileName;
  document.getElementById('canvasOverlay').classList.add('hidden');
  document.getElementById('playOriginalBtn').disabled = false;

  // A new reference invalidates curves, selection, undo history, and the
  // hovered frame — reset them all in one go.
  Object.assign(state, {
    curves: [],
    nextCurveId: 0,
    selectedCurveId: null,
    selectedControlPointIdx: null,
    history: [],
    historyIndex: -1,
    mouseFrame: 0
  });

  // Stretch (never below 1 px/frame) so the whole file spans the canvas.
  state.pixelsPerFrame = Math.max(1.0, state.canvasWidth / state.referenceNumFrames);

  updateCurveUI();
  updateUndoRedoButtons();
  render();
  drawSpectrumViewer(); // show the initial spectrum in the mini viewer
}
+
+// ============================================================================
+// Curve Management
+// ============================================================================
+
// Creates a new, empty procedural curve, makes it the active selection, and
// records the change in the undo history.
function addCurve() {
  // Fixed palette cycled by creation order so curves stay visually distinct.
  const palette = [
    '#0e639c', // Blue
    '#00aa00', // Green
    '#cc5500', // Orange
    '#aa00aa', // Purple
    '#00aaaa', // Cyan
    '#aa5500', // Brown
    '#ff69b4', // Pink
    '#ffd700', // Gold
  ];

  const newCurve = {
    id: state.nextCurveId++,
    controlPoints: [],          // populated as the user clicks the canvas
    profile: {
      type: 'gaussian',
      param1: 30.0,             // sigma for gaussian; decay for other types
      param2: 0.0
    },
    color: palette[state.curves.length % palette.length],
    volume: 1.0                 // per-curve volume multiplier (0.0-1.0)
  };

  state.curves.push(newCurve);
  state.selectedCurveId = newCurve.id;

  saveHistoryState('Add curve');
  updateCurveUI();
  render();
}
+
// Removes the active curve (Delete button in the toolbar).
function deleteSelectedCurve() {
  if (state.selectedCurveId === null) return;

  const idx = state.curves.findIndex(c => c.id === state.selectedCurveId);
  if (idx < 0) return; // stale selection — nothing to remove

  state.curves.splice(idx, 1);
  state.selectedCurveId = null;
  state.selectedControlPointIdx = null;

  saveHistoryState('Delete curve');
  updateCurveUI();
  render();
}
+
// Change handler for the "Active Curve" dropdown; -1 means "no curves".
function onCurveSelected(e) {
  const chosenId = Number(e.target.value);
  state.selectedCurveId = chosenId >= 0 ? chosenId : null;
  state.selectedControlPointIdx = null;

  updateCurveUI();
  render();
}
+
// Rebuilds every piece of curve-related chrome from state: the toolbar curve
// list, the "Active Curve" dropdown, the Delete button, the profile/volume
// controls, and the selected-point panel.
function updateCurveUI() {
  // --- Toolbar curve list ---
  const curveList = document.getElementById('curveList');
  curveList.innerHTML = '';

  for (const curve of state.curves) {
    const item = document.createElement('div');
    item.className = 'curve-item';
    if (curve.id === state.selectedCurveId) item.classList.add('selected');

    // Color swatch for the curve.
    const dot = document.createElement('span');
    Object.assign(dot.style, {
      display: 'inline-block',
      width: '12px',
      height: '12px',
      borderRadius: '50%',
      backgroundColor: curve.color || '#0e639c',
      marginRight: '8px',
      verticalAlign: 'middle'
    });

    item.appendChild(dot);
    item.appendChild(document.createTextNode(`Curve ${curve.id} (${curve.controlPoints.length} points)`));

    // Clicking a list entry selects that curve.
    item.addEventListener('click', () => {
      state.selectedCurveId = curve.id;
      state.selectedControlPointIdx = null;
      updateCurveUI();
      updatePointInfo();
      render();
    });

    curveList.appendChild(item);
  }

  // --- "Active Curve" dropdown ---
  const curveSelect = document.getElementById('curveSelect');
  curveSelect.innerHTML = '';

  if (state.curves.length === 0) {
    const placeholder = document.createElement('option');
    placeholder.value = -1;
    placeholder.textContent = 'No curves';
    curveSelect.appendChild(placeholder);
  } else {
    for (const curve of state.curves) {
      const opt = document.createElement('option');
      opt.value = curve.id;
      opt.textContent = `Curve ${curve.id}`;
      opt.selected = curve.id === state.selectedCurveId;
      curveSelect.appendChild(opt);
    }
  }

  // --- Delete button enabled only with a selection ---
  document.getElementById('deleteCurveBtn').disabled = state.selectedCurveId === null;

  // --- Mirror the selected curve's profile + volume into the controls ---
  if (state.selectedCurveId !== null) {
    const selected = state.curves.find(c => c.id === state.selectedCurveId);
    if (selected) {
      document.getElementById('profileType').value = selected.profile.type;
      document.getElementById('sigmaSlider').value = selected.profile.param1;
      document.getElementById('sigmaValue').value = selected.profile.param1;

      const volumePercent = Math.round(selected.volume * 100);
      document.getElementById('curveVolumeSlider').value = volumePercent;
      document.getElementById('curveVolumeValue').value = volumePercent;
    }
  }

  // --- Selected-point panel ---
  updatePointInfo();
}
+
// Refreshes the "Selected Point" panel (frame / frequency / amplitude).
// Shows "-" placeholders when no curve or no control point is selected, or
// when the stored point index is out of range.
//
// Cleanup: the original had two identical copy-pasted reset branches; they
// are consolidated into a single guard.
function updatePointInfo() {
  const frameEl = document.getElementById('pointFrame');
  const freqEl = document.getElementById('pointFreq');
  const ampEl = document.getElementById('pointAmp');

  // Resolve the selected point, if any; stale indexes yield undefined.
  const curve = state.selectedCurveId !== null
    ? state.curves.find(c => c.id === state.selectedCurveId)
    : undefined;
  const point = (curve && state.selectedControlPointIdx !== null)
    ? curve.controlPoints[state.selectedControlPointIdx]
    : undefined;

  if (!point) {
    frameEl.textContent = '-';
    freqEl.textContent = '-';
    ampEl.textContent = '-';
    return;
  }

  frameEl.textContent = point.frame.toFixed(0);
  freqEl.textContent = point.freqHz.toFixed(1) + ' Hz';
  ampEl.textContent = point.amplitude.toFixed(3);
}
+
+// ============================================================================
+// Profile Controls
+// ============================================================================
+
// Change handler for the Profile dropdown: stores the new type on the active
// curve and relabels the param1 slider, since its meaning differs per type.
function onProfileChanged(e) {
  if (state.selectedCurveId === null) return;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  curve.profile.type = e.target.value;

  // param1 is "sigma" for gaussian and "decay" for the other profiles.
  const labelByType = {
    gaussian: 'Sigma:',
    decaying_sinusoid: 'Decay:',
    noise: 'Decay:'
  };
  const labelText = labelByType[curve.profile.type];
  if (labelText) {
    document.getElementById('sigmaLabel').textContent = labelText;
  }

  saveHistoryState('Change profile');
  render();
}
+
// Shared by the sigma slider and its number box: writes param1 on the active
// curve, mirrors the value into the sibling widget, and repaints.
function syncSigmaControls(rawValue, mirrorId) {
  if (state.selectedCurveId === null) return;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  curve.profile.param1 = parseFloat(rawValue);
  document.getElementById(mirrorId).value = curve.profile.param1;
  render();
}

// Slider moved: mirror into the number box.
function onSigmaChanged(e) {
  syncSigmaControls(e.target.value, 'sigmaValue');
}

// Number box edited: mirror into the slider.
function onSigmaValueChanged(e) {
  syncSigmaControls(e.target.value, 'sigmaSlider');
}
+
// Shared by the reference-opacity slider and number box: converts the 0-100
// widget value to 0.0-1.0, mirrors it to the sibling widget, and repaints.
function syncRefOpacityControls(rawValue, mirrorId) {
  state.referenceOpacity = parseFloat(rawValue) / 100.0;
  document.getElementById(mirrorId).value = rawValue;
  render();
}

function onRefOpacityChanged(e) {
  syncRefOpacityControls(e.target.value, 'refOpacityValue');
}

function onRefOpacityValueChanged(e) {
  syncRefOpacityControls(e.target.value, 'refOpacitySlider');
}
+
// Shared by the master-volume slider and number box (0-100 -> 0.0-1.0).
// Also pushes the new gain into the live gain node so volume changes are
// audible mid-playback.
function syncMasterVolumeControls(rawValue, mirrorId) {
  state.playbackVolume = parseFloat(rawValue) / 100.0;
  document.getElementById(mirrorId).value = rawValue;

  if (state.currentGainNode) {
    state.currentGainNode.gain.value = state.playbackVolume;
  }
}

function onVolumeChanged(e) {
  syncMasterVolumeControls(e.target.value, 'volumeValue');
}

function onVolumeValueChanged(e) {
  syncMasterVolumeControls(e.target.value, 'volumeSlider');
}
+
// Shared by the per-curve volume slider and number box (0-100 -> 0.0-1.0);
// applies only to the currently selected curve.
function syncCurveVolumeControls(rawValue, mirrorId) {
  if (state.selectedCurveId === null) return;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  curve.volume = parseFloat(rawValue) / 100.0;
  document.getElementById(mirrorId).value = rawValue;
  render();
}

function onCurveVolumeChanged(e) {
  syncCurveVolumeControls(e.target.value, 'curveVolumeValue');
}

function onCurveVolumeValueChanged(e) {
  syncCurveVolumeControls(e.target.value, 'curveVolumeSlider');
}
+
+// ============================================================================
+// Canvas Interaction
+// ============================================================================
+
// Transient drag state shared by the canvas mouse handlers below.
// Deliberately outside `state`: it is never part of the undo history.
let isDragging = false;  // true while a control point is being dragged
let dragStartX = 0;      // canvas-local coords where the drag began
let dragStartY = 0;
+
// Mouse-down on the spectrogram: either grab an existing control point and
// begin dragging it, or (if a curve is active) drop a new control point at
// the clicked position.
function onCanvasMouseDown(e) {
  const rect = e.target.getBoundingClientRect();
  const x = e.clientX - rect.left;
  const y = e.clientY - rect.top;

  const hit = findControlPointAt(x, y);
  if (hit) {
    // Existing point: select it and start the drag.
    state.selectedCurveId = hit.curveId;
    state.selectedControlPointIdx = hit.pointIdx;
    isDragging = true;
    dragStartX = x;
    dragStartY = y;
    updateCurveUI();
    updatePointInfo();
    render();
    return;
  }

  if (state.selectedCurveId === null) return;
  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  // Empty-canvas click: append a new point and keep the list frame-ordered.
  curve.controlPoints.push(screenToSpectrogram(x, y));
  curve.controlPoints.sort((a, b) => a.frame - b.frame);

  saveHistoryState('Add control point');
  updateCurveUI();
  updatePointInfo();
  render();
}
+
// Drag handler: moves the selected control point to follow the cursor,
// keeping the curve's control points sorted by frame.
//
// BUG FIX: the original re-sorted controlPoints but left
// state.selectedControlPointIdx pointing at the OLD array slot, so once a
// dragged point crossed another point's frame, subsequent mousemove events
// started dragging the wrong point. The index is now re-resolved after the
// sort so the same point object stays attached to the cursor.
function onCanvasMouseMove(e) {
  if (!isDragging) return;
  if (state.selectedCurveId === null || state.selectedControlPointIdx === null) return;

  const rect = e.target.getBoundingClientRect();
  const x = e.clientX - rect.left;
  const y = e.clientY - rect.top;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;

  const point = curve.controlPoints[state.selectedControlPointIdx];
  if (!point) return;

  // Update point position from the cursor.
  const newPoint = screenToSpectrogram(x, y);
  point.frame = newPoint.frame;
  point.freqHz = newPoint.freqHz;
  point.amplitude = newPoint.amplitude;

  // Re-sort by frame, then re-resolve the dragged point's index (it may
  // have moved within the array).
  curve.controlPoints.sort((a, b) => a.frame - b.frame);
  state.selectedControlPointIdx = curve.controlPoints.indexOf(point);

  // Update point info panel in real-time.
  updatePointInfo();

  render();
}
+
// Ends a drag. The move is committed to the undo history exactly once here,
// on release, rather than on every mousemove tick.
function onCanvasMouseUp(e) {
  if (!isDragging) return;
  isDragging = false;
  saveHistoryState('Move control point');
}
+
// Right-click deletes the control point under the cursor. The browser
// context menu is suppressed whether or not a point was hit.
function onCanvasRightClick(e) {
  e.preventDefault();

  const rect = e.target.getBoundingClientRect();
  const hit = findControlPointAt(e.clientX - rect.left, e.clientY - rect.top);
  if (!hit) return;

  const curve = state.curves.find(c => c.id === hit.curveId);
  if (!curve) return;

  curve.controlPoints.splice(hit.pointIdx, 1);
  state.selectedControlPointIdx = null;

  saveHistoryState('Delete control point');
  updateCurveUI();
  render();
}
+
// Hit-test: returns {curveId, pointIdx} for the first control point within
// CLICK_RADIUS pixels of the given canvas position, or null if none is
// close enough. Uses squared distance to avoid the sqrt.
function findControlPointAt(screenX, screenY) {
  const CLICK_RADIUS = 8; // pixels
  const radiusSq = CLICK_RADIUS * CLICK_RADIUS;

  for (const curve of state.curves) {
    for (let i = 0; i < curve.controlPoints.length; i++) {
      const cp = curve.controlPoints[i];
      const pos = spectrogramToScreen(cp.frame, cp.freqHz);

      const dx = screenX - pos.x;
      const dy = screenY - pos.y;
      if (dx * dx + dy * dy <= radiusSq) {
        return {curveId: curve.id, pointIdx: i};
      }
    }
  }

  return null;
}
+
// Removes the currently selected control point (Delete key shortcut).
function deleteSelectedControlPoint() {
  if (state.selectedCurveId === null || state.selectedControlPointIdx === null) return;

  const curve = state.curves.find(c => c.id === state.selectedCurveId);
  if (!curve) return;
  if (state.selectedControlPointIdx >= curve.controlPoints.length) return; // stale index

  curve.controlPoints.splice(state.selectedControlPointIdx, 1);
  state.selectedControlPointIdx = null;

  saveHistoryState('Delete control point');
  updateCurveUI();
  render();
}
+
// Clears both the curve and control-point selection (Escape shortcut).
function deselectAll() {
  Object.assign(state, {
    selectedCurveId: null,
    selectedControlPointIdx: null
  });
  updateCurveUI();
  updatePointInfo();
  render();
}
+
// Tracks the cursor position for the crosshair and the mini spectrum viewer.
function onCanvasHover(e) {
  const rect = e.target.getBoundingClientRect();
  state.mouseX = e.clientX - rect.left;
  state.mouseY = e.clientY - rect.top;

  // Spectrogram coordinates under the cursor.
  const under = screenToSpectrogram(state.mouseX, state.mouseY);
  state.mouseFrame = Math.floor(under.frame);
  state.mouseFreq = under.freqHz;

  // Skip the repaint while dragging — onCanvasMouseMove already renders and
  // the extra draw would only slow the drag down.
  if (isDragging) return;
  render();
  drawSpectrumViewer(); // refresh the viewer with the hovered frame
}
+
// Cursor left the canvas: mark the hover position invalid (-1 sentinel,
// which hides the crosshair) and repaint.
function onCanvasLeave(e) {
  state.mouseX = state.mouseY = -1;
  render();
}
+
+// ============================================================================
+// Coordinate Conversion
+// ============================================================================
+
// Maps canvas pixel coordinates to spectrogram coordinates
// {frame, freqHz, amplitude}. With USE_LOG_SCALE the vertical axis is
// logarithmic between FREQ_MIN and FREQ_MAX; amplitude is simply the
// normalized height of the cursor (top of canvas = 1.0, bottom = 0.0).
function screenToSpectrogram(screenX, screenY) {
  const clamp = (v, lo, hi) => Math.min(hi, Math.max(lo, v));

  // Fraction of the canvas height, flipped so 0 = bottom, 1 = top.
  const heightFrac = 1.0 - (screenY / state.canvasHeight);

  let freqHz;
  if (USE_LOG_SCALE) {
    const logMin = Math.log10(FREQ_MIN);
    const logMax = Math.log10(FREQ_MAX);
    freqHz = Math.pow(10, logMin + heightFrac * (logMax - logMin));
  } else {
    // Linear frequency mapping (legacy behavior).
    const bin = Math.round((state.canvasHeight - screenY) / state.pixelsPerBin);
    freqHz = (bin / state.referenceDctSize) * (SAMPLE_RATE / 2);
  }

  return {
    frame: Math.max(0, Math.round(screenX / state.pixelsPerFrame)),
    freqHz: clamp(freqHz, FREQ_MIN, FREQ_MAX),
    amplitude: clamp(heightFrac, 0, 1)
  };
}
+
+/**
+ * Inverse of screenToSpectrogram(): map a (frame, frequency) pair to
+ * canvas pixel coordinates.
+ *
+ * @param {number} frame Frame index (time axis).
+ * @param {number} freqHz Frequency in Hz; clamped to [FREQ_MIN, FREQ_MAX]
+ *   before mapping in log-scale mode.
+ * @returns {{x: number, y: number}} Canvas pixel position.
+ */
+function spectrogramToScreen(frame, freqHz) {
+  const x = frame * state.pixelsPerFrame;
+
+  let y;
+  if (USE_LOG_SCALE) {
+    // Logarithmic frequency mapping
+    const logMin = Math.log10(FREQ_MIN);
+    const logMax = Math.log10(FREQ_MAX);
+    const clampedFreq = Math.max(FREQ_MIN, Math.min(FREQ_MAX, freqHz));
+    const logFreq = Math.log10(clampedFreq);
+    const normalizedY = (logFreq - logMin) / (logMax - logMin);
+    y = state.canvasHeight * (1.0 - normalizedY); // Flip Y back to screen coords
+  } else {
+    // Linear frequency mapping (old behavior)
+    const bin = (freqHz / (SAMPLE_RATE / 2)) * state.referenceDctSize;
+    y = state.canvasHeight - (bin * state.pixelsPerBin);
+  }
+
+  return {x, y};
+}
+
+// ============================================================================
+// Rendering
+// ============================================================================
+
+function render() {
+ const canvas = document.getElementById('spectrogramCanvas');
+ const ctx = canvas.getContext('2d');
+
+ // Clear canvas
+ ctx.fillStyle = '#1e1e1e';
+ ctx.fillRect(0, 0, canvas.width, canvas.height);
+
+ // Draw reference spectrogram (background)
+ if (state.referenceSpectrogram) {
+ drawReferenceSpectrogram(ctx);
+ }
+
+ // Draw procedural spectrogram (foreground)
+ if (state.curves.length > 0) {
+ drawProceduralSpectrogram(ctx);
+ }
+
+ // Draw frequency axis (log-scale grid and labels)
+ drawFrequencyAxis(ctx);
+
+ // Draw playhead indicator
+ drawPlayhead(ctx);
+
+ // Draw mouse crosshair and tooltip
+ drawCrosshair(ctx);
+
+ // Draw control points
+ drawControlPoints(ctx);
+}
+
+function drawPlayhead(ctx) {
+ if (!state.isPlaying || state.playbackCurrentFrame < 0) return;
+
+ const x = state.playbackCurrentFrame * state.pixelsPerFrame;
+
+ // Draw vertical line
+ ctx.strokeStyle = '#ff3333'; // Bright red
+ ctx.lineWidth = 2;
+ ctx.setLineDash([5, 3]); // Dashed line
+ ctx.beginPath();
+ ctx.moveTo(x, 0);
+ ctx.lineTo(x, state.canvasHeight);
+ ctx.stroke();
+ ctx.setLineDash([]); // Reset to solid line
+}
+
+function drawCrosshair(ctx) {
+ if (state.mouseX < 0 || state.mouseY < 0) return;
+
+ // Draw vertical line
+ ctx.strokeStyle = 'rgba(255, 255, 255, 0.3)';
+ ctx.lineWidth = 1;
+ ctx.beginPath();
+ ctx.moveTo(state.mouseX, 0);
+ ctx.lineTo(state.mouseX, state.canvasHeight);
+ ctx.stroke();
+
+ // Draw tooltip
+ const frameText = `Frame: ${state.mouseFrame}`;
+ const freqText = `Freq: ${state.mouseFreq.toFixed(1)} Hz`;
+
+ ctx.font = '12px monospace';
+ const frameWidth = ctx.measureText(frameText).width;
+ const freqWidth = ctx.measureText(freqText).width;
+ const maxWidth = Math.max(frameWidth, freqWidth);
+
+ const tooltipX = state.mouseX + 10;
+ const tooltipY = state.mouseY - 40;
+ const tooltipWidth = maxWidth + 20;
+ const tooltipHeight = 40;
+
+ // Background
+ ctx.fillStyle = 'rgba(0, 0, 0, 0.8)';
+ ctx.fillRect(tooltipX, tooltipY, tooltipWidth, tooltipHeight);
+
+ // Border
+ ctx.strokeStyle = 'rgba(255, 255, 255, 0.3)';
+ ctx.lineWidth = 1;
+ ctx.strokeRect(tooltipX, tooltipY, tooltipWidth, tooltipHeight);
+
+ // Text
+ ctx.fillStyle = '#ffffff';
+ ctx.fillText(frameText, tooltipX + 10, tooltipY + 15);
+ ctx.fillText(freqText, tooltipX + 10, tooltipY + 30);
+}
+
+/**
+ * Paint the loaded reference spectrogram as a grayscale background layer.
+ *
+ * Renders into an offscreen canvas first so the whole layer can be
+ * composited at state.referenceOpacity with a single drawImage call.
+ * NOTE(review): the offscreen canvas is re-allocated on every render() —
+ * consider caching it if rendering ever becomes a bottleneck.
+ *
+ * @param {CanvasRenderingContext2D} ctx Target 2D context.
+ */
+function drawReferenceSpectrogram(ctx) {
+  // Create offscreen canvas for reference layer
+  const offscreen = document.createElement('canvas');
+  offscreen.width = state.canvasWidth;
+  offscreen.height = state.canvasHeight;
+  const offscreenCtx = offscreen.getContext('2d');
+
+  const imgData = offscreenCtx.createImageData(state.canvasWidth, state.canvasHeight);
+
+  // CORRECT MAPPING: Iterate over destination pixels → sample source bins
+  // This prevents gaps and overlaps
+  for (let screenY = 0; screenY < state.canvasHeight; screenY++) {
+    for (let screenX = 0; screenX < state.canvasWidth; screenX++) {
+      // Convert screen coordinates to spectrogram coordinates
+      const spectroCoords = screenToSpectrogram(screenX, screenY);
+      const frameIdx = Math.floor(spectroCoords.frame);
+
+      // Convert freqHz back to bin
+      const bin = Math.round((spectroCoords.freqHz / (SAMPLE_RATE / 2)) * state.referenceDctSize);
+
+      // Bounds check: out-of-range pixels stay transparent (alpha 0)
+      if (frameIdx < 0 || frameIdx >= state.referenceNumFrames) continue;
+      if (bin < 0 || bin >= state.referenceDctSize) continue;
+
+      // Sample spectrogram
+      const specValue = state.referenceSpectrogram[frameIdx * state.referenceDctSize + bin];
+
+      // Logarithmic intensity mapping (dB scale)
+      // Maps wide dynamic range to visible range
+      const amplitude = Math.abs(specValue);
+      let intensity = 0;
+      if (amplitude > 0.0001) { // Noise floor
+        const dB = 20.0 * Math.log10(amplitude);
+        const dB_min = -60.0; // Noise floor (-60 dB)
+        const dB_max = 40.0; // Peak (40 dB headroom)
+        const normalized = (dB - dB_min) / (dB_max - dB_min);
+        intensity = Math.floor(Math.max(0, Math.min(255, normalized * 255)));
+      }
+
+      // Write pixel (grayscale: same value in R, G and B)
+      const pixelIdx = (screenY * state.canvasWidth + screenX) * 4;
+      imgData.data[pixelIdx + 0] = intensity; // R
+      imgData.data[pixelIdx + 1] = intensity; // G
+      imgData.data[pixelIdx + 2] = intensity; // B
+      imgData.data[pixelIdx + 3] = 255; // A
+    }
+  }
+
+  offscreenCtx.putImageData(imgData, 0, 0);
+
+  // Draw offscreen canvas with proper alpha blending
+  ctx.globalAlpha = state.referenceOpacity;
+  ctx.drawImage(offscreen, 0, 0);
+  ctx.globalAlpha = 1.0;
+}
+
+/**
+ * Paint every procedural curve as a colored foreground layer.
+ *
+ * Each curve is rasterized into its own spectrogram buffer and offscreen
+ * canvas, then composited with opacity proportional to the curve's volume.
+ * NOTE(review): both the buffer and the offscreen canvas are re-created
+ * per curve on every render() — a potential hot spot for many curves.
+ *
+ * @param {CanvasRenderingContext2D} ctx Target 2D context.
+ */
+function drawProceduralSpectrogram(ctx) {
+  // Draw each curve separately with its own color and volume
+  const numFrames = state.referenceNumFrames || 100;
+
+  state.curves.forEach(curve => {
+    if (curve.controlPoints.length === 0) return;
+
+    // Create offscreen canvas for this curve
+    const offscreen = document.createElement('canvas');
+    offscreen.width = state.canvasWidth;
+    offscreen.height = state.canvasHeight;
+    const offscreenCtx = offscreen.getContext('2d');
+
+    // Generate spectrogram for this curve only
+    const curveSpec = new Float32Array(state.referenceDctSize * numFrames);
+    drawCurveToSpectrogram(curve, curveSpec, state.referenceDctSize, numFrames);
+
+    // Parse curve color (hex to RGB)
+    const color = hexToRgb(curve.color || '#0e639c');
+
+    const imgData = offscreenCtx.createImageData(state.canvasWidth, state.canvasHeight);
+
+    // CORRECT MAPPING: Iterate over destination pixels → sample source bins
+    for (let screenY = 0; screenY < state.canvasHeight; screenY++) {
+      for (let screenX = 0; screenX < state.canvasWidth; screenX++) {
+        // Convert screen coordinates to spectrogram coordinates
+        const spectroCoords = screenToSpectrogram(screenX, screenY);
+        const frameIdx = Math.floor(spectroCoords.frame);
+
+        // Convert freqHz back to bin
+        const bin = Math.round((spectroCoords.freqHz / (SAMPLE_RATE / 2)) * state.referenceDctSize);
+
+        // Bounds check: out-of-range pixels stay transparent
+        if (frameIdx < 0 || frameIdx >= numFrames) continue;
+        if (bin < 0 || bin >= state.referenceDctSize) continue;
+
+        // Sample spectrogram
+        const specValue = curveSpec[frameIdx * state.referenceDctSize + bin];
+
+        // Logarithmic intensity mapping with steeper falloff for procedural curves
+        const amplitude = Math.abs(specValue);
+        let intensity = 0.0;
+        if (amplitude > 0.001) { // Higher noise floor for cleaner visualization
+          const dB = 20.0 * Math.log10(amplitude);
+          const dB_min = -40.0; // Higher floor = steeper falloff (was -60)
+          const dB_max = 40.0; // Peak
+          const normalized = (dB - dB_min) / (dB_max - dB_min);
+          intensity = Math.max(0, Math.min(1.0, normalized)); // 0.0 to 1.0
+        }
+
+        if (intensity > 0.01) { // Only draw visible pixels
+          const pixelIdx = (screenY * state.canvasWidth + screenX) * 4;
+          // Use constant color with alpha for intensity (pure colors)
+          imgData.data[pixelIdx + 0] = color.r;
+          imgData.data[pixelIdx + 1] = color.g;
+          imgData.data[pixelIdx + 2] = color.b;
+          imgData.data[pixelIdx + 3] = Math.floor(intensity * 255); // Alpha = intensity
+        }
+      }
+    }
+
+    offscreenCtx.putImageData(imgData, 0, 0);
+
+    // Draw offscreen canvas with curve volume as opacity (blends properly)
+    const curveOpacity = 0.6 * curve.volume; // Base opacity × curve volume
+    ctx.globalAlpha = curveOpacity;
+    ctx.drawImage(offscreen, 0, 0);
+  });
+
+  // Restore default alpha for subsequent layers
+  ctx.globalAlpha = 1.0;
+}
+
+// Helper: Convert hex color to RGB
+function hexToRgb(hex) {
+ const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
+ return result ? {
+ r: parseInt(result[1], 16),
+ g: parseInt(result[2], 16),
+ b: parseInt(result[3], 16)
+ } : {r: 14, g: 99, b: 156}; // Default blue
+}
+
+/**
+ * Draw every curve's path and control-point handles on top of the
+ * spectrogram, highlighting the selected curve and selected point.
+ *
+ * @param {CanvasRenderingContext2D} ctx Target 2D context.
+ */
+function drawControlPoints(ctx) {
+  state.curves.forEach(curve => {
+    const isSelected = curve.id === state.selectedCurveId;
+    const curveColor = curve.color || '#0e639c';
+
+    // Draw the curve path as straight segments between control points
+    // (a linear preview — matches evaluateBezierLinear's interpolation)
+    if (curve.controlPoints.length >= 2) {
+      ctx.strokeStyle = isSelected ? curveColor : '#666666';
+      ctx.lineWidth = isSelected ? 3 : 2;
+      ctx.beginPath();
+
+      for (let i = 0; i < curve.controlPoints.length; i++) {
+        const point = curve.controlPoints[i];
+        const screenPos = spectrogramToScreen(point.frame, point.freqHz);
+
+        if (i === 0) {
+          ctx.moveTo(screenPos.x, screenPos.y);
+        } else {
+          ctx.lineTo(screenPos.x, screenPos.y);
+        }
+      }
+
+      ctx.stroke();
+    }
+
+    // Draw control points (selected point in orange, selected curve in its
+    // own color, everything else gray)
+    curve.controlPoints.forEach((point, idx) => {
+      const screenPos = spectrogramToScreen(point.frame, point.freqHz);
+      const isPointSelected = isSelected && idx === state.selectedControlPointIdx;
+
+      ctx.fillStyle = isPointSelected ? '#ffaa00' : (isSelected ? curveColor : '#888888');
+      ctx.beginPath();
+      ctx.arc(screenPos.x, screenPos.y, 6, 0, 2 * Math.PI);
+      ctx.fill();
+
+      ctx.strokeStyle = '#ffffff';
+      ctx.lineWidth = 2;
+      ctx.stroke();
+
+      // Draw frequency label next to each point of the selected curve
+      if (isSelected) {
+        ctx.fillStyle = '#ffffff';
+        ctx.font = '11px monospace';
+        ctx.fillText(`${Math.round(point.freqHz)}Hz`, screenPos.x + 10, screenPos.y - 5);
+      }
+    });
+  });
+}
+
+function drawFrequencyAxis(ctx) {
+ if (!USE_LOG_SCALE) return; // Only draw axis in log-scale mode
+
+ // Standard musical frequencies to display
+ const frequencies = [20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 16000];
+
+ ctx.fillStyle = '#cccccc';
+ ctx.font = '11px monospace';
+ ctx.textAlign = 'right';
+ ctx.textBaseline = 'middle';
+
+ frequencies.forEach(freq => {
+ const screenPos = spectrogramToScreen(0, freq);
+ const y = screenPos.y;
+
+ if (y >= 0 && y <= state.canvasHeight) {
+ // Draw frequency label
+ const label = freq >= 1000 ? `${freq / 1000}k` : `${freq}`;
+ ctx.fillText(label, state.canvasWidth - 5, y);
+
+ // Draw subtle grid line
+ ctx.strokeStyle = 'rgba(255, 255, 255, 0.1)';
+ ctx.lineWidth = 1;
+ ctx.beginPath();
+ ctx.moveTo(0, y);
+ ctx.lineTo(state.canvasWidth - 40, y); // Leave space for label
+ ctx.stroke();
+ }
+ });
+}
+
+// ============================================================================
+// Procedural Spectrogram Generation
+// ============================================================================
+
+function generateProceduralSpectrogram(numFrames) {
+ const spectrogram = new Float32Array(state.referenceDctSize * numFrames);
+
+ // For each curve, draw its contribution
+ state.curves.forEach(curve => {
+ drawCurveToSpectrogram(curve, spectrogram, state.referenceDctSize, numFrames);
+ });
+
+ return spectrogram;
+}
+
+/**
+ * Additively rasterize a single curve into a spectrogram buffer.
+ *
+ * @param {Object} curve Curve with controlPoints, profile and volume.
+ * @param {Float32Array} spectrogram Output buffer (numFrames × dctSize);
+ *   values are accumulated with += so multiple curves can layer.
+ * @param {number} dctSize Number of bins per frame.
+ * @param {number} numFrames Total frames in the buffer.
+ */
+function drawCurveToSpectrogram(curve, spectrogram, dctSize, numFrames) {
+  if (curve.controlPoints.length === 0) return;
+
+  // Find the frame range covered by control points
+  const frames = curve.controlPoints.map(p => p.frame);
+  const minFrame = Math.max(0, Math.min(...frames)); // Clamp to valid range
+  const maxFrame = Math.min(numFrames - 1, Math.max(...frames));
+
+  // Amplitude scaling factor to match typical DCT coefficient magnitudes
+  // Increased from 10.0 to 50.0 for better audibility
+  const AMPLITUDE_SCALE = 50.0;
+
+  // Apply curve volume to the amplitude (default 1.0 when unset)
+  const curveVolume = curve.volume || 1.0;
+
+  // Only iterate over the range where control points exist
+  for (let frame = minFrame; frame <= maxFrame; frame++) {
+    // Evaluate the (piecewise-linear) curve at this frame
+    const freqHz = evaluateBezierLinear(curve.controlPoints, frame, 'freqHz');
+    const amplitude = evaluateBezierLinear(curve.controlPoints, frame, 'amplitude');
+
+    // Convert freq to a fractional bin center; the profile below is
+    // evaluated on each bin's distance from this center
+    const freqBin0 = (freqHz / (SAMPLE_RATE / 2)) * dctSize;
+
+    // Apply vertical profile across every bin of the frame
+    for (let bin = 0; bin < dctSize; bin++) {
+      const dist = Math.abs(bin - freqBin0);
+      const profileValue = evaluateProfile(curve.profile, dist);
+
+      const idx = frame * dctSize + bin;
+      spectrogram[idx] += amplitude * profileValue * AMPLITUDE_SCALE * curveVolume;
+    }
+  }
+}
+
+function evaluateBezierLinear(controlPoints, frame, property) {
+ if (controlPoints.length === 0) return 0;
+ if (controlPoints.length === 1) return controlPoints[0][property];
+
+ const frames = controlPoints.map(p => p.frame);
+ const values = controlPoints.map(p => p[property]);
+
+ // Clamp to range
+ if (frame <= frames[0]) return values[0];
+ if (frame >= frames[frames.length - 1]) return values[values.length - 1];
+
+ // Find segment
+ for (let i = 0; i < frames.length - 1; i++) {
+ if (frame >= frames[i] && frame <= frames[i + 1]) {
+ const t = (frame - frames[i]) / (frames[i + 1] - frames[i]);
+ return values[i] * (1 - t) + values[i + 1] * t;
+ }
+ }
+
+ return values[values.length - 1];
+}
+
+/**
+ * Evaluate a curve's vertical (frequency-axis) intensity profile.
+ *
+ * @param {Object} profile {type, param1, param2}; parameter meaning depends
+ *   on the profile type (see cases below).
+ * @param {number} distance Distance in bins from the curve's center frequency.
+ * @returns {number} Profile weight; 0 for unknown profile types.
+ */
+function evaluateProfile(profile, distance) {
+  switch (profile.type) {
+    case 'gaussian': {
+      // param1 = sigma (width in bins)
+      const sigma = profile.param1;
+      return Math.exp(-(distance * distance) / (sigma * sigma));
+    }
+
+    case 'decaying_sinusoid': {
+      // param1 = exponential decay rate, param2 = oscillation frequency
+      const decay = profile.param1;
+      const omega = profile.param2 || 0.5;
+      return Math.exp(-decay * distance) * Math.cos(omega * distance);
+    }
+
+    case 'noise': {
+      const amplitude = profile.param1;
+      // NOTE(review): param2 is used here as a Gaussian-like decay width,
+      // but generateProceduralParamsText() exports it under the label
+      // 'seed' — confirm the intended meaning and unify the naming.
+      const decay = profile.param2 || 30.0; // Decay rate (like sigma for Gaussian)
+
+      // Deterministic pseudo-random value derived only from the distance,
+      // so repeated renders are stable (the seed below is a fixed constant)
+      const seed = 1234;
+      const hash = Math.floor((seed + distance * 17.13) * 1000) % 10000;
+      const noise = (hash / 10000) * 2.0 - 1.0; // Random value: -1 to +1
+
+      // Apply exponential decay (like Gaussian)
+      const decayFactor = Math.exp(-(distance * distance) / (decay * decay));
+
+      return amplitude * noise * decayFactor;
+    }
+
+    default:
+      return 0;
+  }
+}
+
+// ============================================================================
+// Audio Playback
+// ============================================================================
+
+/**
+ * Synthesize audio from a spectrogram and play it through the Web Audio
+ * graph, starting the playhead animation.
+ *
+ * @param {string} source 'original' plays the loaded reference
+ *   spectrogram; any other value plays the procedural curves.
+ */
+function playAudio(source) {
+  if (!state.audioContext) {
+    alert('Audio context not available');
+    return;
+  }
+
+  // Stop any playback already in flight before starting a new one
+  stopAudio();
+
+  let spectrogram;
+  let numFrames;
+
+  if (source === 'original') {
+    if (!state.referenceSpectrogram) {
+      alert('No reference audio loaded');
+      return;
+    }
+    spectrogram = state.referenceSpectrogram;
+    numFrames = state.referenceNumFrames;
+  } else { // procedural
+    if (state.curves.length === 0) {
+      alert('No curves defined. Add a curve first.');
+      return;
+    }
+    numFrames = state.referenceNumFrames || 100;
+    spectrogram = generateProceduralSpectrogram(numFrames);
+  }
+
+  // Convert spectrogram to audio via IDCT
+  const audioData = spectrogramToAudio(spectrogram, state.referenceDctSize, numFrames);
+
+  // Create audio buffer (mono, SAMPLE_RATE)
+  const audioBuffer = state.audioContext.createBuffer(1, audioData.length, SAMPLE_RATE);
+  audioBuffer.getChannelData(0).set(audioData);
+
+  // Create gain node for volume control
+  const gainNode = state.audioContext.createGain();
+  gainNode.gain.value = state.playbackVolume;
+
+  // Play: source -> gain -> destination
+  const bufferSource = state.audioContext.createBufferSource();
+  bufferSource.buffer = audioBuffer;
+  bufferSource.connect(gainNode);
+  gainNode.connect(state.audioContext.destination);
+  bufferSource.start();
+
+  state.currentSource = bufferSource;
+  state.currentGainNode = gainNode; // Store gain node for live volume updates
+  state.isPlaying = true;
+
+  // Start playhead animation (driven by the audio clock)
+  state.playbackStartTime = state.audioContext.currentTime;
+  state.playbackDuration = audioData.length / SAMPLE_RATE;
+  state.playbackCurrentFrame = 0;
+  updatePlayhead();
+
+  // Clean up state when the buffer finishes playing naturally
+  bufferSource.onended = () => {
+    state.isPlaying = false;
+    state.currentSource = null;
+    state.currentGainNode = null; // Clear gain node reference
+    state.playbackCurrentFrame = 0;
+    render(); // Clear playhead
+  };
+
+  console.log('Playing audio:', audioData.length, 'samples at volume', state.playbackVolume);
+}
+
+function updatePlayhead() {
+ if (!state.isPlaying) return;
+
+ // Calculate current playback position
+ const elapsed = state.audioContext.currentTime - state.playbackStartTime;
+ const progress = Math.min(1.0, elapsed / state.playbackDuration);
+ state.playbackCurrentFrame = progress * (state.referenceNumFrames || 100);
+
+ // Redraw with playhead
+ render();
+
+ // Update spectrum viewer
+ drawSpectrumViewer();
+
+ // Continue animation
+ requestAnimationFrame(updatePlayhead);
+}
+
+/**
+ * Draw the mini spectrum viewer (bottom-right overlay) for a single frame:
+ * the reference spectrum in green behind, the procedural spectrum in red
+ * overlaid. During playback the frame follows the playhead; otherwise it
+ * follows the mouse.
+ */
+function drawSpectrumViewer() {
+  const viewer = document.getElementById('spectrumViewer');
+  const canvas = document.getElementById('spectrumCanvas');
+  const ctx = canvas.getContext('2d');
+
+  // Always show viewer (not just during playback)
+  viewer.classList.add('active');
+
+  // Determine which frame to display
+  let frameIdx;
+  if (state.isPlaying) {
+    frameIdx = Math.floor(state.playbackCurrentFrame);
+  } else {
+    // When not playing, show frame under mouse
+    frameIdx = state.mouseFrame;
+  }
+
+  // Out-of-range frame (e.g. mouse off-canvas): leave the last image as-is
+  if (frameIdx < 0 || frameIdx >= (state.referenceNumFrames || 100)) return;
+
+  // Clear canvas
+  ctx.fillStyle = '#1e1e1e';
+  ctx.fillRect(0, 0, canvas.width, canvas.height);
+
+  const numBars = 100; // Downsample to 100 bars for performance
+  const barWidth = canvas.width / numBars;
+
+  // Get reference spectrum (if available)
+  let refSpectrum = null;
+  if (state.referenceSpectrogram && frameIdx < state.referenceNumFrames) {
+    refSpectrum = new Float32Array(state.referenceDctSize);
+    for (let bin = 0; bin < state.referenceDctSize; bin++) {
+      refSpectrum[bin] = state.referenceSpectrogram[frameIdx * state.referenceDctSize + bin];
+    }
+  }
+
+  // Get procedural spectrum (if curves exist)
+  // NOTE(review): this rasterizes ALL frames of every curve just to read
+  // one frame — a per-frame rasterizer would be much cheaper here.
+  let procSpectrum = null;
+  if (state.curves.length > 0) {
+    const numFrames = state.referenceNumFrames || 100;
+    const fullProcSpec = new Float32Array(state.referenceDctSize * numFrames);
+    state.curves.forEach(curve => {
+      drawCurveToSpectrogram(curve, fullProcSpec, state.referenceDctSize, numFrames);
+    });
+
+    // Extract just this frame
+    procSpectrum = new Float32Array(state.referenceDctSize);
+    for (let bin = 0; bin < state.referenceDctSize; bin++) {
+      procSpectrum[bin] = fullProcSpec[frameIdx * state.referenceDctSize + bin];
+    }
+  }
+
+  // Draw spectrum bars (both reference and procedural overlaid)
+  for (let i = 0; i < numBars; i++) {
+    const binIdx = Math.floor((i / numBars) * state.referenceDctSize);
+
+    // Draw reference spectrum (green, behind)
+    if (refSpectrum) {
+      const amplitude = Math.abs(refSpectrum[binIdx]);
+      let height = 0;
+      if (amplitude > 0.0001) {
+        // dB mapping matches drawReferenceSpectrogram (-60..40 dB)
+        const dB = 20.0 * Math.log10(amplitude);
+        const dB_min = -60.0;
+        const dB_max = 40.0;
+        const normalized = (dB - dB_min) / (dB_max - dB_min);
+        height = Math.max(0, Math.min(canvas.height, normalized * canvas.height));
+      }
+
+      if (height > 0) {
+        const gradient = ctx.createLinearGradient(0, canvas.height - height, 0, canvas.height);
+        gradient.addColorStop(0, '#00ff00');
+        gradient.addColorStop(1, '#004400');
+        ctx.fillStyle = gradient;
+        ctx.fillRect(i * barWidth, canvas.height - height, barWidth - 1, height);
+      }
+    }
+
+    // Draw procedural spectrum (red, overlaid)
+    if (procSpectrum) {
+      const amplitude = Math.abs(procSpectrum[binIdx]);
+      let height = 0;
+      if (amplitude > 0.001) {
+        const dB = 20.0 * Math.log10(amplitude);
+        const dB_min = -40.0; // Same as procedural spectrogram rendering
+        const dB_max = 40.0;
+        const normalized = (dB - dB_min) / (dB_max - dB_min);
+        height = Math.max(0, Math.min(canvas.height, normalized * canvas.height));
+      }
+
+      if (height > 0) {
+        const gradient = ctx.createLinearGradient(0, canvas.height - height, 0, canvas.height);
+        gradient.addColorStop(0, '#ff5555'); // Bright red
+        gradient.addColorStop(1, '#550000'); // Dark red
+        ctx.fillStyle = gradient;
+        // Make it slightly transparent to see overlap
+        ctx.globalAlpha = 0.7;
+        ctx.fillRect(i * barWidth, canvas.height - height, barWidth - 1, height);
+        ctx.globalAlpha = 1.0;
+      }
+    }
+  }
+
+  // Draw frequency labels
+  ctx.fillStyle = '#888888';
+  ctx.font = '9px monospace';
+  ctx.textAlign = 'left';
+  ctx.fillText('20 Hz', 2, canvas.height - 2);
+  ctx.textAlign = 'right';
+  ctx.fillText('16 kHz', canvas.width - 2, canvas.height - 2);
+
+  // Draw frame number label (top-left); red while playing
+  ctx.textAlign = 'left';
+  ctx.fillStyle = state.isPlaying ? '#ff3333' : '#aaaaaa';
+  ctx.fillText(`Frame ${frameIdx}`, 2, 10);
+}
+
+function stopAudio() {
+ if (state.currentSource) {
+ try {
+ state.currentSource.stop();
+ state.currentSource.disconnect();
+ } catch (e) {
+ // Source may have already stopped naturally
+ }
+ state.currentSource = null;
+ }
+ state.currentGainNode = null; // Clear gain node reference
+ state.isPlaying = false;
+}
+
+/**
+ * Reconstruct time-domain audio from a DCT spectrogram via IDCT and
+ * 50%-overlap add (hop = dctSize / 2) with a synthesis window.
+ *
+ * Depends on file-level globals: hanningWindowArray (window of length
+ * dctSize — assumed; TODO confirm) and javascript_idct_512 (inverse DCT).
+ *
+ * @param {Float32Array} spectrogram numFrames × dctSize coefficients.
+ * @param {number} dctSize Frame length in bins/samples.
+ * @param {number} numFrames Number of frames.
+ * @returns {Float32Array} Mono audio samples.
+ */
+function spectrogramToAudio(spectrogram, dctSize, numFrames) {
+  const hopSize = dctSize / 2;
+  const audioLength = numFrames * hopSize + dctSize;
+  const audioData = new Float32Array(audioLength);
+  // NOTE(review): `window` shadows the global window object within this
+  // function — works, but consider renaming to avoid confusion.
+  const window = hanningWindowArray;
+
+  for (let frameIdx = 0; frameIdx < numFrames; frameIdx++) {
+    // Extract frame (no windowing - window is only for analysis, not synthesis)
+    const frame = new Float32Array(dctSize);
+    for (let b = 0; b < dctSize; b++) {
+      frame[b] = spectrogram[frameIdx * dctSize + b];
+    }
+
+    // IDCT
+    const timeFrame = javascript_idct_512(frame);
+
+    // Apply synthesis window for overlap-add
+    const frameStart = frameIdx * hopSize;
+    for (let i = 0; i < dctSize; i++) {
+      if (frameStart + i < audioLength) {
+        audioData[frameStart + i] += timeFrame[i] * window[i];
+      }
+    }
+  }
+
+  return audioData;
+}
+
+// ============================================================================
+// Undo/Redo
+// ============================================================================
+
+/**
+ * Push a deep-copied snapshot of the curve state onto the undo history.
+ *
+ * @param {string} action Human-readable label for the edit (used in logs).
+ */
+function saveHistoryState(action) {
+  // Remove any states after current index (discards the redo branch)
+  state.history = state.history.slice(0, state.historyIndex + 1);
+
+  // Save current state
+  const snapshot = {
+    action,
+    // Deep copy via JSON round-trip so later edits can't mutate the snapshot
+    curves: JSON.parse(JSON.stringify(state.curves)),
+    selectedCurveId: state.selectedCurveId
+  };
+
+  state.history.push(snapshot);
+
+  // Limit history size
+  if (state.history.length > state.maxHistorySize) {
+    // Dropping the oldest entry shifts every index down by one, so the
+    // current index already points at the snapshot just pushed.
+    state.history.shift();
+  } else {
+    state.historyIndex++;
+  }
+
+  updateUndoRedoButtons();
+}
+
+function undo() {
+ if (state.historyIndex <= 0) return;
+
+ state.historyIndex--;
+ const snapshot = state.history[state.historyIndex];
+
+ state.curves = JSON.parse(JSON.stringify(snapshot.curves));
+ state.selectedCurveId = snapshot.selectedCurveId;
+ state.selectedControlPointIdx = null;
+
+ updateCurveUI();
+ updateUndoRedoButtons();
+ render();
+
+ console.log('Undo:', snapshot.action);
+}
+
+function redo() {
+ if (state.historyIndex >= state.history.length - 1) return;
+
+ state.historyIndex++;
+ const snapshot = state.history[state.historyIndex];
+
+ state.curves = JSON.parse(JSON.stringify(snapshot.curves));
+ state.selectedCurveId = snapshot.selectedCurveId;
+ state.selectedControlPointIdx = null;
+
+ updateCurveUI();
+ updateUndoRedoButtons();
+ render();
+
+ console.log('Redo:', snapshot.action);
+}
+
+function updateUndoRedoButtons() {
+ document.getElementById('undoBtn').disabled = state.historyIndex <= 0;
+ document.getElementById('redoBtn').disabled = state.historyIndex >= state.history.length - 1;
+}
+
+// ============================================================================
+// File Export
+// ============================================================================
+
+function saveProceduralParams() {
+ if (state.curves.length === 0) {
+ alert('No curves to save. Add at least one curve first.');
+ return;
+ }
+
+ const text = generateProceduralParamsText();
+ downloadTextFile('procedural_params.txt', text);
+}
+
+function generateProceduralParamsText() {
+ let text = '# Spectral Brush Procedural Parameters\n';
+ text += `METADATA dct_size=${state.referenceDctSize} num_frames=${state.referenceNumFrames || 100} sample_rate=${SAMPLE_RATE}\n\n`;
+
+ state.curves.forEach((curve, idx) => {
+ text += `CURVE bezier\n`;
+
+ curve.controlPoints.forEach(point => {
+ text += ` CONTROL_POINT ${point.frame} ${point.freqHz.toFixed(1)} ${point.amplitude.toFixed(3)}\n`;
+ });
+
+ text += ` PROFILE ${curve.profile.type}`;
+ if (curve.profile.type === 'gaussian') {
+ text += ` sigma=${curve.profile.param1.toFixed(1)}`;
+ } else if (curve.profile.type === 'decaying_sinusoid') {
+ text += ` decay=${curve.profile.param1.toFixed(2)} frequency=${curve.profile.param2.toFixed(2)}`;
+ } else if (curve.profile.type === 'noise') {
+ text += ` amplitude=${curve.profile.param1.toFixed(2)} seed=${curve.profile.param2.toFixed(0)}`;
+ }
+ text += '\n';
+
+ // Add curve volume
+ text += ` VOLUME ${curve.volume.toFixed(3)}\n`;
+
+ text += 'END_CURVE\n\n';
+ });
+
+ return text;
+}
+
+function generateCppCode() {
+ if (state.curves.length === 0) {
+ alert('No curves to export. Add at least one curve first.');
+ return;
+ }
+
+ const code = generateCppCodeText();
+ downloadTextFile('gen_procedural.cc', code);
+}
+
+function generateCppCodeText() {
+ let code = '// Generated by Spectral Brush Editor\n';
+ code += '// This code reproduces the procedural audio procedurally at runtime\n\n';
+ code += '#include "audio/spectral_brush.h"\n\n';
+
+ code += 'void gen_procedural(float* spec, int dct_size, int num_frames) {\n';
+
+ state.curves.forEach((curve, curveIdx) => {
+ code += ` // Curve ${curveIdx} (volume=${curve.volume.toFixed(3)})\n`;
+ code += ' {\n';
+
+ // Control points arrays
+ const numPoints = curve.controlPoints.length;
+ code += ` const float frames[] = {`;
+ code += curve.controlPoints.map(p => `${p.frame}.0f`).join(', ');
+ code += '};\n';
+
+ code += ` const float freqs[] = {`;
+ code += curve.controlPoints.map(p => `${p.freqHz.toFixed(1)}f`).join(', ');
+ code += '};\n';
+
+ // Apply curve volume to amplitudes
+ const curveVolume = curve.volume || 1.0;
+ code += ` const float amps[] = {`;
+ code += curve.controlPoints.map(p => `${(p.amplitude * curveVolume).toFixed(3)}f`).join(', ');
+ code += '};\n\n';
+
+ // Profile type
+ let profileEnum;
+ if (curve.profile.type === 'gaussian') {
+ profileEnum = 'PROFILE_GAUSSIAN';
+ } else if (curve.profile.type === 'decaying_sinusoid') {
+ profileEnum = 'PROFILE_DECAYING_SINUSOID';
+ } else if (curve.profile.type === 'noise') {
+ profileEnum = 'PROFILE_NOISE';
+ }
+
+ // Function call
+ if (curveIdx === 0) {
+ code += ` draw_bezier_curve(spec, dct_size, num_frames,\n`;
+ } else {
+ code += ` draw_bezier_curve_add(spec, dct_size, num_frames,\n`;
+ }
+ code += ` frames, freqs, amps, ${numPoints},\n`;
+ code += ` ${profileEnum}, ${curve.profile.param1.toFixed(2)}f`;
+
+ if (curve.profile.type === 'decaying_sinusoid' || curve.profile.type === 'noise') {
+ code += `, ${curve.profile.param2.toFixed(2)}f`;
+ }
+
+ code += ');\n';
+ code += ' }\n\n';
+ });
+
+ code += '}\n\n';
+ code += '// Usage in demo_assets.txt:\n';
+ code += '// SOUND_PROC, PROC(gen_procedural), NONE, "Procedural sound"\n';
+
+ return code;
+}
+
+function downloadTextFile(filename, text) {
+ const blob = new Blob([text], {type: 'text/plain'});
+ const url = URL.createObjectURL(blob);
+
+ const a = document.createElement('a');
+ a.href = url;
+ a.download = filename;
+ document.body.appendChild(a);
+ a.click();
+ document.body.removeChild(a);
+
+ URL.revokeObjectURL(url);
+
+ console.log('Downloaded:', filename);
+}
+
+// ============================================================================
+// Help Modal
+// ============================================================================
+
+function showHelp() {
+ document.getElementById('helpModal').style.display = 'flex';
+}
+
+function hideHelp() {
+ document.getElementById('helpModal').style.display = 'none';
+}
diff --git a/tools/spectral_editor/style.css b/tools/spectral_editor/style.css
new file mode 100644
index 0000000..fa71d1d
--- /dev/null
+++ b/tools/spectral_editor/style.css
@@ -0,0 +1,508 @@
+/* Spectral Brush Editor Styles */
+/* Dark (VS Code-like) palette. Layout: #app is a full-viewport flex column;
+   .main-content holds the canvas and toolbar side by side. */
+
+* {
+  margin: 0;
+  padding: 0;
+  box-sizing: border-box;
+}
+
+body {
+  font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
+  background: #1e1e1e;
+  color: #d4d4d4;
+  overflow: hidden;
+  height: 100vh;
+}
+
+#app {
+  display: flex;
+  flex-direction: column;
+  height: 100vh;
+}
+
+/* Header bar: title on the left, file info / controls pushed to the right */
+header {
+  background: #252526;
+  padding: 12px 20px;
+  border-bottom: 1px solid #3e3e42;
+  display: flex;
+  justify-content: space-between;
+  align-items: center;
+}
+
+header h1 {
+  font-size: 18px;
+  font-weight: 600;
+  color: #cccccc;
+}
+
+.header-controls {
+  display: flex;
+  align-items: center;
+  gap: 15px;
+}
+
+.file-info {
+  font-size: 13px;
+  color: #858585;
+}
+
+/* Main content area: canvas + toolbar row, fills the leftover viewport height */
+.main-content {
+  display: flex;
+  flex: 1;
+  overflow: hidden;
+}
+
+/* Canvas container: flexes to fill all width left of the fixed-width toolbar */
+.canvas-container {
+  flex: 1;
+  position: relative;
+  background: #1e1e1e;
+  border-right: 1px solid #3e3e42;
+}
+
+#spectrogramCanvas {
+  width: 100%;
+  height: 100%;
+  display: block;
+  cursor: crosshair;
+}
+
+/* Full-canvas message overlay (centered text); toggled off via .hidden */
+.canvas-overlay {
+  position: absolute;
+  top: 0;
+  left: 0;
+  width: 100%;
+  height: 100%;
+  display: flex;
+  flex-direction: column;
+  justify-content: center;
+  align-items: center;
+  background: rgba(30, 30, 30, 0.9);
+  pointer-events: none;
+}
+
+.canvas-overlay.hidden {
+  display: none;
+}
+
+/* Mini spectrum viewer (bottom-right overlay) */
+.spectrum-viewer {
+  position: absolute;
+  bottom: 10px;
+  right: 10px;
+  width: 200px;
+  height: 100px;
+  background: rgba(30, 30, 30, 0.9);
+  border: 1px solid #3e3e42;
+  border-radius: 3px;
+  display: block; /* Always visible */
+  pointer-events: none; /* Don't interfere with mouse events */
+}
+
+.spectrum-viewer.active {
+  display: block; /* Keep for backward compatibility */
+}
+
+#spectrumCanvas {
+  width: 100%;
+  height: 100%;
+  display: block;
+}
+
+/* Message text inside the canvas overlay */
+.canvas-overlay p {
+  font-size: 16px;
+  margin: 8px 0;
+}
+
+.canvas-overlay .hint {
+  font-size: 13px;
+  color: #858585;
+}
+
+/* Toolbar: fixed 250px column on the right (scrolls vertically if it overflows) */
+.toolbar {
+  width: 250px;
+  background: #252526;
+  padding: 15px;
+  display: flex;
+  flex-direction: column;
+  gap: 10px;
+  overflow-y: auto;
+}
+
+.toolbar h3 {
+  font-size: 14px;
+  font-weight: 600;
+  color: #cccccc;
+  margin-bottom: 5px;
+}
+
+.btn-toolbar {
+  padding: 8px 12px;
+  background: #0e639c;
+  color: white;
+  border: none;
+  border-radius: 3px;
+  cursor: pointer;
+  font-size: 13px;
+  display: flex;
+  align-items: center;
+  gap: 6px;
+  transition: background 0.2s;
+}
+
+.btn-toolbar:hover {
+  background: #1177bb;
+}
+
+.btn-toolbar:disabled {
+  background: #3e3e42;
+  color: #858585;
+  cursor: not-allowed;
+}
+
+/* Destructive actions (e.g. delete) get a red variant */
+.btn-toolbar.btn-danger {
+  background: #a82d2d;
+}
+
+.btn-toolbar.btn-danger:hover:not(:disabled) {
+  background: #c94242;
+}
+
+.curve-list {
+  margin-top: 10px;
+  display: flex;
+  flex-direction: column;
+  gap: 5px;
+}
+
+.curve-item {
+  padding: 8px 10px;
+  background: #2d2d30;
+  border-radius: 3px;
+  cursor: pointer;
+  font-size: 13px;
+  transition: background 0.2s;
+  border: 1px solid transparent;
+}
+
+.curve-item:hover {
+  background: #3e3e42;
+}
+
+.curve-item.selected {
+  background: #094771;
+  border-color: #0e639c;
+}
+
+/* Point info panel: label/value rows describing the selected point */
+.point-info {
+  margin-top: 10px;
+  padding: 10px;
+  background: #2d2d30;
+  border-radius: 3px;
+  font-size: 12px;
+}
+
+.info-row {
+  display: flex;
+  justify-content: space-between;
+  padding: 4px 0;
+}
+
+.info-label {
+  color: #858585;
+  font-weight: 600;
+}
+
+.info-value {
+  color: #d4d4d4;
+  font-family: monospace;
+}
+
+/* Control panel (bottom): parameter controls laid out in sections */
+.control-panel {
+  background: #252526;
+  border-top: 1px solid #3e3e42;
+  padding: 12px 20px;
+  display: flex;
+  justify-content: space-between;
+  align-items: center;
+  gap: 20px;
+}
+
+.control-section {
+  display: flex;
+  align-items: center;
+  gap: 10px;
+}
+
+.control-section label {
+  font-size: 13px;
+  color: #cccccc;
+  white-space: nowrap;
+}
+
+.select-input {
+  padding: 4px 8px;
+  background: #3c3c3c;
+  color: #cccccc;
+  border: 1px solid #3e3e42;
+  border-radius: 3px;
+  font-size: 13px;
+  cursor: pointer;
+}
+
+/* Range slider: appearance reset so the thumb rules below take effect */
+.slider {
+  width: 150px;
+  height: 4px;
+  -webkit-appearance: none;
+  appearance: none;
+  background: #3e3e42;
+  border-radius: 2px;
+  outline: none;
+}
+
+.slider::-webkit-slider-thumb {
+  -webkit-appearance: none;
+  appearance: none;
+  width: 14px;
+  height: 14px;
+  background: #0e639c;
+  border-radius: 50%;
+  cursor: pointer;
+}
+
+/* Firefox needs its own thumb pseudo-element */
+.slider::-moz-range-thumb {
+  width: 14px;
+  height: 14px;
+  background: #0e639c;
+  border-radius: 50%;
+  cursor: pointer;
+  border: none;
+}
+
+.number-input {
+  width: 60px;
+  padding: 4px 6px;
+  background: #3c3c3c;
+  color: #cccccc;
+  border: 1px solid #3e3e42;
+  border-radius: 3px;
+  font-size: 13px;
+}
+
+/* NOTE(review): only overrides the gap; assumes the element also carries a
+   flex-layout class such as .control-section — confirm in index.html */
+.playback-controls {
+  gap: 8px;
+}
+
+.btn-playback {
+  padding: 6px 12px;
+  background: #0e639c;
+  color: white;
+  border: none;
+  border-radius: 3px;
+  cursor: pointer;
+  font-size: 12px;
+  display: flex;
+  align-items: center;
+  gap: 6px;
+  transition: background 0.2s;
+}
+
+.btn-playback:hover:not(:disabled) {
+  background: #1177bb;
+}
+
+.btn-playback:disabled {
+  background: #3e3e42;
+  color: #858585;
+  cursor: not-allowed;
+}
+
+/* Keyboard-shortcut hint rendered inside playback buttons */
+.btn-playback kbd {
+  background: rgba(255, 255, 255, 0.1);
+  padding: 2px 5px;
+  border-radius: 3px;
+  font-size: 11px;
+}
+
+/* Action bar (bottom): grouped secondary actions plus a primary button */
+.action-bar {
+  background: #2d2d30;
+  border-top: 1px solid #3e3e42;
+  padding: 10px 20px;
+  display: flex;
+  justify-content: space-between;
+  align-items: center;
+}
+
+.action-group {
+  display: flex;
+  gap: 8px;
+}
+
+.btn-action {
+  padding: 6px 12px;
+  background: #3c3c3c;
+  color: #cccccc;
+  border: 1px solid #3e3e42;
+  border-radius: 3px;
+  cursor: pointer;
+  font-size: 12px;
+  display: flex;
+  align-items: center;
+  gap: 6px;
+  transition: background 0.2s, border-color 0.2s;
+}
+
+.btn-action:hover:not(:disabled) {
+  background: #505050;
+  border-color: #0e639c;
+}
+
+.btn-action:disabled {
+  color: #858585;
+  cursor: not-allowed;
+}
+
+.btn-primary {
+  padding: 6px 16px;
+  background: #0e639c;
+  color: white;
+  border: none;
+  border-radius: 3px;
+  cursor: pointer;
+  font-size: 13px;
+  transition: background 0.2s;
+}
+
+.btn-primary:hover {
+  background: #1177bb;
+}
+
+/* Icon styling */
+.icon {
+  font-size: 14px;
+  line-height: 1;
+}
+
+/* Modal (help dialog): full-screen dimmed backdrop with centered content.
+   Script toggles visibility by setting display: flex / none inline. */
+.modal {
+  position: fixed;
+  z-index: 1000;
+  left: 0;
+  top: 0;
+  width: 100%;
+  height: 100%;
+  background-color: rgba(0, 0, 0, 0.7);
+  display: flex;
+  justify-content: center;
+  align-items: center;
+}
+
+.modal-content {
+  background-color: #252526;
+  padding: 30px;
+  border: 1px solid #3e3e42;
+  border-radius: 5px;
+  width: 600px;
+  max-height: 80vh;
+  overflow-y: auto;
+  position: relative;
+}
+
+/* Close "×" pinned to the modal's top-right corner */
+.modal-close {
+  position: absolute;
+  right: 15px;
+  top: 15px;
+  font-size: 28px;
+  font-weight: bold;
+  color: #858585;
+  cursor: pointer;
+  line-height: 1;
+}
+
+.modal-close:hover {
+  color: #cccccc;
+}
+
+.modal-content h2 {
+  font-size: 20px;
+  margin-bottom: 20px;
+  color: #cccccc;
+}
+
+.modal-content h3 {
+  font-size: 16px;
+  margin-top: 20px;
+  margin-bottom: 10px;
+  color: #cccccc;
+}
+
+/* Keyboard-shortcuts table inside the help modal */
+.shortcuts-table {
+  width: 100%;
+  border-collapse: collapse;
+  margin-bottom: 20px;
+}
+
+.shortcuts-table th,
+.shortcuts-table td {
+  padding: 8px 12px;
+  text-align: left;
+  border-bottom: 1px solid #3e3e42;
+}
+
+.shortcuts-table th {
+  background: #2d2d30;
+  font-weight: 600;
+  color: #cccccc;
+}
+
+.shortcuts-table td {
+  color: #d4d4d4;
+}
+
+.shortcuts-table kbd {
+  background: #3c3c3c;
+  border: 1px solid #3e3e42;
+  padding: 3px 8px;
+  border-radius: 3px;
+  font-family: monospace;
+  font-size: 12px;
+}
+
+.modal-content ul {
+  list-style: none;
+  padding-left: 0;
+}
+
+.modal-content li {
+  padding: 5px 0;
+  color: #d4d4d4;
+}
+
+.modal-content li strong {
+  color: #cccccc;
+}
+
+/* Scrollbar styling (WebKit/Blink only; Firefox ignores these pseudo-elements) */
+::-webkit-scrollbar {
+  width: 10px;
+  height: 10px;
+}
+
+::-webkit-scrollbar-track {
+  background: #1e1e1e;
+}
+
+::-webkit-scrollbar-thumb {
+  background: #3e3e42;
+  border-radius: 5px;
+}
+
+::-webkit-scrollbar-thumb:hover {
+  background: #4e4e52;
+}