summaryrefslogtreecommitdiff
path: root/tools/mq_editor/index.html
diff options
context:
space:
mode:
Diffstat (limited to 'tools/mq_editor/index.html')
-rw-r--r--tools/mq_editor/index.html85
1 files changed, 85 insertions, 0 deletions
diff --git a/tools/mq_editor/index.html b/tools/mq_editor/index.html
index c1d7bc9..1a07b61 100644
--- a/tools/mq_editor/index.html
+++ b/tools/mq_editor/index.html
@@ -74,6 +74,8 @@
<div class="toolbar">
<input type="file" id="wavFile" accept=".wav">
<button id="extractBtn" disabled>Extract Partials</button>
+ <button id="playBtn" disabled>▶ Play</button>
+ <button id="stopBtn" disabled>■ Stop</button>
<div class="params">
<label>Hop:</label>
@@ -96,9 +98,13 @@
<script>
let audioBuffer = null;
let viewer = null;
+ let audioContext = null;
+ let currentSource = null;
const wavFile = document.getElementById('wavFile');
const extractBtn = document.getElementById('extractBtn');
+ const playBtn = document.getElementById('playBtn');
+ const stopBtn = document.getElementById('stopBtn');
const canvas = document.getElementById('canvas');
const status = document.getElementById('status');
@@ -106,6 +112,13 @@
const threshold = document.getElementById('threshold');
const fftSize = 1024; // Fixed
+ // Initialize audio context
// Lazily create the shared AudioContext on first use; subsequent
// calls are no-ops. Falls back to the webkit-prefixed constructor
// for older Safari.
function initAudioContext() {
  if (audioContext) {
    return;
  }
  const ContextCtor = window.AudioContext || window.webkitAudioContext;
  audioContext = new ContextCtor();
}
+
// Load WAV file
wavFile.addEventListener('change', async (e) => {
const file = e.target.files[0];
@@ -117,7 +130,9 @@
const audioContext = new AudioContext();
audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
+ initAudioContext();
extractBtn.disabled = false;
+ playBtn.disabled = false;
setStatus(`Loaded: ${audioBuffer.duration.toFixed(2)}s, ${audioBuffer.sampleRate}Hz, ${audioBuffer.numberOfChannels}ch`, 'info');
// Create viewer
@@ -159,10 +174,80 @@
}, 50);
});
+ // Play audio
// Play the loaded buffer from the start and animate the playhead.
playBtn.addEventListener('click', () => {
  if (!audioBuffer || !audioContext) return;

  // Cancel any playback already in flight before starting a new one.
  stopAudio();

  const startTime = audioContext.currentTime;
  const source = audioContext.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(audioContext.destination);
  source.start();
  currentSource = source;

  source.onended = () => {
    // FIX: onended fires asynchronously, so after play -> stop -> play
    // in quick succession the stale handler from the *previous* source
    // would null out currentSource and reset the transport UI while the
    // new source is still playing. Ignore it unless this source is
    // still the active one.
    if (currentSource !== source) return;
    currentSource = null;
    playBtn.disabled = false;
    stopBtn.disabled = true;
    viewer.setPlayheadTime(-1);
    setStatus('Stopped', 'info');
  };

  playBtn.disabled = true;
  stopBtn.disabled = false;
  setStatus('Playing...', 'info');

  // Animate the playhead; keyed on source *identity* (not mere
  // truthiness of currentSource) so a leftover RAF loop from an
  // earlier playback stops instead of fighting the new one.
  function updatePlayhead() {
    if (currentSource !== source) return;
    const elapsed = audioContext.currentTime - startTime;
    viewer.setPlayheadTime(elapsed);
    requestAnimationFrame(updatePlayhead);
  }
  updatePlayhead();
});
+
+ // Stop audio
// Stop audio; the click event argument is intentionally ignored.
stopBtn.addEventListener('click', () => stopAudio());
+
// Halt any in-flight playback, clear the playhead, and return the
// transport buttons to their idle state.
function stopAudio() {
  const source = currentSource;
  currentSource = null;
  if (source) {
    try {
      source.stop();
    } catch (e) {
      // stop() throws if the source already finished — safe to ignore.
    }
  }
  if (viewer) {
    viewer.setPlayheadTime(-1);
  }
  playBtn.disabled = false;
  stopBtn.disabled = true;
  setStatus('Stopped', 'info');
}
+
// Render a status line. `type` becomes the element's CSS class
// ('' | 'info' | 'warn' as used elsewhere in this script).
// NOTE(review): msg is assigned via innerHTML, so callers may embed
// markup — only ever pass trusted, program-generated strings here.
function setStatus(msg, type = '') {
  status.className = type;
  status.innerHTML = msg;
}
+
+ // Keyboard shortcuts
// Keyboard shortcuts: 1 = synthesized playback (Phase 2 placeholder),
// 2 = play the original audio (same as clicking the Play button).
document.addEventListener('keydown', (e) => {
  switch (e.code) {
    case 'Digit1':
      e.preventDefault();
      // TODO: Play synthesized (Phase 2)
      setStatus('Synthesized playback not yet implemented', 'warn');
      break;
    case 'Digit2':
      e.preventDefault();
      if (!playBtn.disabled) {
        playBtn.click();
      }
      break;
  }
});
</script>
</body>
</html>