'use strict';
class CNNv3Tester {
  // Browser harness for a small WebGPU CNN (v3): loads f16 weight blobs and an
  // optional FiLM-conditioning MLP, packs image/video/G-buffer inputs, runs the
  // encoder/bottleneck/decoder compute passes, and displays/inspects results.
  constructor() {
    // Page elements this class drives (assumed present in the host HTML).
    this.canvas = document.getElementById('canvas');
    this.statusEl= document.getElementById('status');
    this.conEl = document.getElementById('con');
    this.video = document.getElementById('vid');
    this.weightsU32 = null;       // parsed CNN weights as Uint32Array (f16 pairs)
    this.weightsBuffer = null;    // raw ArrayBuffer, uploaded lazily to GPU
    this.weightsGPU = null;       // cached GPUBuffer copy of weightsBuffer
    this.filmMlp = null;          // {l0w,l0b,l1w,l1b} from parseFilm(), or null
    this.image = null;            // ImageBitmap when an image is loaded
    this.isVideo = false;
    this.viewMode= 0; // 0=cnn 1=orig 2=diff
    this.targetBitmap = null; // set when a sample dir with target.png is loaded
    this.blend = 1.0;             // CNN/original blend factor for display
    this.layerTextures = {};      // per-layer GPU textures kept for visualization
    this.lastResult = null;       // state needed by redisplay() without re-running
    this.isProcessing = false;    // guards against overlapping video-frame runs
    this.fps = 30;                // assumed frame rate for stepFrame()
    this.pipelines = {}; // cached: pack enc0 enc1 bn dec1 dec0 disp vizF32 vizU32 mip
    this.linearSampler = null;
    // Fire-and-forget async init; errors surface via setStatus inside init().
    this.init();
  }
  // Append a timestamped line to the on-page console (#con) and keep it
  // scrolled to the bottom. `type` becomes part of the CSS class ('info','err').
  log(msg, type='info') {
    const d = document.createElement('div');
    d.className = `cl ${type}`;
    d.textContent = `[${new Date().toLocaleTimeString()}] ${msg}`;
    this.conEl.appendChild(d);
    this.conEl.scrollTop = this.conEl.scrollHeight;
  }
  // Update the one-line status indicator; red for errors, blue otherwise.
  setStatus(msg, err=false) {
    this.statusEl.textContent = msg;
    this.statusEl.style.color = err ? '#f44' : '#4a9eff';
  }
  // Acquire the WebGPU adapter/device, configure the canvas context, create the
  // shared linear sampler, then kick off weight preloading. Any failure is
  // reported via the status line rather than thrown.
  async init() {
    if (!navigator.gpu) { this.setStatus('WebGPU not supported',true); return; }
    try {
      this.adapter = await navigator.gpu.requestAdapter();
      this.device = await this.adapter.requestDevice();
      this.context = this.canvas.getContext('webgpu');
      this.format = navigator.gpu.getPreferredCanvasFormat();
      // One trilinear sampler shared by all pipelines that sample the input mips.
      this.linearSampler = this.device.createSampler({magFilter:'linear',minFilter:'linear',mipmapFilter:'linear'});
      this.log('WebGPU ready');
      this.preload();
    } catch(e) { this.setStatus(`GPU error: ${e.message}`,true); }
  }
  // Best-effort fetch of the default weight blobs from a relative path
  // (assumes the page is served from the expected directory layout — a 404 is
  // logged and skipped, not fatal). Mirrors the UI updates done by the manual
  // loadWeights()/loadFilm() drop handlers.
  async preload() {
    const base = '../../workspaces/main/weights/';
    const files = [
      {url: base+'cnn_v3_weights.bin', isFilm: false},
      {url: base+'cnn_v3_film_mlp.bin', isFilm: true},
    ];
    for (const {url, isFilm} of files) {
      try {
        const r = await fetch(url);
        if (!r.ok) { this.log(`preload skip: ${url.split('/').pop()} (${r.status})`); continue; }
        const buf = await r.arrayBuffer();
        const name = url.split('/').pop();
        if (isFilm) {
          this.filmMlp = this.parseFilm(buf);
          const el = document.getElementById('fDrop');
          el.textContent = `✓ ${name}`; el.classList.add('ok');
          document.getElementById('fSt').textContent = 'FiLM MLP loaded';
          document.getElementById('fSt').style.color = '#28a745';
        } else {
          this.weightsU32 = this.parseWeights(buf); this.weightsBuffer = buf;
          // Invalidate the cached GPU copy so run() re-uploads the new weights.
          if (this.weightsGPU) { this.weightsGPU.destroy(); this.weightsGPU = null; }
          const el = document.getElementById('wDrop');
          el.textContent = `✓ ${name}`; el.classList.add('ok');
        }
        this.log(`Preloaded: ${name}`);
      } catch(e) { this.log(`preload error (${url.split('/').pop()}): ${e.message}`, 'err'); }
    }
    if (this.weightsU32) {
      // Fire-and-forget: run() reports its own status/errors.
      if (this.image || this.isVideo) this.run();
      else this.setStatus('Weights loaded — drop image/video');
    }
  }
  // Current source dimensions; assumes this.image or this.video is set
  // (callers check before use).
  getDims() {
    return this.isVideo
      ? {w:this.video.videoWidth, h:this.video.videoHeight}
      : {w:this.image.width, h:this.image.height};
  }
  // Toggle the simple/full UI mode buttons and the full-mode help panel.
  setMode(m) {
    document.getElementById('mSimple').classList.toggle('act', m==='simple');
    document.getElementById('mFull').classList.toggle('act', m==='full');
    document.getElementById('fullHelp').style.display = m==='full' ? 'block' : 'none';
  }
// ── Weight parsing ───────────────────────────────────────────────────────
parseWeights(buf) {
const u32 = new Uint32Array(buf);
if (u32.length < TOTAL_U32) throw new Error(`Too small: ${u32.length} u32, need ${TOTAL_U32}`);
const layers = [
{n:'enc0',off:ENC0_OFF,cnt:724},{n:'enc1',off:ENC1_OFF,cnt:296},
{n:'bn', off:BN_OFF, cnt: 72},{n:'dec1',off:DEC1_OFF,cnt:580},
{n:'dec0',off:DEC0_OFF,cnt:292},
];
let html=`
Size: ${(buf.byteLength/1024).toFixed(1)} KB Weights: ${TOTAL_F16} f16
| Layer | Offset | Count | Min | Max |
`;
for (const l of layers) {
let mn=Infinity,mx=-Infinity;
for (let i=l.off;i>1]);
const v=(i&1)?b:a; if(vmx)mx=v;
}
html+=`| ${l.n} | ${l.off} | ${l.cnt} | ${mn.toFixed(3)} | ${mx.toFixed(3)} |
`;
}
html+='
';
document.getElementById('wInfo').innerHTML = html;
return u32;
}
  // Parse the FiLM MLP blob: contiguous f32 layout
  // [l0w 16×5 | l0b 16 | l1w 40×16 | l1b 40] = 776 floats minimum.
  parseFilm(buf) {
    const f32=new Float32Array(buf);
    if (f32.length < 776) throw new Error(`FiLM too small: ${f32.length}`);
    let o=0;
    const l0w=f32.slice(o,o+=80), l0b=f32.slice(o,o+=16);
    const l1w=f32.slice(o,o+=640),l1b=f32.slice(o,o+=40);
    this.log(`FiLM MLP: L0(16×5) L1(40×16), ${f32.length} f32`);
    return {l0w,l0b,l1w,l1b};
  }
filmFwd(cond) {
const {l0w,l0b,l1w,l1b}=this.filmMlp;
const h=new Float32Array(16);
for(let j=0;j<16;j++){let s=l0b[j];for(let i=0;i<5;i++)s+=l0w[j*5+i]*cond[i];h[j]=Math.max(0,s);}
const o=new Float32Array(40);
for(let j=0;j<40;j++){let s=l1b[j];for(let i=0;i<16;i++)s+=l1w[j*16+i]*h[i];o[j]=s;}
return o;
}
  // Compute per-layer FiLM gamma/beta vectors from the 5 UI sliders.
  // Without a loaded MLP, returns identity modulation (gamma=1, beta=0).
  // Output layout matches the 40-float MLP output: enc0 g/b (4+4),
  // enc1 g/b (8+8), dec1 g/b (4+4), dec0 g/b (4+4).
  filmParams() {
    const I4=[1,1,1,1],Z4=[0,0,0,0],I8=[1,1,1,1,1,1,1,1],Z8=[0,0,0,0,0,0,0,0];
    if (!this.filmMlp) return {ge0:I4,be0:Z4,ge1:I8,be1:Z8,gd1:I4,bd1:Z4,gd0:I4,bd0:Z4};
    const v=document.getElementById.bind(document);
    const cond=[v('sBP').value,v('sBN').value,v('sAI').value,v('sP0').value,v('sP1').value].map(Number);
    const f=this.filmFwd(cond);
    return {
      ge0:[...f.slice(0,4)], be0:[...f.slice(4,8)],
      ge1:[...f.slice(8,16)],be1:[...f.slice(16,24)],
      gd1:[...f.slice(24,28)],bd1:[...f.slice(28,32)],
      gd0:[...f.slice(32,36)],bd0:[...f.slice(36,40)],
    };
  }
// ── Uniform buffers ──────────────────────────────────────────────────────
// Params4 (48 bytes): wo u32 _pad×3 gamma vec4f beta vec4f
u4(wo,g,b){
const buf=new ArrayBuffer(48),v=new DataView(buf);
v.setUint32(0,wo,true);
for(let i=0;i<4;i++)v.setFloat32(16+i*4,g[i],true);
for(let i=0;i<4;i++)v.setFloat32(32+i*4,b[i],true);
return buf;
}
  // Params8 (80 bytes): wo u32 _pad×3 gl gh bl bh vec4f×4
  // 8-channel gamma/beta split into low/high vec4f halves; little-endian.
  u8(wo,g,b){
    const buf=new ArrayBuffer(80),v=new DataView(buf);
    v.setUint32(0,wo,true);
    for(let i=0;i<4;i++)v.setFloat32(16+i*4,g[i],true);
    for(let i=0;i<4;i++)v.setFloat32(32+i*4,g[i+4],true);
    for(let i=0;i<4;i++)v.setFloat32(48+i*4,b[i],true);
    for(let i=0;i<4;i++)v.setFloat32(64+i*4,b[i+4],true);
    return buf;
  }
  // ParamsBN (16 bytes): wo u32 _pad×3 — bottleneck has no FiLM modulation.
  ubn(wo){const buf=new ArrayBuffer(16);new DataView(buf).setUint32(0,wo,true);return buf;}
// ── Pipeline cache ───────────────────────────────────────────────────────
  // Create a compute pipeline from WGSL source with auto bind-group layout.
  computePL(code,entry) {
    return this.device.createComputePipeline({layout:'auto',
      compute:{module:this.device.createShaderModule({code}),entryPoint:entry}});
  }
  // Create a render pipeline (single color target in the canvas format).
  renderPL(code,vs,fs) {
    const m=this.device.createShaderModule({code});
    return this.device.createRenderPipeline({layout:'auto',
      vertex:{module:m,entryPoint:vs},
      fragment:{module:m,entryPoint:fs,targets:[{format:this.format}]}});
  }
  // Memoize a pipeline under `key`; `fn` builds it on first use.
  pl(key,fn){if(!this.pipelines[key])this.pipelines[key]=fn();return this.pipelines[key];}
  // Lazily-built pipelines. The *_SHADER WGSL sources are globals defined
  // elsewhere in the page.
  getPack() {return this.pl('pack', ()=>this.computePL(PACK_SHADER,'main'));}
  getEnc0() {return this.pl('enc0', ()=>this.computePL(ENC0_SHADER,'main'));}
  getEnc1() {return this.pl('enc1', ()=>this.computePL(ENC1_SHADER,'main'));}
  getBN() {return this.pl('bn', ()=>this.computePL(BN_SHADER,'main'));}
  getDec1() {return this.pl('dec1', ()=>this.computePL(DEC1_SHADER,'main'));}
  getDec0() {return this.pl('dec0', ()=>this.computePL(DEC0_SHADER,'main'));}
  getDisp() {return this.pl('disp', ()=>this.renderPL(DISP_SHADER,'vs','fs'));}
  getVizF32() {return this.pl('vf32', ()=>this.renderPL(VIZ_F32,'vs','fs'));}
  getVizU32() {return this.pl('vu32', ()=>this.renderPL(VIZ_U32,'vs','fs'));}
getMip() {
return this.pl('mip', ()=>{
const code=`@group(0) @binding(0) var src:texture_2d;
@vertex fn vs(@builtin(vertex_index) i:u32)->@builtin(position) vec4f{
var p=array(vec2f(-1.,-1.),vec2f(1.,-1.),vec2f(-1.,1.),vec2f(-1.,1.),vec2f(1.,-1.),vec2f(1.,1.));
return vec4f(p[i],0.,1.);}
@fragment fn fs(@builtin(position) pos:vec4f)->@location(0) vec4f{
let c=vec2i(i32(pos.x)*2,i32(pos.y)*2); var s=vec4f(0.);
for(var y:i32=0;y<2;y++){for(var x:i32=0;x<2;x++){s+=textureLoad(src,c+vec2i(x,y),0);}}
return s*.25;}`;
const m=this.device.createShaderModule({code});
return this.device.createRenderPipeline({layout:'auto',
vertex:{module:m,entryPoint:'vs'},
fragment:{module:m,entryPoint:'fs',targets:[{format:'rgba8unorm'}]}});
});
}
// ── Mipmap generation ────────────────────────────────────────────────────
  // Fill mip levels 1 and 2 of `tex` (created with mipLevelCount 3) by
  // rendering each level from the previous one with the box-filter pipeline.
  // Uses a viewport clamp so partial tiles at the edge are handled by the
  // render target bounds.
  generateMipmaps(tex,w,h) {
    const enc=this.device.createCommandEncoder();
    const pl=this.getMip();
    for(let mip=1;mip<3;mip++){
      const mw=Math.max(1,w>>mip),mh=Math.max(1,h>>mip);
      // Read exactly one mip level, write exactly the next.
      const bg=this.device.createBindGroup({layout:pl.getBindGroupLayout(0),
        entries:[{binding:0,resource:tex.createView({baseMipLevel:mip-1,mipLevelCount:1})}]});
      const rp=enc.beginRenderPass({colorAttachments:[{
        view:tex.createView({baseMipLevel:mip,mipLevelCount:1}),loadOp:'clear',storeOp:'store'}]});
      rp.setPipeline(pl);rp.setBindGroup(0,bg);rp.setViewport(0,0,mw,mh,0,1);rp.draw(6);rp.end();
    }
    this.device.queue.submit([enc.finish()]);
  }
// ── File loading ─────────────────────────────────────────────────────────
  // Decode a dropped image file, resize the canvas to it, and run the CNN if
  // weights are already loaded (otherwise just blit the original).
  async loadImage(file) {
    this.image=await createImageBitmap(file); this.isVideo=false;
    this.canvas.width=this.image.width; this.canvas.height=this.image.height;
    this.setVideoCtrl(false);
    this.log(`Image: ${file.name} (${this.image.width}×${this.image.height})`);
    if(this.weightsU32){this.setStatus('Ready');this.run();}
    else{this.setStatus('Image loaded — drop weights .bin');this.showOriginal();}
  }
  // Load a dropped video file into the hidden <video> element and process its
  // first frame. Resolves once the first frame has been displayed.
  // The `go` dance waits (via onseeked + polling) until frame data is actually
  // decodable (readyState >= HAVE_CURRENT_DATA) before running.
  // NOTE(review): the object URL created here is never revoked — a leak if many
  // videos are loaded in one session; confirm and consider URL.revokeObjectURL.
  async loadVideo(file) {
    return new Promise((res,rej)=>{
      this.video.src=URL.createObjectURL(file);
      this.video.onloadedmetadata=()=>{
        const w=this.video.videoWidth,h=this.video.videoHeight;
        if(!w||!h){rej(new Error('Bad dims'));return;}
        this.isVideo=true; this.canvas.width=w; this.canvas.height=h;
        this.log(`Video: ${file.name} (${w}×${h})`);
        this.setVideoCtrl(true);
        // Keep the play/pause button label in sync with playback state.
        this.video.onpause=()=>{document.getElementById('btnPP').textContent='Play';};
        this.video.onplay =()=>{document.getElementById('btnPP').textContent='Pause';this.playLoop();};
        const go=()=>{
          // After the first frame, subsequent seeks just reprocess.
          this.video.onseeked=()=>{if(!this.isProcessing)this.procFrame();};
          if(this.video.readyState>=2){this.weightsU32?this.procFrame().then(res):res(this.showOriginal());}
          else setTimeout(go,50);
        };
        this.video.onseeked=go; this.video.currentTime=0;
      };
      this.video.onerror=()=>rej(new Error('Video load failed'));
    });
  }
  // Enable/disable the three transport buttons together.
  setVideoCtrl(en){['btnPP','btnBk','btnFw'].forEach(id=>document.getElementById(id).disabled=!en);}
  togglePlay(){this.video.paused?this.video.play():this.video.pause();}
  // Step ±d frames, assuming this.fps (clamped to [0, duration]).
  stepFrame(d){if(!this.isVideo)return;this.video.pause();
    this.video.currentTime=Math.max(0,Math.min(this.video.duration,this.video.currentTime+d/this.fps));}
  // rAF loop while playing; frames are skipped if the previous run is still busy.
  playLoop(){if(this.video.paused||this.video.ended)return;
    if(!this.isProcessing)this.procFrame();requestAnimationFrame(()=>this.playLoop());}
  // Re-entrancy-guarded single-frame run.
  async procFrame(){if(!this.weightsU32||this.isProcessing)return;this.isProcessing=true;await this.run();this.isProcessing=false;}
  // Handle a dropped CNN weights .bin: parse, invalidate the GPU-side cache so
  // the next run re-uploads, update the drop-zone UI, and run if input exists.
  async loadWeights(file) {
    try {
      const buf=await file.arrayBuffer();
      this.weightsU32=this.parseWeights(buf); this.weightsBuffer=buf;
      if(this.weightsGPU){this.weightsGPU.destroy();this.weightsGPU=null;}
      const el=document.getElementById('wDrop');
      el.textContent=`✓ ${file.name}`; el.classList.add('ok');
      this.log(`Weights: ${file.name}`);
      if(this.image||this.isVideo){this.setStatus('Ready');this.run();}
      else this.setStatus('Weights loaded — drop image/video');
    } catch(e){this.log(`Weights error: ${e.message}`,'err');document.getElementById('wDrop').classList.add('err');}
  }
  // Handle a dropped FiLM MLP .bin; re-runs immediately so the new
  // conditioning takes effect on the current input.
  async loadFilm(file) {
    try {
      const buf=await file.arrayBuffer();
      this.filmMlp=this.parseFilm(buf);
      const el=document.getElementById('fDrop');
      el.textContent=`✓ ${file.name}`; el.classList.add('ok');
      document.getElementById('fSt').textContent='FiLM MLP loaded';
      document.getElementById('fSt').style.color='#28a745';
      if(this.image||this.isVideo)this.run();
    } catch(e){this.log(`FiLM error: ${e.message}`,'err');document.getElementById('fDrop').classList.add('err');}
  }
  // FiLM-slider change handler: echo the value into its label, then re-run.
  fslide(valId,el){document.getElementById(valId).textContent=parseFloat(el.value).toFixed(2);this.rerun();}
  rerun(){if(this.image||this.isVideo)this.run();}
  // Blend slider only affects display, so redisplay() suffices (no CNN re-run).
  setBlend(v){this.blend=parseFloat(v);document.getElementById('blendV').textContent=this.blend.toFixed(2);if(this.lastResult)this.redisplay();}
// ── Main run ─────────────────────────────────────────────────────────────
  // Full pipeline for a photo/video frame: upload input + mips, pack features,
  // run enc0 → enc1 → bottleneck → dec1 → dec0 compute passes, then draw the
  // result through the display shader. Also stashes layer textures for the
  // visualization panel.
  async run() {
    if(!this.weightsU32||!this.device)return;
    const src=this.isVideo?this.video:this.image;
    if(!src)return;
    const t0=performance.now();
    const {w,h}=this.getDims();
    // Half- and quarter-resolution dims for the downsampled stages.
    const W2=w>>1,H2=h>>1,W4=W2>>1,H4=H2>>1;
    this.context.configure({device:this.device,format:this.format});
    // Input texture with mipmaps (3 levels; mips built by generateMipmaps).
    if(this.inputTex)this.inputTex.destroy();
    this.inputTex=this.device.createTexture({size:[w,h],format:'rgba8unorm',mipLevelCount:3,
      usage:GPUTextureUsage.TEXTURE_BINDING|GPUTextureUsage.COPY_DST|GPUTextureUsage.RENDER_ATTACHMENT});
    this.device.queue.copyExternalImageToTexture({source:src},{texture:this.inputTex,mipLevel:0},[w,h]);
    this.generateMipmaps(this.inputTex,w,h);
    // Intermediate textures. u32 formats carry packed-f16 feature channels;
    // f32 stages use rgba16float directly.
    const mk=(fmt,tw,th)=>this.device.createTexture({size:[tw,th],format:fmt,
      usage:GPUTextureUsage.STORAGE_BINDING|GPUTextureUsage.TEXTURE_BINDING|GPUTextureUsage.COPY_SRC});
    const f0=mk('rgba32uint',w,h),f1=mk('rgba32uint',w,h);
    const e0=mk('rgba16float',w,h),e1=mk('rgba32uint',W2,H2);
    const bn=mk('rgba32uint',W4,H4),d1=mk('rgba16float',W2,H2),ot=mk('rgba16float',w,h);
    // Weights GPU buffer (cached across runs; invalidated when weights change).
    if(!this.weightsGPU){
      this.weightsGPU=this.device.createBuffer({size:this.weightsBuffer.byteLength,
        usage:GPUBufferUsage.STORAGE|GPUBufferUsage.COPY_DST});
      this.device.queue.writeBuffer(this.weightsGPU,0,this.weightsBuffer);
    }
    const wg=this.weightsGPU;
    const fp=this.filmParams();
    // Small helper: create+fill a uniform buffer.
    const wu=(data)=>{
      const b=this.device.createBuffer({size:data.byteLength,usage:GPUBufferUsage.UNIFORM|GPUBufferUsage.COPY_DST});
      this.device.queue.writeBuffer(b,0,data); return b;
    };
    // Per-layer uniforms: weight offset + FiLM gamma/beta.
    const uE0=wu(this.u4(ENC0_OFF,fp.ge0,fp.be0));
    const uE1=wu(this.u8(ENC1_OFF,fp.ge1,fp.be1));
    const uBN=wu(this.ubn(BN_OFF));
    const uD1=wu(this.u4(DEC1_OFF,fp.gd1,fp.bd1));
    const uD0=wu(this.u4(DEC0_OFF,fp.gd0,fp.bd0));
    // Display uniform: viewMode at byte 0, blend at byte 4.
    const dispData=new ArrayBuffer(16);
    const dispView=new DataView(dispData);
    dispView.setFloat32(0,this.viewMode,true); dispView.setFloat32(4,this.blend,true);
    const uDp=wu(dispData);
    const enc=this.device.createCommandEncoder();
    // Helpers to keep the pass list below terse.
    const bg=(pl,...entries)=>this.device.createBindGroup({layout:pl.getBindGroupLayout(0),
      entries:entries.map((r,i)=>({binding:i,resource:r}))});
    const rv=(t)=>t.createView();
    const cp=(pl,bgr,wx,wy)=>{const p=enc.beginComputePass();p.setPipeline(pl);p.setBindGroup(0,bgr);p.dispatchWorkgroups(wx,wy);p.end();};
    const ceil8=(n)=>Math.ceil(n/8); // 8×8 workgroups assumed by the shaders
    cp(this.getPack(), bg(this.getPack(), rv(this.inputTex),this.linearSampler,rv(f0),rv(f1)), ceil8(w),ceil8(h));
    cp(this.getEnc0(), bg(this.getEnc0(), rv(f0),rv(f1),{buffer:wg},{buffer:uE0},rv(e0)), ceil8(w),ceil8(h));
    cp(this.getEnc1(), bg(this.getEnc1(), rv(e0),{buffer:wg},{buffer:uE1},rv(e1)), ceil8(W2),ceil8(H2));
    cp(this.getBN(),   bg(this.getBN(),   rv(e1),{buffer:wg},{buffer:uBN},rv(bn)), ceil8(W4),ceil8(H4));
    cp(this.getDec1(), bg(this.getDec1(), rv(bn),rv(e1),{buffer:wg},{buffer:uD1},rv(d1)), ceil8(W2),ceil8(H2));
    cp(this.getDec0(), bg(this.getDec0(), rv(d1),rv(e0),{buffer:wg},{buffer:uD0},rv(ot)), ceil8(w),ceil8(h));
    // Final display pass to the canvas.
    const dbg=bg(this.getDisp(),rv(ot),rv(this.inputTex),{buffer:uDp});
    const rp=enc.beginRenderPass({colorAttachments:[{view:this.context.getCurrentTexture().createView(),loadOp:'clear',storeOp:'store'}]});
    rp.setPipeline(this.getDisp());rp.setBindGroup(0,dbg);rp.draw(6);rp.end();
    this.device.queue.submit([enc.finish()]);
    await this.device.queue.onSubmittedWorkDone();
    const ms=(performance.now()-t0).toFixed(1);
    this.setStatus(`${ms}ms · ${w}×${h} · ${['CNN','Orig','Diff'][this.viewMode]}`);
    this.log(`Run: ${ms}ms`);
    // Cleanup uniforms (uDp is kept alive for redisplay via lastResult).
    // NOTE(review): the previous lastResult.uDp is never destroyed when
    // overwritten below — looks like a small per-run buffer leak; confirm.
    [uE0,uE1,uBN,uD1,uD0].forEach(b=>b.destroy());
    // Store for layer viz & redisplay; old layer textures are destroyed first.
    this.destroyLayerTex();
    this.layerTextures={feat0:f0,feat1:f1,enc0:e0,enc1:e1,bn,dec1:d1,output:ot};
    this.lastResult={ot,itex:this.inputTex,uDp,dispPL:this.getDisp(),w,h};
    this.updateVizPanel();
  }
  // Destroy all cached per-layer textures; ignores errors from already-destroyed ones.
  destroyLayerTex(){for(const t of Object.values(this.layerTextures||{}))try{t.destroy();}catch(_){} this.layerTextures={};}
  // Redraw the last CNN result with the current viewMode/blend, without
  // re-running the network. Builds a fresh display uniform each time.
  redisplay() {
    if(!this.lastResult||!this.device)return;
    const {ot,itex,dispPL,w,h}=this.lastResult;
    const dispData=new ArrayBuffer(16),dv=new DataView(dispData);
    dv.setFloat32(0,this.viewMode,true);dv.setFloat32(4,this.blend,true);
    const uDp=this.device.createBuffer({size:16,usage:GPUBufferUsage.UNIFORM|GPUBufferUsage.COPY_DST});
    this.device.queue.writeBuffer(uDp,0,dispData);
    const dbg=this.device.createBindGroup({layout:dispPL.getBindGroupLayout(0),entries:[
      {binding:0,resource:ot.createView()},{binding:1,resource:itex.createView()},{binding:2,resource:{buffer:uDp}}]});
    this.context.configure({device:this.device,format:this.format});
    const enc=this.device.createCommandEncoder();
    const rp=enc.beginRenderPass({colorAttachments:[{view:this.context.getCurrentTexture().createView(),loadOp:'clear',storeOp:'store'}]});
    rp.setPipeline(dispPL);rp.setBindGroup(0,dbg);rp.draw(6);rp.end();
    this.device.queue.submit([enc.finish()]);
    uDp.destroy();
    this.setStatus(`${w}×${h} · ${['CNN','Orig','Diff'][this.viewMode]}`);
  }
showOriginal() {
const src=this.isVideo?this.video:this.image;
if(!src||!this.device)return;
const {w,h}=this.getDims();
this.context.configure({device:this.device,format:this.format});
const tex=this.device.createTexture({size:[w,h],format:'rgba8unorm',
usage:GPUTextureUsage.TEXTURE_BINDING|GPUTextureUsage.COPY_DST|GPUTextureUsage.RENDER_ATTACHMENT});
this.device.queue.copyExternalImageToTexture({source:src},{texture:tex},[w,h]);
const code=`@group(0) @binding(0) var t:texture_2d;
@vertex fn vs(@builtin(vertex_index) i:u32)->@builtin(position) vec4f{
var p=array(vec2f(-1.,-1.),vec2f(1.,-1.),vec2f(-1.,1.),vec2f(-1.,1.),vec2f(1.,-1.),vec2f(1.,1.));
return vec4f(p[i],0.,1.);}
@fragment fn fs(@builtin(position) pos:vec4f)->@location(0) vec4f{return textureLoad(t,vec2i(pos.xy),0);}`;
const pl=this.device.createRenderPipeline({layout:'auto',
vertex:{module:this.device.createShaderModule({code}),entryPoint:'vs'},
fragment:{module:this.device.createShaderModule({code}),entryPoint:'fs',targets:[{format:this.format}]}});
const bg=this.device.createBindGroup({layout:pl.getBindGroupLayout(0),entries:[{binding:0,resource:tex.createView()}]});
const enc=this.device.createCommandEncoder();
const rp=enc.beginRenderPass({colorAttachments:[{view:this.context.getCurrentTexture().createView(),loadOp:'clear',storeOp:'store'}]});
rp.setPipeline(pl);rp.setBindGroup(0,bg);rp.draw(6);rp.end();
this.device.queue.submit([enc.finish()]);
tex.destroy();
}
// ── Layer visualization ──────────────────────────────────────────────────
updateVizPanel() {
const DEFS=[
{id:'feat0', lbl:'Feat', t:'u32',nch:8, ch:['alb.r','alb.g','alb.b','nrm.x','nrm.y','depth','dgx','dgy']},
{id:'enc0', lbl:'Enc0', t:'f32',nch:4, ch:['c0','c1','c2','c3']},
{id:'enc1', lbl:'Enc1', t:'u32',nch:8, ch:['c0','c1','c2','c3','c4','c5','c6','c7']},
{id:'bn', lbl:'BN', t:'u32',nch:8, ch:['c0','c1','c2','c3','c4','c5','c6','c7']},
{id:'dec1', lbl:'Dec1', t:'f32',nch:4, ch:['c0','c1','c2','c3']},
{id:'output',lbl:'Output', t:'f32',nch:4, ch:['R','G','B','A']},
];
this.vizDefs=DEFS;
const panel=document.getElementById('layerViz');
let html='';
for(const d of DEFS) html+=``;
html+='
';
panel.innerHTML=html;
this.vizLayer('output');
}
  // Render per-channel thumbnails of the selected layer texture into #chgrid
  // and highlight the matching selector button.
  // NOTE(review): this span is corrupted — the per-channel loop body (from
  // `for(let c=0;c<def.nch;...` onward), the end of vizLayer, and the start of
  // an async savePNG() method were eaten by tag-stripping, leaving the two
  // methods fused and syntactically broken (L417/L423 below). The code is kept
  // byte-identical here; recover the lost body from version control.
  async vizLayer(id) {
    const tex=this.layerTextures[id]; if(!tex||!this.device)return;
    this.vizDefs.forEach(d=>document.getElementById(`vb_${d.id}`)?.classList.remove('act'));
    document.getElementById(`vb_${id}`)?.classList.add('act');
    const def=this.vizDefs.find(d=>d.id===id); if(!def)return;
    const grid=document.getElementById('chgrid'); grid.innerHTML='';
    for(let c=0;c{
    // f16 -> [0,1] clamp decode; zero/inf handling collapses subnormals to 0
    // and maps +inf to 1, -inf to 0 (presumably for display only).
    const s=(bits>>15)&1,e=(bits>>10)&0x1F,f=bits&0x3FF;
    if(e===0)return 0;if(e===31)return s?0:1;
    return Math.max(0,Math.min(1,(s?-1:1)*Math.pow(2,e-15)*(1+f/1024)));
    };
    const px=new Uint8ClampedArray(w*h*4);
    for(let y=0;ycvs.toBlob(r,'image/png'));
    const a=document.createElement('a');a.href=URL.createObjectURL(blob);
    a.download=`cnn_v3_${w}x${h}.png`;a.click();URL.revokeObjectURL(a.href);
    this.log(`Saved: ${a.download}`);
    } catch(e){this.log(`Save failed: ${e.message}`,'err');}
  }
f16pair(packed) {
const lo=packed&0xFFFF,hi=(packed>>16)&0xFFFF;
const f=(b)=>{const s=(b>>15)&1,e=(b>>10)&0x1F,m=b&0x3FF;
if(e===0)return(s?-1:1)*Math.pow(2,-14)*(m/1024);
if(e===31)return m?NaN:(s?-Infinity:Infinity);
return(s?-1:1)*Math.pow(2,e-15)*(1+m/1024);};
return [f(lo),f(hi)];
}
// ── Full G-buffer pack pipeline ───────────────────────────────────────────
getFullPack() {
return this.pl('fullpack', () => this.computePL(FULL_PACK_SHADER, 'main'));
}
// Create a 1×1 rgba8unorm fallback texture with given RGBA bytes [0-255].
makeFallbackTex(r, g, b, a) {
const tex = this.device.createTexture({size:[1,1], format:'rgba8unorm',
usage: GPUTextureUsage.TEXTURE_BINDING|GPUTextureUsage.COPY_DST});
this.device.queue.writeTexture({texture:tex}, new Uint8Array([r,g,b,a]),
{bytesPerRow:4,rowsPerImage:1}, [1,1]);
return tex;
}
// Load an image File as a GPU rgba8unorm texture. Returns {tex, w, h}.
async loadGpuTex(file) {
const bmp = await createImageBitmap(file);
const w = bmp.width, h = bmp.height;
const tex = this.device.createTexture({size:[w,h], format:'rgba8unorm',
usage: GPUTextureUsage.TEXTURE_BINDING|GPUTextureUsage.COPY_DST|GPUTextureUsage.RENDER_ATTACHMENT});
this.device.queue.copyExternalImageToTexture({source:bmp}, {texture:tex}, [w,h]);
bmp.close();
return {tex, w, h};
}
// ── Load sample directory ─────────────────────────────────────────────────
  // ── Load sample directory ─────────────────────────────────────────────────
  // Handle a dropped directory of G-buffer PNGs: identify channels by filename
  // substring, upload them (with 1×1 neutral fallbacks for missing ones), run
  // the full-pack compute pass into feat0/feat1, then run the CNN from there.
  // An optional target.png enables the side-by-side pane and PSNR readout.
  async loadSampleDir(files) {
    if (!files || files.length === 0) return;
    if (!this.weightsU32) { this.setStatus('Load weights first', true); return; }
    this.setMode('full');
    const st = document.getElementById('sampleSt');
    st.textContent = 'Loading…';
    // Match files by name pattern (first file whose lowercase name contains
    // any of the given substrings wins).
    const match = (pat) => {
      for (const f of files) {
        const n = f.name.toLowerCase();
        if (pat.some(p => n.includes(p))) return f;
      }
      return null;
    };
    const fAlbedo = match(['albedo', 'color']);
    const fNormal = match(['normal', 'nrm']);
    const fDepth = match(['depth']);
    const fMatid = match(['matid', 'index', 'mat_id']);
    const fShadow = match(['shadow']);
    const fTransp = match(['transp', 'alpha']);
    const fTarget = match(['target', 'output', 'ground_truth']);
    if (!fAlbedo) {
      st.textContent = '✗ No albedo.png found';
      this.setStatus('No albedo.png in sample dir', true);
      return;
    }
    try {
      const t0 = performance.now();
      // Load primary albedo to get dimensions; all channels are assumed to
      // share its resolution (fallbacks are 1×1 and presumably sampled).
      const {tex: albTex, w, h} = await this.loadGpuTex(fAlbedo);
      this.canvas.width = w; this.canvas.height = h;
      this.context.configure({device:this.device, format:this.format});
      // Load optional channels — fall back to neutral 1×1 textures
      const nrmTex = fNormal ? (await this.loadGpuTex(fNormal)).tex
        : this.makeFallbackTex(128, 128, 0, 255); // oct-encoded (0,0) normal
      const dptTex = fDepth ? (await this.loadGpuTex(fDepth)).tex
        : this.makeFallbackTex(0, 0, 0, 255);
      const midTex = fMatid ? (await this.loadGpuTex(fMatid)).tex
        : this.makeFallbackTex(0, 0, 0, 255);
      const shdTex = fShadow ? (await this.loadGpuTex(fShadow)).tex
        : this.makeFallbackTex(255, 255, 255, 255); // fully lit
      const trpTex = fTransp ? (await this.loadGpuTex(fTransp)).tex
        : this.makeFallbackTex(0, 0, 0, 255); // fully opaque
      // Load target if present (replacing any previous one)
      if (this.targetBitmap) { this.targetBitmap.close(); this.targetBitmap = null; }
      if (fTarget) {
        this.targetBitmap = await createImageBitmap(fTarget);
        this.showTarget();
      } else {
        document.getElementById('targetPane').style.display = 'none';
      }
      // Pack G-buffer into feat0/feat1
      const mk = (fmt, tw, th) => this.device.createTexture({size:[tw,th], format:fmt,
        usage:GPUTextureUsage.STORAGE_BINDING|GPUTextureUsage.TEXTURE_BINDING|GPUTextureUsage.COPY_SRC});
      const f0 = mk('rgba32uint', w, h);
      const f1 = mk('rgba32uint', w, h);
      const ceil8 = (n) => Math.ceil(n/8);
      const pl = this.getFullPack();
      // Binding order must match FULL_PACK_SHADER's declarations.
      const bg = this.device.createBindGroup({layout: pl.getBindGroupLayout(0),
        entries: [
          {binding:0, resource: albTex.createView()},
          {binding:1, resource: nrmTex.createView()},
          {binding:2, resource: dptTex.createView()},
          {binding:3, resource: midTex.createView()},
          {binding:4, resource: shdTex.createView()},
          {binding:5, resource: trpTex.createView()},
          {binding:6, resource: this.linearSampler},
          {binding:7, resource: f0.createView()},
          {binding:8, resource: f1.createView()},
        ]});
      const enc = this.device.createCommandEncoder();
      const cp = enc.beginComputePass();
      cp.setPipeline(pl); cp.setBindGroup(0, bg);
      cp.dispatchWorkgroups(ceil8(w), ceil8(h));
      cp.end();
      this.device.queue.submit([enc.finish()]);
      await this.device.queue.onSubmittedWorkDone();
      // Cleanup source textures — only the packed feat textures are needed now.
      [albTex, nrmTex, dptTex, midTex, shdTex, trpTex].forEach(t => t.destroy());
      const found = [fAlbedo, fNormal, fDepth, fMatid, fShadow, fTransp]
        .filter(Boolean).map(f => f.name).join(', ');
      st.textContent = `✓ ${found}`;
      this.log(`Sample packed: ${w}×${h}, ${((performance.now()-t0)).toFixed(0)}ms`);
      // Run inference from packed feat textures
      await this.runFromFeat(f0, f1, w, h);
      f0.destroy(); f1.destroy();
    } catch(e) {
      st.textContent = `✗ ${e.message}`;
      this.setStatus(`Sample error: ${e.message}`, true);
      this.log(`Sample error: ${e.message}`, 'err');
    }
  }
// Show target.png in the #targetPane alongside main canvas.
showTarget() {
if (!this.targetBitmap) return;
const tc = document.getElementById('targetCanvas');
tc.width = this.targetBitmap.width;
tc.height = this.targetBitmap.height;
const ctx2d = tc.getContext('2d');
ctx2d.drawImage(this.targetBitmap, 0, 0);
document.getElementById('targetPane').style.display = 'flex';
}
  // Run CNN inference starting from pre-packed feat_tex0 / feat_tex1.
  // Used by loadSampleDir() to skip the photo-pack step. Mirrors run() from
  // enc0 onward; the display uniform's viewMode is left at 0 (CNN view).
  async runFromFeat(f0, f1, w, h) {
    if (!this.weightsU32 || !this.device) return;
    const t0 = performance.now();
    const W2=w>>1, H2=h>>1, W4=W2>>1, H4=H2>>1;
    this.context.configure({device:this.device, format:this.format});
    // Create a neutral "original" texture so the display shader can still
    // render Orig/Diff modes (just black for sample mode).
    if (this.inputTex) this.inputTex.destroy();
    this.inputTex = this.device.createTexture({size:[w,h], format:'rgba8unorm',
      usage:GPUTextureUsage.TEXTURE_BINDING|GPUTextureUsage.COPY_DST|GPUTextureUsage.RENDER_ATTACHMENT});
    // Leave it cleared to black — Diff mode against target would need more work
    const mk = (fmt, tw, th) => this.device.createTexture({size:[tw,th], format:fmt,
      usage:GPUTextureUsage.STORAGE_BINDING|GPUTextureUsage.TEXTURE_BINDING|GPUTextureUsage.COPY_SRC});
    const e0=mk('rgba16float',w,h), e1=mk('rgba32uint',W2,H2);
    const bn=mk('rgba32uint',W4,H4), d1=mk('rgba16float',W2,H2), ot=mk('rgba16float',w,h);
    // Cached GPU weights upload (shared with run()).
    if (!this.weightsGPU) {
      this.weightsGPU = this.device.createBuffer({size:this.weightsBuffer.byteLength,
        usage:GPUBufferUsage.STORAGE|GPUBufferUsage.COPY_DST});
      this.device.queue.writeBuffer(this.weightsGPU, 0, this.weightsBuffer);
    }
    const wg = this.weightsGPU;
    const fp = this.filmParams();
    const wu = (data) => {
      const b = this.device.createBuffer({size:data.byteLength, usage:GPUBufferUsage.UNIFORM|GPUBufferUsage.COPY_DST});
      this.device.queue.writeBuffer(b, 0, data); return b;
    };
    const uE0=wu(this.u4(ENC0_OFF,fp.ge0,fp.be0));
    const uE1=wu(this.u8(ENC1_OFF,fp.ge1,fp.be1));
    const uBN=wu(this.ubn(BN_OFF));
    const uD1=wu(this.u4(DEC1_OFF,fp.gd1,fp.bd1));
    const uD0=wu(this.u4(DEC0_OFF,fp.gd0,fp.bd0));
    // viewMode stays 0 (bytes default to zero); only blend is written.
    const dispData=new ArrayBuffer(16);
    new DataView(dispData).setFloat32(4, this.blend, true);
    const uDp=wu(dispData);
    const enc = this.device.createCommandEncoder();
    const bg = (pl,...entries) => this.device.createBindGroup({layout:pl.getBindGroupLayout(0),
      entries:entries.map((r,i)=>({binding:i,resource:r}))});
    const rv = (t) => t.createView();
    const cp = (pl,bgr,wx,wy) => {const p=enc.beginComputePass();p.setPipeline(pl);p.setBindGroup(0,bgr);p.dispatchWorkgroups(wx,wy);p.end();};
    const ceil8 = (n) => Math.ceil(n/8);
    cp(this.getEnc0(), bg(this.getEnc0(), rv(f0),rv(f1),{buffer:wg},{buffer:uE0},rv(e0)), ceil8(w), ceil8(h));
    cp(this.getEnc1(), bg(this.getEnc1(), rv(e0),{buffer:wg},{buffer:uE1},rv(e1)), ceil8(W2), ceil8(H2));
    cp(this.getBN(),   bg(this.getBN(),   rv(e1),{buffer:wg},{buffer:uBN},rv(bn)), ceil8(W4), ceil8(H4));
    cp(this.getDec1(), bg(this.getDec1(), rv(bn),rv(e1),{buffer:wg},{buffer:uD1},rv(d1)), ceil8(W2), ceil8(H2));
    cp(this.getDec0(), bg(this.getDec0(), rv(d1),rv(e0),{buffer:wg},{buffer:uD0},rv(ot)), ceil8(w), ceil8(h));
    const dbg = bg(this.getDisp(), rv(ot), rv(this.inputTex), {buffer:uDp});
    const rp = enc.beginRenderPass({colorAttachments:[{
      view:this.context.getCurrentTexture().createView(), loadOp:'clear', storeOp:'store'}]});
    rp.setPipeline(this.getDisp()); rp.setBindGroup(0, dbg); rp.draw(6); rp.end();
    this.device.queue.submit([enc.finish()]);
    await this.device.queue.onSubmittedWorkDone();
    [uE0,uE1,uBN,uD1,uD0].forEach(b => b.destroy());
    // Compute PSNR against target if available (best-effort; errors ignored).
    let psnrStr = '';
    if (this.targetBitmap) {
      this.showTarget();
      try { psnrStr = await this.computePSNR(ot, w, h); } catch(_) {}
    }
    this.destroyLayerTex();
    this.layerTextures = {feat0:f0, feat1:f1, enc0:e0, enc1:e1, bn, dec1:d1, output:ot};
    this.lastResult = {ot, itex:this.inputTex, uDp, dispPL:this.getDisp(), w, h};
    this.updateVizPanel();
    const ms = (performance.now()-t0).toFixed(1);
    document.getElementById('cnnLabel').textContent = `CNN output (${ms}ms)`;
    if (psnrStr) document.getElementById('psnrSt').textContent = psnrStr;
    this.setStatus(`Sample: ${ms}ms · ${w}×${h}`);
    this.log(`runFromFeat: ${ms}ms`);
  }
  // Compute PSNR between CNN rgba16float output texture and target.png bitmap.
  // Reads the output back via a 256-byte-aligned staging buffer (8 bytes/pixel
  // for rgba16float) and decodes the f16 values on the CPU.
  // NOTE(review): this span is corrupted — the pixel loop, the target-bitmap
  // readback, the MSE accumulation, and the buffer unmap were eaten by
  // tag-stripping at the line defining cnnPx below (`mse` is referenced but
  // never computed). Kept byte-identical; recover the body from version control.
  async computePSNR(outTex, w, h) {
    const bpr = Math.ceil(w * 8 / 256) * 256; // bytesPerRow must be 256-aligned
    const stg = this.device.createBuffer({size:bpr*h,
      usage:GPUBufferUsage.COPY_DST|GPUBufferUsage.MAP_READ});
    const enc = this.device.createCommandEncoder();
    enc.copyTextureToBuffer({texture:outTex}, {buffer:stg, bytesPerRow:bpr, rowsPerImage:h}, [w,h]);
    this.device.queue.submit([enc.finish()]);
    await stg.mapAsync(GPUMapMode.READ);
    const raw = new DataView(stg.getMappedRange());
    // Decode output pixels from f16 (clamped to [0,1] for comparison)
    const f16 = (bits) => {
      const s=(bits>>15)&1, e=(bits>>10)&0x1F, m=bits&0x3FF;
      if(e===0) return 0; if(e===31) return s?0:1;
      return Math.max(0,Math.min(1,(s?-1:1)*Math.pow(2,e-15)*(1+m/1024)));
    };
    const cnnPx = new Float32Array(w*h*3);
    for (let y=0;y 0 ? (10 * Math.log10(1 / mse)).toFixed(2) : '∞';
    return `MSE=${mse.toFixed(5)} PSNR=${psnr}dB`;
  }
}
// ── UI helpers ───────────────────────────────────────────────────────────────
// Collapse/expand the panel that owns this header element, flipping the
// disclosure arrow (▶ when collapsed, ▼ when expanded) if the header has a
// <span> to show it in.
function togglePanel(hdr) {
  const panel = hdr.parentElement;
  panel.classList.toggle('collapsed');
  const arrow = hdr.querySelector('span');
  if (arrow) {
    arrow.textContent = panel.classList.contains('collapsed') ? '▶' : '▼';
  }
}
// ── Init & events ─────────────────────────────────────────────────────────
// Global instance (also referenced by inline onclick handlers in the HTML).
const tester=new CNNv3Tester();
// File pickers for weights and FiLM MLP.
document.getElementById('wFile').addEventListener('change',e=>{if(e.target.files[0])tester.loadWeights(e.target.files[0]);});
document.getElementById('fFile').addEventListener('change',e=>{if(e.target.files[0])tester.loadFilm(e.target.files[0]);});
// Main drop zone: accepts .bin weights (routed by filename), images, or videos.
const mainEl=document.getElementById('mainDrop');
mainEl.addEventListener('dragover',e=>{e.preventDefault();mainEl.classList.add('dragover');});
mainEl.addEventListener('dragleave',()=>mainEl.classList.remove('dragover'));
mainEl.addEventListener('drop',async e=>{
  e.preventDefault();mainEl.classList.remove('dragover');
  for(const f of e.dataTransfer.files){
    if(f.name.endsWith('.bin')){
      // 'film'/'mlp' in the name selects the FiLM loader; anything else is CNN weights.
      if(f.name.includes('film')||f.name.includes('mlp'))tester.loadFilm(f);
      else tester.loadWeights(f);
    } else if(f.type.startsWith('image/'))tester.loadImage(f);
    else if(f.type.startsWith('video/'))tester.loadVideo(f);
  }
});
// Dedicated per-blob drop zones.
['wDrop','fDrop'].forEach(id=>{
  const el=document.getElementById(id);
  el.addEventListener('dragover',e=>{e.preventDefault();el.classList.add('dragover');});
  el.addEventListener('dragleave',()=>el.classList.remove('dragover'));
  el.addEventListener('drop',async e=>{
    e.preventDefault();el.classList.remove('dragover');
    const f=e.dataTransfer.files[0];if(!f)return;
    if(id==='fDrop')tester.loadFilm(f);else tester.loadWeights(f);
  });
});
// Keyboard shortcuts (ignored while typing in inputs):
// space toggles CNN/original view, 'd' toggles diff view.
document.addEventListener('keydown',e=>{
  if(e.target.tagName==='INPUT')return;
  if(e.key===' '){e.preventDefault();tester.viewMode=tester.viewMode===1?0:1;tester.redisplay();}
  else if(e.key==='d'||e.key==='D'){tester.viewMode=tester.viewMode===2?0:2;tester.redisplay();}
});