Diffstat (limited to 'training')
-rwxr-xr-x  training/train_cnn.py | 20
1 file changed, 8 insertions(+), 12 deletions(-)
diff --git a/training/train_cnn.py b/training/train_cnn.py
index d974ce7..e1fd27b 100755
--- a/training/train_cnn.py
+++ b/training/train_cnn.py
@@ -271,7 +271,7 @@ class SimpleCNN(nn.Module):
# Final layer (grayscale output)
final_input = torch.cat([out, x_coords, y_coords, gray], dim=1)
out = self.layers[-1](final_input) # [B,1,H,W]
- out = torch.clamp(out, 0.0, 1.0) # Clip to [0,1]
+ out = torch.sigmoid(out) # Map to [0,1] with smooth gradients
return out.expand(-1, 3, -1, -1)
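
A minimal PyTorch sketch (illustrative, not part of the patch) of why the swap matters: clamp has zero gradient wherever the pre-activation leaves [0,1], so saturated pixels stop receiving a learning signal, while sigmoid keeps a smooth, nonzero gradient everywhere.

import torch

x = torch.tensor([-2.0, 0.5, 3.0], requires_grad=True)

# clamp: gradient is 1 inside [0,1] and exactly 0 outside
torch.clamp(x, 0.0, 1.0).sum().backward()
print(x.grad)  # tensor([0., 1., 0.])

# sigmoid: gradient sigma(x)*(1-sigma(x)) is nonzero for all x
x.grad = None
torch.sigmoid(x).sum().backward()
print(x.grad)  # approx tensor([0.1050, 0.2350, 0.0452])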
@@ -336,7 +336,7 @@ def generate_layer_shader(output_path, num_layers, kernel_sizes):
else:
f.write(f" else if (params.layer_index == {layer_idx}) {{\n")
f.write(f" let gray_out = {conv_fn}(txt, smplr, uv, uniforms.resolution, gray, weights_layer{layer_idx});\n")
- f.write(f" // gray_out in [0,1] (clamped to match PyTorch training)\n")
+ f.write(f" // gray_out in [0,1] (sigmoid activation)\n")
f.write(f" result = vec4<f32>(gray_out, gray_out, gray_out, 1.0);\n")
f.write(f" return mix(original_raw, result, params.blend_amount); // [0,1]\n")
f.write(f" }}\n")
@@ -452,7 +452,7 @@ def generate_conv_final_function(kernel_size, output_path):
with open(output_path, 'a') as f:
f.write(f"\n// Final layer: 7→1 channel (vec4-optimized)\n")
f.write(f"// Assumes 'tex' is already normalized to [-1,1]\n")
- f.write(f"// Output clamped to [0,1] to match PyTorch training\n")
+ f.write(f"// Output uses sigmoid activation to match PyTorch training\n")
f.write(f"fn cnn_conv{k}x{k}_7to1(\n")
f.write(f" tex: texture_2d<f32>,\n")
f.write(f" samp: sampler,\n")
@@ -479,7 +479,7 @@ def generate_conv_final_function(kernel_size, output_path):
f.write(f" }}\n")
f.write(f" }}\n\n")
- f.write(f" return clamp(sum, 0.0, 1.0);\n")
+ f.write(f" return 1.0 / (1.0 + exp(-sum));\n")
f.write(f"}}\n")
@@ -631,12 +631,10 @@ def train(args):
with open(conv_path, 'r') as f:
content = f.read()
- # Generate 7to1 final layer with clamp (all kernel sizes)
+ # Generate 7to1 final layer with sigmoid (all kernel sizes)
if f"cnn_conv{ks}x{ks}_7to1" not in content:
generate_conv_final_function(ks, conv_path)
- print(f"Added 7to1 variant with clamp to {conv_path}")
- elif "clamp(sum, 0.0, 1.0)" not in content:
- print(f"Warning: {conv_path} has 7to1 but missing clamp - manual fix needed")
+ print(f"Added 7to1 variant with sigmoid to {conv_path}")
print("Training complete!")
@@ -687,12 +685,10 @@ def export_from_checkpoint(checkpoint_path, output_path=None):
with open(conv_path, 'r') as f:
content = f.read()
- # Generate 7to1 final layer with clamp (all kernel sizes)
+ # Generate 7to1 final layer with sigmoid (all kernel sizes)
if f"cnn_conv{ks}x{ks}_7to1" not in content:
generate_conv_final_function(ks, conv_path)
- print(f"Added 7to1 variant with clamp to {conv_path}")
- elif "clamp(sum, 0.0, 1.0)" not in content:
- print(f"Warning: {conv_path} has 7to1 but missing clamp - manual fix needed")
+ print(f"Added 7to1 variant with sigmoid to {conv_path}")
print("Export complete!")