Diffstat (limited to 'training/train_cnn.py')
 training/train_cnn.py (-rwxr-xr-x) | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/training/train_cnn.py b/training/train_cnn.py
index e1fd27b..dc14192 100755
--- a/training/train_cnn.py
+++ b/training/train_cnn.py
@@ -335,8 +335,8 @@ def generate_layer_shader(output_path, num_layers, kernel_sizes):
             f.write(f"    }}\n")
         else:
             f.write(f"    else if (params.layer_index == {layer_idx}) {{\n")
-            f.write(f"        let gray_out = {conv_fn}(txt, smplr, uv, uniforms.resolution, gray, weights_layer{layer_idx});\n")
-            f.write(f"        // gray_out in [0,1] (sigmoid activation)\n")
+            f.write(f"        let sum = {conv_fn}(txt, smplr, uv, uniforms.resolution, gray, weights_layer{layer_idx});\n")
+            f.write(f"        let gray_out = 1.0 / (1.0 + exp(-sum)); // Sigmoid activation\n")
             f.write(f"        result = vec4<f32>(gray_out, gray_out, gray_out, 1.0);\n")
             f.write(f"        return mix(original_raw, result, params.blend_amount); // [0,1]\n")
             f.write(f"    }}\n")
@@ -452,7 +452,7 @@ def generate_conv_final_function(kernel_size, output_path):
     with open(output_path, 'a') as f:
         f.write(f"\n// Final layer: 7→1 channel (vec4-optimized)\n")
         f.write(f"// Assumes 'tex' is already normalized to [-1,1]\n")
-        f.write(f"// Output uses sigmoid activation to match PyTorch training\n")
+        f.write(f"// Returns raw sum (activation applied at call site)\n")
         f.write(f"fn cnn_conv{k}x{k}_7to1(\n")
         f.write(f"    tex: texture_2d<f32>,\n")
         f.write(f"    samp: sampler,\n")
@@ -479,7 +479,7 @@ def generate_conv_final_function(kernel_size, output_path):
         f.write(f"        }}\n")
         f.write(f"    }}\n\n")
-        f.write(f"    return 1.0 / (1.0 + exp(-sum));\n")
+        f.write(f"    return sum;\n")
         f.write(f"}}\n")
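
The net effect of this change is that the generated cnn_conv{k}x{k}_7to1 helper now returns the raw convolution sum and the sigmoid is applied by the caller in the layer dispatch code. Below is a minimal NumPy sketch of that split, not the project's code or the generated WGSL; conv_final_raw, sigmoid, patch, and weights are hypothetical stand-ins used only to illustrate where the activation moves.

    # Sketch (assumed names): the conv helper returns the raw weighted sum,
    # and the caller applies the sigmoid, mirroring the generated
    # `let gray_out = 1.0 / (1.0 + exp(-sum));` line in the shader.
    import numpy as np

    def conv_final_raw(patch: np.ndarray, weights: np.ndarray) -> float:
        # Stand-in for the generated 7-to-1 conv function: no activation inside.
        return float(np.sum(patch * weights))

    def sigmoid(x: float) -> float:
        return 1.0 / (1.0 + np.exp(-x))

    rng = np.random.default_rng(0)
    patch = rng.standard_normal((3, 3, 7))    # hypothetical 3x3 window, 7 channels
    weights = rng.standard_normal((3, 3, 7))

    raw = conv_final_raw(patch, weights)      # what the shader function now returns
    gray_out = sigmoid(raw)                   # activation applied at the call site
    assert 0.0 <= gray_out <= 1.0             # same [0,1] range as before the change

Numerically this is the same result as applying the sigmoid inside the conv function; only the place where the activation happens changes, and the generated comments are updated to describe what each piece actually returns.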
