author     skal <pascal.massimino@gmail.com>  2026-02-10 07:36:32 +0100
committer  skal <pascal.massimino@gmail.com>  2026-02-10 07:36:32 +0100
commit     c51c146da9590845b864cbba3a7317c5b5bed56a (patch)
tree       80fda2cad06622f367ae004527e4bea21d687e68 /doc/CNN.py
parent     dcd52c3c595c1f37229b880fad11248b98bbced1 (diff)
initial doc for the CNN project
Diffstat (limited to 'doc/CNN.py')
-rw-r--r--  doc/CNN.py  244
1 file changed, 244 insertions, 0 deletions
diff --git a/doc/CNN.py b/doc/CNN.py
new file mode 100644
index 0000000..9952c97
--- /dev/null
+++ b/doc/CNN.py
@@ -0,0 +1,244 @@
+# Python source code - Rory McHenry
+
+import tensorflow as tf
+import numpy as np
+from PIL import Image
+import os
+
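+# Training hyper-parameters, plus the pixel dimensions (W x H) of the
+# training pair min.png (input) / mout.png (target).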
+learning_rate = 0.1
+training_iters = 50
+batch_size = 1
+display_step = 5
+W = 799
+H = 449
+
+im = Image.open('min.png')
+target = Image.open('mout.png')
+
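+# Pack input and target into [1, H, W, 4] tensors: RGB is normalized to
+# [0, 1] and the 4th (alpha) channel is left at 1, so every pixel is a
+# 4-vector -- a vec4 in the exported shader.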
+x = np.ones([1, H, W, 4])
+x[0, :, :, 0:3] = np.array(im)[:, :, 0:3].astype(np.float32) / 255
+y = np.ones([1, H, W, 4])
+y[0, :, :, 0:3] = np.array(target)[:, :, 0:3].astype(np.float32) / 255
+
+x = tf.constant(x, dtype=tf.float32, shape=[1, H, W, 4])
+y = tf.constant(y, dtype=tf.float32, shape=[1, H, W, 4])
+
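+# Dropout placeholder; declared here but not actually applied inside shaderNet.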
+keep_prob = tf.placeholder(tf.float32)
+
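+# One layer: 3x3 'SAME' convolution, bias add, tanh activation.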
+def conv2d(x, W, b, strides=1):
+ x = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')
+ x = tf.nn.bias_add(x, b)
+ return tf.tanh(x)
+
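+# The network: four hidden 3x3 conv layers followed by an output conv whose
+# result is added back to the input, i.e. the net learns a residual
+# correction on top of the original image.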
+def shaderNet(x, weights, biases, dropout):
+
+ conv1 = conv2d(x , weights['wc1'], biases['bc1'])
+ conv2 = conv2d(conv1, weights['wc2'], biases['bc2'])
+ conv3 = conv2d(conv2, weights['wc3'], biases['bc3'])
+ conv4 = conv2d(conv3, weights['wc4'], biases['bc4'])
+
+    return x + conv2d(conv4, weights['out'], biases['out'])
+
+
+
+weights = {
+ 'wc1': tf.Variable(tf.random_normal([3, 3, 4, 4], stddev=.1)),
+ 'wc2': tf.Variable(tf.random_normal([3, 3, 4, 4], stddev=.1)),
+ 'wc3': tf.Variable(tf.random_normal([3, 3, 4, 4], stddev=.1)),
+ 'wc4': tf.Variable(tf.random_normal([3, 3, 4, 4], stddev=.1)),
+ 'out': tf.Variable(tf.random_normal([3, 3, 4, 4], stddev=.1))
+}
+
+biases = {
+ 'bc1': tf.Variable(tf.random_normal([4], stddev=.01)),
+ 'bc2': tf.Variable(tf.random_normal([4], stddev=.01)),
+ 'bc3': tf.Variable(tf.random_normal([4], stddev=.01)),
+ 'bc4': tf.Variable(tf.random_normal([4], stddev=.01)),
+ 'out': tf.Variable(tf.random_normal([4], stddev=.01))
+}
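+# Every kernel is 3x3 with 4 input and 4 output channels (RGBA), so each
+# spatial tap is a 4x4 matrix -- exactly one GLSL mat4 in the exported shader.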
+
+pred = shaderNet(x, weights, biases, keep_prob)
+
+
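+# Per-pixel mean-squared error against the target image, minimized with
+# (proximal) gradient descent on the single training pair.
+# (tf.losses.mean_squared_error is the non-contrib equivalent of this call.)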
+cost = tf.reduce_mean(tf.contrib.losses.mean_squared_error(pred, y))
+optimizer = tf.train.ProximalGradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)
+
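+# Note: this argmax-based "accuracy" is a leftover from classification
+# examples and is not meaningful for per-pixel regression; the MSE cost is
+# the quantity that matters here.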
+correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
+accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
+
+init = tf.global_variables_initializer()
+
+saver = tf.train.Saver()
+
+with tf.Session() as sess:
+
+
+    # Restore previously trained weights; on a first run (no checkpoint yet),
+    # comment out the restore and run the initializer instead.
+    saver.restore(sess, 'C:/Users/rory/py/modelaa.ckpt')
+    #sess.run(init)
+
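+    # Training loop: one full-image gradient step per iteration; every
+    # display_step steps, log the loss/"accuracy" and write the current
+    # prediction to out.png.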
+ step = 1
+ while step * batch_size < training_iters:
+ sess.run(optimizer)
+ if step % display_step == 0:
+ loss, acc = sess.run([cost, accuracy])
+ print("Iter " + str(step*batch_size) + ", Minibatch Loss= " + \
+ "{:.6f}".format(loss) + ", Training Accuracy= " + \
+ "{:.5f}".format(acc))
+
+ img = np.clip(sess.run(pred),0,1)
+ img = Image.fromarray((img[0,:,:,0:3]*255).astype(np.uint8), "RGB")
+ img.save(os.path.join(os.getcwd(),"out.png"))
+
+ step += 1
+ print("Optimization Finished!")
+ print("Testing Accuracy:", \
+ sess.run(accuracy))
+
+ save_path = saver.save(sess, 'C:/Users/rory/py/modelaa.ckpt')
+
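+    # Export the trained network as a chain of Shadertoy passes: each 3x3
+    # convolution becomes 9 texture taps, each multiplied by a mat4 (the
+    # transposed 4x4 kernel slice at that tap); the bias is added and tanh
+    # applied in mainImage. The first pass samples the source image through
+    # an im() helper, the hidden passes sample the previous buffer via
+    # iChannel0, and the final pass uses iMouse.x to split the screen between
+    # the original image and the filtered result.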
+    def glsl_mat4(w, kx, ky):
+        # Flatten the 4x4 kernel slice at tap (kx, ky) into mat4() arguments.
+        return ",".join(np.transpose(w[kx + 1, ky + 1]).reshape(16).astype(str))
+
+    def conv_line(b):
+        # The mainImage() statement applying the bias and the tanh activation.
+        return ' fragColor = tanh(conv3x3(fragCoord)+vec4(' + ",".join(b.astype(str)) + '));'
+
+    def emit_pass(w, sample_fmt, main_lines):
+        # Print one complete Shadertoy pass: conv3x3() with 9 mat4 taps,
+        # the activation helpers, and the given mainImage() body.
+        print('vec4 conv3x3(vec2 fragCoord) {')
+        print('vec4 res = vec4(0);')
+        for dx in range(-1, 2):
+            for dy in range(-1, 2):
+                tap = '(fragCoord + vec2(' + str(dx) + ',' + str(dy) + ')) / iChannelResolution[0].xy'
+                print('res += ' + sample_fmt % tap + '*mat4(' + glsl_mat4(w, dx, dy) + ');')
+        print('return res;')
+        print('}')
+        print('vec4 ReLU(vec4 i) {')
+        print(' return max(vec4(0),i);')
+        print('}')
+        print('vec4 sigmoid(vec4 i) {')
+        print(' return 1./(1.+exp(-i));')
+        print('}')
+        print('vec4 tanh(vec4 i) {')
+        print(' return (exp(2.*i)-1.)/(exp(2.*i)+1.);')
+        print('}')
+        print('void mainImage( out vec4 fragColor, in vec2 fragCoord )')
+        print('{')
+        for line in main_lines:
+            print(line)
+        print('}')
+
+    # First layer: sample the source image through im().
+    emit_pass(sess.run(weights['wc1']), 'im(%s)',
+              [conv_line(sess.run(biases['bc1']))])
+
+    # Hidden layers: sample the previous pass through iChannel0.
+    for name in ('c2', 'c3', 'c4'):
+        emit_pass(sess.run(weights['w' + name]), 'texture(iChannel0,%s)',
+                  [conv_line(sess.run(biases['b' + name]))])
+
+    # Output layer: iMouse.x splits the screen between input and result.
+    b = sess.run(biases['out'])
+    emit_pass(sess.run(weights['out']), 'texture(iChannel0,%s)',
+              [' fragColor = im(uv);',
+               ' if(fragCoord.x>iMouse.x){',
+               conv_line(b),
+               ' }'])