
TensorFlow basic operations with an NN sample

youchen 7 years ago
parent
commit
9d8052225d
5 changed files with 99 additions and 1 deletion
  1. tests/NNSample01.py (+55, -0)
  2. tests/__init__.py (+0, -0)
  3. tests/forward_propagation.py (+22, -0)
  4. graph_test.py (+1, -1)
  5. tests/tensor_session.py (+21, -0)

+ 55 - 0
tests/NNSample01.py

@@ -0,0 +1,55 @@
+import tensorflow as tf
+from numpy.random import RandomState
+
+# define the size of a batch
+batch_size = 4
+
+# define coefficient matrices
+w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
+w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))
+
+# define placeholders for input and output; 'None' in the shape lets the batch size vary
+x = tf.placeholder(tf.float32, shape=[None, 2], name="x-input")
+y_ = tf.placeholder(tf.float32, shape=[None, 1], name="y-input")
+
+# forward propagation
+a = tf.matmul(x, w1)
+y = tf.matmul(a, w2)
+
+# define the loss (sigmoid: 1/(1+exp(-x))), cross_entropy and train_step
+y = tf.sigmoid(y)
+cross_entropy = -tf.reduce_mean(
+    y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))
+    + (1 - y_) * tf.log(tf.clip_by_value(1 - y, 1e-10, 1.0)))
+train_step = tf.train.AdamOptimizer().minimize(cross_entropy)
+
+# create a simulated dataset with a random number generator
+rdm = RandomState(1)
+dataset_size = 1280
+X = rdm.rand(dataset_size, 2)
+Y = [[int(x1 + x2 < 1)] for (x1, x2) in X]
+
+with tf.Session() as sess:
+    sess.run(tf.global_variables_initializer())
+    print(w1.eval(session=sess))
+    print(sess.run(w2))
+
+    # writer = tf.summary.FileWriter("logs", tf.get_default_graph())
+    # set the number of iteration
+    STEPS = 50000
+    for i in range(STEPS):
+        start = (i * batch_size) % dataset_size
+        end = min(start + batch_size, dataset_size)
+        sess.run(train_step, feed_dict={x: X[start: end], y_: Y[start: end]})
+        if i % 1000 == 0:
+            # evaluate cross entropy on the full dataset at fixed intervals
+            total_cross_entropy = sess.run(cross_entropy, feed_dict={x: X, y_: Y})
+            print("After %d training step(s), cross entropy on all data is %g." % (i, total_cross_entropy))
+            # tf.summary.histogram("iteration-w1", w1)
+            # tf.summary.histogram("iteration-w2", w2)
+
+    print(sess.run(w1))
+    print(sess.run(w2))
+
+
+# writer.close()
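As a sanity check, the clipped cross-entropy above can be reproduced outside the graph with plain NumPy. A minimal sketch; the helper name np_cross_entropy and the sample values are illustrative, not part of the commit:

import numpy as np

def np_cross_entropy(y_true, y_pred, eps=1e-10):
    # same clipped binary cross-entropy formula as the TensorFlow graph above
    return -np.mean(y_true * np.log(np.clip(y_pred, eps, 1.0))
                    + (1 - y_true) * np.log(np.clip(1 - y_pred, eps, 1.0)))

print(np_cross_entropy(np.array([[1.0], [0.0]]), np.array([[0.9], [0.2]])))  # ~0.1643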

+ 0 - 0
tests/__init__.py


+ 22 - 0
tests/forward_propagation.py

@@ -0,0 +1,22 @@
+import tensorflow as tf
+
+# define two variables w1 and w2 as weight matrices; a fixed seed guarantees reproducible results
+
+w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
+w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))
+
+# define the input feature vector as a constant
+# x = tf.constant([[0.7, 0.9]])
+
+# use a placeholder to feed data at run time instead of creating a constant node for every input
+x = tf.placeholder(tf.float32, shape=[3, 2], name="input")
+
+# run forward propagation to obtain the output
+a = tf.matmul(x, w1)
+y = tf.matmul(a, w2)
+
+with tf.Session() as sess:
+    # sess.run(w1.initializer)
+    # sess.run(w2.initializer)
+    sess.run(tf.global_variables_initializer())
+    print (sess.run(y, feed_dict={x: [[0.7, 0.9], [0.1, 0.4], [0.5, 0.8]]}))
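The same forward pass can be verified with NumPy to confirm the matrix shapes. A minimal sketch with made-up weight values (the commit draws w1 and w2 from random_normal):

import numpy as np

x = np.array([[0.7, 0.9], [0.1, 0.4], [0.5, 0.8]])  # the batch fed above, shape (3, 2)
w1 = np.ones((2, 3))  # illustrative values only
w2 = np.ones((3, 1))
y = x.dot(w1).dot(w2)  # (3, 2) @ (2, 3) -> (3, 3); (3, 3) @ (3, 1) -> (3, 1)
print(y)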

+ 1 - 1
graph_test.py

@@ -29,4 +29,4 @@ g = tf.Graph()
 with g.device('/gpu:0'):
     result = a+b
     sess = tf.Session()
-    print (sess.run(result))
+    print (sess.run(result))
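For context, the surrounding graph_test.py pins the add op to a device via g.device. A minimal sketch of the same TF 1.x pattern, using '/cpu:0' so it runs on machines without a GPU (the commit uses '/gpu:0'):

import tensorflow as tf

g = tf.Graph()
with g.as_default(), g.device('/cpu:0'):  # ops created here are placed on the CPU
    a = tf.constant([1.0, 2.0])
    b = tf.constant([3.0, 4.0])
    result = a + b

with tf.Session(graph=g) as sess:
    print(sess.run(result))  # [4. 6.]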

+ 21 - 0
tests/tensor_session.py

@@ -0,0 +1,21 @@
+# a tensor is described by (name, shape, type)
+# data types:
+#   int: tf.int8, int16, int32, int64, uint8
+#   float: tf.float32, float64
+#   bool: tf.bool
+#   complex: tf.complex64, complex128
+import tensorflow as tf
+
+a = tf.constant([1.0, 3.0], name="a")
+b = tf.constant([3.0, 6.0], name="b")
+total = a + b  # avoid shadowing the built-in sum()
+print(tf.add(a, b, name="add"))  # prints the Tensor description, not its value
+with tf.Session().as_default():
+    print(total.eval())
+
+with tf.Session() as sess:
+    result = sess.run(a + b)
+    print(result)
+
+writer = tf.summary.FileWriter("logs", tf.get_default_graph())
+writer.close()
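The (name, shape, type) triple in the comment above maps directly onto a tensor's attributes. A quick sketch under the same TF 1.x API:

import tensorflow as tf

t = tf.constant([1.0, 3.0], name="t")
# a tensor's metadata, matching the (name, shape, type) triple above
print(t.name, t.shape, t.dtype)  # t:0 (2,) <dtype: 'float32'>

The logs directory written by the FileWriter can then be inspected with tensorboard --logdir logs.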