# ExponentialMovingAvg.py
  1. # improve the robustness of stochastic gradient descend training system using EMA func
  2. import tensorflow as tf
  3. v1 = tf.Variable(0, dtype=tf.float32)
  4. # animate iterations in nn, control decay dynamically
  5. step = tf.Variable(0, trainable=False)
  6. # decay is 0.99 by default
  7. ema = tf.train.ExponentialMovingAverage(0.99, step)
  8. maintain_average_op = ema.apply([v1])
  9. with tf.Session() as sess:
  10. sess.run(tf.global_variables_initializer())
  11. # output [0,0]
  12. print sess.run([v1, ema.average(v1)])
  13. sess.run(tf.assign(v1, 5))
  14. sess.run(maintain_average_op)
  15. # decay = min(0.99, 1+step/10+step) = 0.1, var = 0*0.1+0.9*5 = 4.5
  16. # output [5,4.5]
  17. print sess.run([v1, ema.average(v1)])
  18. sess.run(tf.assign(step, 10000))
  19. sess.run(tf.assign(v1, 10))
  20. sess.run(maintain_average_op)
  21. # decay = min(0.99, 1+step/10+step) = 0.99, var = 4.5*0.99+10*0.01 = 4.555
  22. # output [10, 4.555]
  23. print sess.run([v1, ema.average(v1)])
  24. sess.run(maintain_average_op)
  25. # decay = min(0.99, 1+step/10+step) = 0.99, var = 4.555*0.99+10*0.01 = 4.60945
  26. # output [10, 4.60945]
  27. print sess.run([v1, ema.average(v1)])