Commit 9b1c649: Make compatible with TensorFlow 0.12

* `ones_initializer` is now callable.
* Replaced `tf.initialize_all_variables` with `tf.global_variables_initializer`.
* Replaced `tf.initialize_variables` with `tf.variables_initializer`.
sergomezcol committed Dec 9, 2016
1 parent 6ee5253 commit 9b1c649
Showing 8 changed files with 22 additions and 19 deletions.
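
The three API renames above are mechanical, but it helps to see them side by side. Below is a minimal sketch, assuming TensorFlow 0.12; the variable `x` and the session usage are illustrative, not taken from this repository.

```python
import tensorflow as tf

# TF 0.12: `tf.ones_initializer` must now be called to obtain an initializer.
x = tf.get_variable("x", shape=[2], initializer=tf.ones_initializer())

with tf.Session() as sess:
  # Was `sess.run(tf.initialize_all_variables())` before TF 0.12.
  sess.run(tf.global_variables_initializer())

  # Was `tf.initialize_variables([x])`; initializes only the listed variables.
  sess.run(tf.variables_initializer([x]))

  print(sess.run(x))  # => [1. 1.]
```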

README.md (3 additions, 0 deletions)

@@ -1,5 +1,8 @@
 # [Learning to Learn](https://arxiv.org/abs/1606.04474) in TensorFlow
 
+Compatible with TensorFlow 0.12.
+
+
 ## Training
 
 ```

convergence_test.py (1 addition, 1 deletion)

@@ -53,7 +53,7 @@ def testSimple(self):
     minimize_ops = optimizer.meta_minimize(problem, 20, learning_rate=1e-2)
     # L2L should solve the simple problem in less than 500 epochs.
     with self.test_session() as sess:
-      sess.run(tf.initialize_all_variables())
+      sess.run(tf.global_variables_initializer())
       cost, _ = train(sess, minimize_ops, 500, 5)
     self.assertLess(cost, 1e-5)

evaluate.py (2 additions, 2 deletions)

@@ -56,10 +56,10 @@ def main(_):
if FLAGS.optimizer == "Adam":
cost_op = problem()
problem_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
problem_reset = tf.initialize_variables(problem_vars)
problem_reset = tf.variables_initializer(problem_vars)

optimizer = tf.train.AdamOptimizer(FLAGS.learning_rate)
optimizer_reset = tf.initialize_variables(optimizer.get_slot_names())
optimizer_reset = tf.variables_initializer(optimizer.get_slot_names())
update = optimizer.minimize(cost_op)
reset = [problem_reset, optimizer_reset]
elif FLAGS.optimizer == "L2L":
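
For the `Adam` branch above, the point of `tf.variables_initializer` is that it takes an explicit list of variables, which is what makes a per-component reset possible. Here is a hedged sketch of the pattern, assuming TensorFlow 0.12; `w`, `b`, and `reset_w` are illustrative names, not from the repository.

```python
import tensorflow as tf

w = tf.get_variable("w", shape=[3], initializer=tf.ones_initializer())
b = tf.get_variable("b", shape=[3], initializer=tf.zeros_initializer)

# Re-initializes only `w`; `b` keeps whatever value it currently holds.
reset_w = tf.variables_initializer([w])

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  sess.run(w.assign([4., 5., 6.]))
  sess.run(reset_w)
  print(sess.run(w))  # => back to ones
```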

meta.py (1 addition, 1 deletion)

@@ -367,7 +367,7 @@ def time_step(t, fx_array, x, state):
     variables = (nest.flatten(state) +
                  x + constants)
     # Empty array as part of the reset process.
-    reset = [tf.initialize_variables(variables), fx_array.close()]
+    reset = [tf.variables_initializer(variables), fx_array.close()]
 
     # Operator to update the parameters and the RNN state after our loop, but
     # during an epoch.
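
The `reset` list above bundles two ops so that a single `sess.run(reset)` both re-initializes the unrolled variables and closes the cost `TensorArray`. A minimal sketch of that pattern, assuming TensorFlow 0.12; `v` and the `TensorArray` construction arguments are stand-ins, not the repository's actual values.

```python
import tensorflow as tf

v = tf.get_variable("v", shape=[], initializer=tf.ones_initializer())
fx_array = tf.TensorArray(tf.float32, size=20, clear_after_read=False)

# Running the list re-initializes `v` and closes the array in one call.
reset = [tf.variables_initializer([v]), fx_array.close()]

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  sess.run(reset)
```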

meta_test.py (8 additions, 8 deletions)

@@ -57,7 +57,7 @@ def testResults(self):
         }))
     minimize_ops = optimizer.meta_minimize(problem, 5)
     with self.test_session() as sess:
-      sess.run(tf.initialize_all_variables())
+      sess.run(tf.global_variables_initializer())
       cost, final_x = train(sess, minimize_ops, 1, 2)
 
     # Torch results
@@ -121,7 +121,7 @@ def testMultiOptimizer(self, net_assignments, net_config):
     minimize_ops = optimizer.meta_minimize(problem, 3,
                                            net_assignments=net_assignments)
     with self.test_session() as sess:
-      sess.run(tf.initialize_all_variables())
+      sess.run(tf.global_variables_initializer())
       train(sess, minimize_ops, 1, 2)
 
   def testSecondDerivatives(self):
@@ -133,7 +133,7 @@ def testSecondDerivatives(self):
     minimize_ops = optimizer.meta_minimize(problem, 3,
                                            second_derivatives=True)
     with self.test_session() as sess:
-      sess.run(tf.initialize_all_variables())
+      sess.run(tf.global_variables_initializer())
       train(sess, minimize_ops, 1, 2)
 
   def testConvolutional(self):
@@ -162,13 +162,13 @@ def convolutional_problem():
net_assignments=[("conv", ["conv/w"])]
)
with self.test_session() as sess:
sess.run(tf.initialize_all_variables())
sess.run(tf.global_variables_initializer())
train(sess, minimize_ops, 1, 2)

def testWhileLoopProblem(self):
"""Tests L2L applied to problem with while loop."""
def while_loop_problem():
x = tf.get_variable("x", shape=[], initializer=tf.ones_initializer)
x = tf.get_variable("x", shape=[], initializer=tf.ones_initializer())

# Strange way of squaring the variable.
_, x_squared = tf.while_loop(
@@ -183,7 +183,7 @@ def while_loop_problem():
net_options={"layers": ()}))
minimize_ops = optimizer.meta_minimize(while_loop_problem, 3)
with self.test_session() as sess:
sess.run(tf.initialize_all_variables())
sess.run(tf.global_variables_initializer())
train(sess, minimize_ops, 1, 2)

def testSaveAndLoad(self):
@@ -203,7 +203,7 @@ def testSaveAndLoad(self):
       minimize_ops = optimizer.meta_minimize(problem, 3)
 
       with self.test_session(graph=g1) as sess:
-        sess.run(tf.initialize_all_variables())
+        sess.run(tf.global_variables_initializer())
         train(sess, minimize_ops, 1, 2)
 
         # Save optimizer.
@@ -222,7 +222,7 @@ def testSaveAndLoad(self):
       minimize_ops = optimizer.meta_minimize(problem, 3)
 
       with self.test_session(graph=g2) as sess:
-        sess.run(tf.initialize_all_variables())
+        sess.run(tf.global_variables_initializer())
         cost_loaded, x_loaded = train(sess, minimize_ops, num_unrolls, num_epochs)
 
       # The last cost should be the same.
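
Beyond the `initialize_all_variables` swaps, the one distinct change in this file is in `while_loop_problem`: `tf.ones_initializer` gains parentheses. A before/after sketch, assuming TensorFlow 0.12:

```python
import tensorflow as tf

# TF <= 0.11: the symbol itself was the initialization function.
#   x = tf.get_variable("x", shape=[], initializer=tf.ones_initializer)
# TF 0.12: calling it returns the initialization function.
x = tf.get_variable("x", shape=[], initializer=tf.ones_initializer())

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  assert sess.run(x) == 1.0
```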

networks_test.py (2 additions, 2 deletions)

@@ -64,7 +64,7 @@ def testResults(self, initializer):
     update, _ = net(gradients, state)
 
     with self.test_session() as sess:
-      sess.run(tf.initialize_all_variables())
+      sess.run(tf.global_variables_initializer())
       update_np = sess.run(update)
       self.assertAllEqual(update_np, np.zeros(shape))
 
@@ -111,7 +111,7 @@ def testResults(self, initializer):
     update, _ = net(gradients, state)
 
     with self.test_session() as sess:
-      sess.run(tf.initialize_all_variables())
+      sess.run(tf.global_variables_initializer())
       update_np = sess.run(update)
       self.assertAllEqual(update_np, np.zeros(shape))
 

nn/batch_norm.py (3 additions, 3 deletions)

@@ -129,7 +129,7 @@ def _set_default_initializer(self, var_name):
"""
if var_name not in self._initializers:
if var_name == self.GAMMA:
self._initializers[self.GAMMA] = tf.ones_initializer
self._initializers[self.GAMMA] = tf.ones_initializer()
elif var_name == self.BETA:
self._initializers[self.BETA] = tf.zeros_initializer

@@ -160,7 +160,7 @@ def _build_statistics_variance(self, input_batch,
           shape=self._mean_shape,
           collections=[tf.GraphKeys.MOVING_AVERAGE_VARIABLES,
                        tf.GraphKeys.VARIABLES],
-          initializer=tf.ones_initializer,
+          initializer=tf.ones_initializer(),
           trainable=False)
 
       def build_batch_stats():
@@ -226,7 +226,7 @@ def _build_statistics_second_moment(self, input_batch,
           shape=self._mean_shape,
           collections=[tf.GraphKeys.MOVING_AVERAGE_VARIABLES,
                        tf.GraphKeys.VARIABLES],
-          initializer=tf.ones_initializer,
+          initializer=tf.ones_initializer(),
           trainable=False)
 
       self._moving_variance = tf.sub(self._moving_second_moment,
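
Note the asymmetry these hunks preserve: only the `ones_initializer` lines gain parentheses, while `tf.zeros_initializer` for `BETA` stays bare, because in TensorFlow 0.12 only `ones_initializer` became a factory that must be called. A hedged sketch of the two forms side by side, with illustrative variable names:

```python
import tensorflow as tf

# In TF 0.12, `ones_initializer` is called but `zeros_initializer` is not.
gamma = tf.get_variable("gamma", shape=[4], initializer=tf.ones_initializer())
beta = tf.get_variable("beta", shape=[4], initializer=tf.zeros_initializer)

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  print(sess.run(gamma))  # => ones
  print(sess.run(beta))   # => zeros
```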

problems.py (2 additions, 2 deletions)

@@ -46,7 +46,7 @@ def build():
"x",
shape=[],
dtype=tf.float32,
initializer=tf.ones_initializer)
initializer=tf.ones_initializer())
return tf.square(x, name="x_squared")

return build
@@ -59,7 +59,7 @@ def get_coordinate(i):
return tf.get_variable("x_{}".format(i),
shape=[],
dtype=tf.float32,
initializer=tf.ones_initializer)
initializer=tf.ones_initializer())

def build():
coordinates = [get_coordinate(i) for i in xrange(num_dims)]
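
To see the updated `problems.py` code in action, here is a hedged usage sketch, assuming TensorFlow 0.12; `simple` mirrors the function in the first hunk above, and the session code around it is illustrative.

```python
import tensorflow as tf

def simple():
  """One-variable quadratic f(x) = x^2, as in the first hunk above."""
  def build():
    x = tf.get_variable("x",
                        shape=[],
                        dtype=tf.float32,
                        initializer=tf.ones_initializer())
    return tf.square(x, name="x_squared")
  return build

cost_op = simple()()  # x starts at 1.0, so the initial cost is 1.0

with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  print(sess.run(cost_op))  # => 1.0
```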
