diff --git a/setup.py b/setup.py
index 5a76eb641..36e7e4a86 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@

 setup(
     name='tensor2tensor',
-    version='1.15.4',
+    version='1.15.5',
     description='Tensor2Tensor',
     long_description=(
         'Tensor2Tensor, or T2T for short, is a library of '
@@ -61,7 +61,7 @@
         'pypng',
         'requests',
         'scipy',
-        'six',
+        'six>=1.12.0',
         'sympy',
         'tensorflow-datasets',
         'tensorflow-gan',
diff --git a/tensor2tensor/layers/common_attention_test.py b/tensor2tensor/layers/common_attention_test.py
index f11b283e5..79aa383ca 100644
--- a/tensor2tensor/layers/common_attention_test.py
+++ b/tensor2tensor/layers/common_attention_test.py
@@ -34,7 +34,7 @@

 tfe = contrib.tfe()
 # from tensorflow.contrib.eager.python import tfe as tfe
-tf.compat.v1.enable_eager_execution()
+tf.enable_eager_execution()


 class CommonAttentionTest(parameterized.TestCase, tf.test.TestCase):
diff --git a/tensor2tensor/layers/common_layers.py b/tensor2tensor/layers/common_layers.py
index c0193fbb4..f6d680bb3 100644
--- a/tensor2tensor/layers/common_layers.py
+++ b/tensor2tensor/layers/common_layers.py
@@ -2743,7 +2743,7 @@ def _fn_with_custom_grad(fn, inputs, grad_fn, use_global_vars=False):
   Returns:
     fn(*inputs)
   """
-  vs = tf.compat.v1.get_variable_scope()
+  vs = tf.get_variable_scope()
   get_vars_fn = (
       vs.global_variables if use_global_vars else vs.trainable_variables)
   len_before_vars = len(get_vars_fn())
@@ -3145,7 +3145,7 @@ def grad_fn(inputs, variables, outputs, output_grads):

   @fn_with_custom_grad(grad_fn)
   def fn_with_recompute(*args):
-    cached_vs.append(tf.compat.v1.get_variable_scope())
+    cached_vs.append(tf.get_variable_scope())
     cached_arg_scope.append(contrib.framework().current_arg_scope())
     return fn(*args)

@@ -3160,7 +3160,7 @@ def dense(x, units, **kwargs):
     # We need to find the layer parameters using scope name for the layer, so
     # check that the layer is named. Otherwise parameters for different layers
     # may get mixed up.
-    layer_name = tf.compat.v1.get_variable_scope().name
+    layer_name = tf.get_variable_scope().name
     if (not layer_name) or ("name" not in kwargs):
       raise ValueError(
           "Variable scope and layer name cannot be empty. Actual: "
@@ -3491,7 +3491,7 @@ def should_generate_summaries():
   if name_scope and "while/" in name_scope:
     # Summaries don't work well within tf.while_loop()
     return False
-  if tf.compat.v1.get_variable_scope().reuse:
+  if tf.get_variable_scope().reuse:
     # Avoid generating separate summaries for different data shards
     return False
   return True
diff --git a/tensor2tensor/layers/common_layers_test.py b/tensor2tensor/layers/common_layers_test.py
index d67e49650..1cc3c1b18 100644
--- a/tensor2tensor/layers/common_layers_test.py
+++ b/tensor2tensor/layers/common_layers_test.py
@@ -28,7 +28,7 @@

 import tensorflow.compat.v1 as tf

-tf.compat.v1.enable_eager_execution()
+tf.enable_eager_execution()


 class CommonLayersTest(parameterized.TestCase, tf.test.TestCase):
diff --git a/tensor2tensor/utils/avg_checkpoints.py b/tensor2tensor/utils/avg_checkpoints.py
index e4bf366be..c2c134670 100644
--- a/tensor2tensor/utils/avg_checkpoints.py
+++ b/tensor2tensor/utils/avg_checkpoints.py
@@ -114,4 +114,4 @@ def main(_):


 if __name__ == "__main__":
-  tf.compat.v1.app.run()
+  tf.app.run()