This repository was archived by the owner on Jul 7, 2023. It is now read-only.

Commit f5d7374

afrozenator authored and copybara-github committed
Bump T2T version to 1.15.5
With this change Travis will turn green. Since these files already import tensorflow.compat.v1 as tf, we shouldn't prefix calls with tf.compat.v1 anymore.

Also, to unclog Travis: six>=1.12.0 is required by tf-hub 0.8.0, and one of the core dependencies seems to be installing tf-hub, so pinning tf-hub to 0.7.0 (which is in the extras section) doesn't seem to help. The previous setup.py installs fine locally but errors on Travis for some reason.

PiperOrigin-RevId: 307203166
1 parent 12d63a3 commit f5d7374
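A minimal sketch of the aliasing convention the commit message refers to (assuming a TensorFlow build that provides the tensorflow.compat.v1 module; this snippet is illustrative and not part of the diff):

import tensorflow.compat.v1 as tf

# `tf` is already the v1 compatibility module here, so v1 symbols are
# reachable directly; prefixing them with compat.v1 again is redundant.
tf.enable_eager_execution()
scope = tf.get_variable_scope()  # rather than tf.compat.v1.get_variable_scope()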

5 files changed: +9 −9 lines

setup.py

+2 −2

@@ -5,7 +5,7 @@
 
 setup(
     name='tensor2tensor',
-    version='1.15.4',
+    version='1.15.5',
     description='Tensor2Tensor',
     long_description=(
         'Tensor2Tensor, or T2T for short, is a library of '
@@ -61,7 +61,7 @@
         'pypng',
         'requests',
         'scipy',
-        'six',
+        'six>=1.12.0',
         'sympy',
         'tensorflow-datasets',
         'tensorflow-gan',
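On the six pin: as the commit message notes, a version constraint that only appears in extras_require does not restrict what a core dependency installs transitively, which is why the floor has to live in install_requires. A hypothetical, minimal setup() layout illustrating that distinction (the package name and the 'tf_hub' extra below are made up for illustration, not taken from the real setup.py):

from setuptools import setup

setup(
    name='example-package',
    # Constraints here apply to every install, so this is where the
    # six floor required by tf-hub 0.8.0 has to go.
    install_requires=[
        'six>=1.12.0',
    ],
    # Constraints here only apply when the extra is explicitly requested
    # (e.g. pip install example-package[tf_hub]); they cannot hold back a
    # tensorflow-hub that a core dependency pulls in on its own.
    extras_require={
        'tf_hub': ['tensorflow-hub==0.7.0'],
    },
)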

tensor2tensor/layers/common_attention_test.py

+1 −1

@@ -34,7 +34,7 @@
 
 tfe = contrib.tfe()
 # from tensorflow.contrib.eager.python import tfe as tfe
-tf.compat.v1.enable_eager_execution()
+tf.enable_eager_execution()
 
 
 class CommonAttentionTest(parameterized.TestCase, tf.test.TestCase):

tensor2tensor/layers/common_layers.py

+4 −4

@@ -2743,7 +2743,7 @@ def _fn_with_custom_grad(fn, inputs, grad_fn, use_global_vars=False):
   Returns:
     fn(*inputs)
   """
-  vs = tf.compat.v1.get_variable_scope()
+  vs = tf.get_variable_scope()
   get_vars_fn = (
       vs.global_variables if use_global_vars else vs.trainable_variables)
   len_before_vars = len(get_vars_fn())
@@ -3145,7 +3145,7 @@ def grad_fn(inputs, variables, outputs, output_grads):
 
   @fn_with_custom_grad(grad_fn)
   def fn_with_recompute(*args):
-    cached_vs.append(tf.compat.v1.get_variable_scope())
+    cached_vs.append(tf.get_variable_scope())
     cached_arg_scope.append(contrib.framework().current_arg_scope())
     return fn(*args)
 
@@ -3160,7 +3160,7 @@ def dense(x, units, **kwargs):
   # We need to find the layer parameters using scope name for the layer, so
   # check that the layer is named. Otherwise parameters for different layers
   # may get mixed up.
-  layer_name = tf.compat.v1.get_variable_scope().name
+  layer_name = tf.get_variable_scope().name
   if (not layer_name) or ("name" not in kwargs):
     raise ValueError(
         "Variable scope and layer name cannot be empty. Actual: "
@@ -3491,7 +3491,7 @@ def should_generate_summaries():
   if name_scope and "while/" in name_scope:
     # Summaries don't work well within tf.while_loop()
     return False
-  if tf.compat.v1.get_variable_scope().reuse:
+  if tf.get_variable_scope().reuse:
     # Avoid generating separate summaries for different data shards
     return False
   return True

tensor2tensor/layers/common_layers_test.py

+1 −1

@@ -28,7 +28,7 @@
 
 import tensorflow.compat.v1 as tf
 
-tf.compat.v1.enable_eager_execution()
+tf.enable_eager_execution()
 
 
 class CommonLayersTest(parameterized.TestCase, tf.test.TestCase):

tensor2tensor/utils/avg_checkpoints.py

+1 −1

@@ -114,4 +114,4 @@ def main(_):
 
 
 if __name__ == "__main__":
-  tf.compat.v1.app.run()
+  tf.app.run()

0 commit comments