
Bump T2T version to 1.15.5
With this change, Travis will turn green.

When we import tensorflow.compat.v1 as tf, we shouldn't call tf.compat.v1.* anymore; the v1 symbols are already available directly on tf.
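A minimal sketch of the pattern this change enforces, using only the alias and calls that appear in the diff:

    # The affected files already alias the v1 API at import time:
    import tensorflow.compat.v1 as tf

    # With that alias, v1 symbols are reachable directly on `tf`,
    # so the extra `compat.v1` hop is redundant:
    tf.enable_eager_execution()    # rather than tf.compat.v1.enable_eager_execution()
    vs = tf.get_variable_scope()   # rather than tf.compat.v1.get_variable_scope()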

Also, the following change to unclog Travis: six>=1.12.0 is required by tf-hub 0.8.0, and one of the core dependencies seems to be installing tf-hub, so pinning tf-hub to 0.7.0 (which lives in the extras section) doesn't seem to help. Instead, the core requirement on six is raised to six>=1.12.0.
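For illustration, a minimal setup.py sketch of why the floor has to live in the core requirements rather than in extras (the 'hub' extras key and the 0.7.0 pin below are assumptions for the sketch, not the actual tensor2tensor setup.py):

    from setuptools import setup

    setup(
        name='tensor2tensor',
        version='1.15.5',
        # Constraints here apply to every install, so this floor is what
        # actually satisfies tf-hub 0.8.0's requirement on six.
        install_requires=[
            'six>=1.12.0',
        ],
        extras_require={
            # A pin here only takes effect for `pip install tensor2tensor[hub]`;
            # it cannot stop a core dependency from pulling in a newer tf-hub.
            'hub': ['tensorflow-hub==0.7.0'],
        },
    )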

Locally, the previous setup.py installs fine; it only errors on Travis, for some reason.

PiperOrigin-RevId: 307203166
afrozenator authored and copybara-github committed Apr 18, 2020
1 parent: 12d63a3 · commit: f5d7374
Showing 5 changed files with 9 additions and 9 deletions.
4 changes: 2 additions & 2 deletions setup.py
@@ -5,7 +5,7 @@
 
 setup(
     name='tensor2tensor',
-    version='1.15.4',
+    version='1.15.5',
     description='Tensor2Tensor',
     long_description=(
         'Tensor2Tensor, or T2T for short, is a library of '
@@ -61,7 +61,7 @@
         'pypng',
         'requests',
         'scipy',
-        'six',
+        'six>=1.12.0',
         'sympy',
         'tensorflow-datasets',
         'tensorflow-gan',
2 changes: 1 addition & 1 deletion tensor2tensor/layers/common_attention_test.py
@@ -34,7 +34,7 @@
 
 tfe = contrib.tfe()
 # from tensorflow.contrib.eager.python import tfe as tfe
-tf.compat.v1.enable_eager_execution()
+tf.enable_eager_execution()
 
 
 class CommonAttentionTest(parameterized.TestCase, tf.test.TestCase):
Expand Down
8 changes: 4 additions & 4 deletions tensor2tensor/layers/common_layers.py
@@ -2743,7 +2743,7 @@ def _fn_with_custom_grad(fn, inputs, grad_fn, use_global_vars=False):
   Returns:
     fn(*inputs)
   """
-  vs = tf.compat.v1.get_variable_scope()
+  vs = tf.get_variable_scope()
   get_vars_fn = (
       vs.global_variables if use_global_vars else vs.trainable_variables)
   len_before_vars = len(get_vars_fn())
@@ -3145,7 +3145,7 @@ def grad_fn(inputs, variables, outputs, output_grads):
 
   @fn_with_custom_grad(grad_fn)
   def fn_with_recompute(*args):
-    cached_vs.append(tf.compat.v1.get_variable_scope())
+    cached_vs.append(tf.get_variable_scope())
     cached_arg_scope.append(contrib.framework().current_arg_scope())
     return fn(*args)
 
@@ -3160,7 +3160,7 @@ def dense(x, units, **kwargs):
   # We need to find the layer parameters using scope name for the layer, so
   # check that the layer is named. Otherwise parameters for different layers
   # may get mixed up.
-  layer_name = tf.compat.v1.get_variable_scope().name
+  layer_name = tf.get_variable_scope().name
   if (not layer_name) or ("name" not in kwargs):
     raise ValueError(
         "Variable scope and layer name cannot be empty. Actual: "
@@ -3491,7 +3491,7 @@ def should_generate_summaries():
   if name_scope and "while/" in name_scope:
     # Summaries don't work well within tf.while_loop()
     return False
-  if tf.compat.v1.get_variable_scope().reuse:
+  if tf.get_variable_scope().reuse:
     # Avoid generating separate summaries for different data shards
     return False
   return True
2 changes: 1 addition & 1 deletion tensor2tensor/layers/common_layers_test.py
@@ -28,7 +28,7 @@
 
 import tensorflow.compat.v1 as tf
 
-tf.compat.v1.enable_eager_execution()
+tf.enable_eager_execution()
 
 
 class CommonLayersTest(parameterized.TestCase, tf.test.TestCase):
2 changes: 1 addition & 1 deletion tensor2tensor/utils/avg_checkpoints.py
@@ -114,4 +114,4 @@ def main(_):
 
 
 if __name__ == "__main__":
-  tf.compat.v1.app.run()
+  tf.app.run()
