From d16f020329d15a887fd8100cbc1bf8ecbc87550b Mon Sep 17 00:00:00 2001
From: Michael Reneer
Date: Mon, 25 Jul 2022 10:48:07 -0700
Subject: [PATCH] Fix usage of logging API.

PiperOrigin-RevId: 463123944
---
 .../tf_estimator_evaluation_example.py        | 3 +--
 tutorials/lm_dpsgd_tutorial.py                | 3 +--
 tutorials/mnist_dpsgd_tutorial_vectorized.py  | 3 +--
 tutorials/mnist_lr_tutorial.py                | 3 +--
 tutorials/walkthrough/mnist_scratch.py        | 3 +--
 5 files changed, 5 insertions(+), 10 deletions(-)

diff --git a/tensorflow_privacy/privacy/privacy_tests/membership_inference_attack/tf_estimator_evaluation_example.py b/tensorflow_privacy/privacy/privacy_tests/membership_inference_attack/tf_estimator_evaluation_example.py
index 1481af58..2b2d87ba 100644
--- a/tensorflow_privacy/privacy/privacy_tests/membership_inference_attack/tf_estimator_evaluation_example.py
+++ b/tensorflow_privacy/privacy/privacy_tests/membership_inference_attack/tf_estimator_evaluation_example.py
@@ -92,8 +92,7 @@ def load_cifar10():
 
 
 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.ERROR)
+  logging.set_verbosity(logging.ERROR)
 
   # Load training and test data.
   x_train, y_train, x_test, y_test = load_cifar10()
diff --git a/tutorials/lm_dpsgd_tutorial.py b/tutorials/lm_dpsgd_tutorial.py
index a512171e..0f840200 100644
--- a/tutorials/lm_dpsgd_tutorial.py
+++ b/tutorials/lm_dpsgd_tutorial.py
@@ -163,8 +163,7 @@ def compute_epsilon(steps):
 
 
 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.INFO)
+  logging.set_verbosity(logging.INFO)
 
   if FLAGS.batch_size % FLAGS.microbatches != 0:
     raise ValueError('Number of microbatches should divide evenly batch_size')
diff --git a/tutorials/mnist_dpsgd_tutorial_vectorized.py b/tutorials/mnist_dpsgd_tutorial_vectorized.py
index 393e2399..21f6163a 100644
--- a/tutorials/mnist_dpsgd_tutorial_vectorized.py
+++ b/tutorials/mnist_dpsgd_tutorial_vectorized.py
@@ -146,8 +146,7 @@ def load_mnist():
 
 
 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.INFO)
+  logging.set_verbosity(logging.INFO)
 
   if FLAGS.dpsgd and FLAGS.batch_size % FLAGS.microbatches != 0:
     raise ValueError('Number of microbatches should divide evenly batch_size')
diff --git a/tutorials/mnist_lr_tutorial.py b/tutorials/mnist_lr_tutorial.py
index 2268bf4c..89546df4 100644
--- a/tutorials/mnist_lr_tutorial.py
+++ b/tutorials/mnist_lr_tutorial.py
@@ -183,8 +183,7 @@ def print_privacy_guarantees(epochs, batch_size, samples, noise_multiplier):
 
 
 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.INFO)
+  logging.set_verbosity(logging.INFO)
 
   if FLAGS.data_l2_norm <= 0:
     raise ValueError('data_l2_norm must be positive.')
diff --git a/tutorials/walkthrough/mnist_scratch.py b/tutorials/walkthrough/mnist_scratch.py
index 9cf83414..5583533c 100644
--- a/tutorials/walkthrough/mnist_scratch.py
+++ b/tutorials/walkthrough/mnist_scratch.py
@@ -89,8 +89,7 @@ def load_mnist():
 
 
 def main(unused_argv):
-  logger = tf.get_logger()
-  logger.set_level(logging.INFO)
+  logging.set_verbosity(logging.INFO)
 
   # Load training and test data.
   train_data, train_labels, test_data, test_labels = load_mnist()
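
Each hunk applies the same one-line migration: rather than fetching a logger
object via tf.get_logger() and calling a setter on it, verbosity is set once
through the module-level logging API. A minimal sketch of the resulting
pattern, assuming these tutorials import absl's logging module (which
provides set_verbosity and the INFO/ERROR level constants):

    from absl import logging

    def main(unused_argv):
      # Set the global log verbosity through absl's logging API; the tutorials
      # use INFO, the membership inference example uses ERROR.
      logging.set_verbosity(logging.INFO)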