From 89b078797d237405c0a4396b4fa9c9ecec28db2b Mon Sep 17 00:00:00 2001
From: Xinyi Wang
Date: Sat, 31 Oct 2020 07:59:03 -0700
Subject: [PATCH] Disable flaky timing out tests (MWMS host training loop).

PiperOrigin-RevId: 340026157
Change-Id: Ibf94e17f0cd2cf88738d1d08be06cbe68a848ecf
---
 .../keras/distribute/distribute_strategy_test.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/tensorflow/python/keras/distribute/distribute_strategy_test.py b/tensorflow/python/keras/distribute/distribute_strategy_test.py
index b2d5bc550f1..ce0544213b3 100644
--- a/tensorflow/python/keras/distribute/distribute_strategy_test.py
+++ b/tensorflow/python/keras/distribute/distribute_strategy_test.py
@@ -29,6 +29,7 @@ from tensorflow.python.data.experimental.ops.distribute_options import AutoShard
 from tensorflow.python.data.ops import dataset_ops
 from tensorflow.python.data.ops import readers
 from tensorflow.python.distribute import central_storage_strategy
+from tensorflow.python.distribute import collective_all_reduce_strategy
 from tensorflow.python.distribute import combinations as ds_combinations
 from tensorflow.python.distribute import distribution_strategy_context
 from tensorflow.python.distribute import mirrored_strategy
@@ -1884,6 +1885,9 @@ class TestDistributionStrategyWithKerasModels(test.TestCase,
   @ds_combinations.generate(
       combinations.combine(distribution=all_strategies, mode=['eager']))
   def test_host_training_loop(self, distribution):
+    if isinstance(distribution,
+                  collective_all_reduce_strategy.CollectiveAllReduceStrategy):
+      self.skipTest('b/172032817')
     with distribution.scope():
       inputs = keras.Input((10, 10, 3))
       x = keras.layers.Conv2D(3, kernel_size=3)(inputs)
@@ -1910,6 +1914,9 @@ class TestDistributionStrategyWithKerasModels(test.TestCase,
   @ds_combinations.generate(
       combinations.combine(distribution=all_strategies, mode=['eager']))
   def test_host_training_loop_last_partial_execution(self, distribution):
+    if isinstance(distribution,
+                  collective_all_reduce_strategy.CollectiveAllReduceStrategy):
+      self.skipTest('b/172032817')
     with distribution.scope():
       inputs = keras.Input(10)
       outputs = keras.layers.Dense(1)(inputs)
@@ -1934,6 +1941,9 @@ class TestDistributionStrategyWithKerasModels(test.TestCase,
   @ds_combinations.generate(
       combinations.combine(distribution=all_strategies, mode=['eager']))
   def test_host_training_loop_dataset_unknown_size(self, distribution):
+    if isinstance(distribution,
+                  collective_all_reduce_strategy.CollectiveAllReduceStrategy):
+      self.skipTest('b/172032817')
     with distribution.scope():
       inputs = keras.Input(10)
       outputs = keras.layers.Dense(1)(inputs)
@@ -1970,6 +1980,9 @@ class TestDistributionStrategyWithKerasModels(test.TestCase,
   @ds_combinations.generate(
       combinations.combine(distribution=all_strategies, mode=['eager']))
   def test_host_training_loop_truncate_to_epoch(self, distribution):
+    if isinstance(distribution,
+                  collective_all_reduce_strategy.CollectiveAllReduceStrategy):
+      self.skipTest('b/172032817')
     with distribution.scope():
       inputs = keras.Input(10)
       outputs = keras.layers.Dense(1)(inputs)
-- 
GitLab