
Commit e982a7b

tf-transform-team authored and tfx-copybara committed

cleanup of deprecated test methods

PiperOrigin-RevId: 716245406

1 parent a14023c · commit e982a7b
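Context for the change: `assertRaisesRegexp` has been a deprecated alias of `assertRaisesRegex` since Python 3.2, and the alias was removed outright in Python 3.12, so every hunk below is the same mechanical rename (occasionally with the call reflowed so the closing parenthesis lands on its own line). A minimal sketch of the before/after pattern, using a hypothetical test case rather than code from this commit:

import unittest


class DeprecatedAliasExample(unittest.TestCase):
  # Hypothetical test, for illustration only; not part of the commit.

  def test_error_message_is_checked(self):
    # Deprecated spelling (warns on Python < 3.12, AttributeError on 3.12+):
    #   with self.assertRaisesRegexp(ValueError, 'does not match'):
    # Current spelling; the behavior is identical:
    with self.assertRaisesRegex(ValueError, 'does not match'):
      raise ValueError('key does not match allowed pattern')


if __name__ == '__main__':
  unittest.main()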

10 files changed, with 41 additions and 31 deletions.

tensorflow_transform/beam/analyzer_cache_test.py

Lines changed: 3 additions & 2 deletions
@@ -55,8 +55,9 @@ def test_validate_dataset_keys(self):
     })
 
     for key in {analyzer_cache.DatasetKey(k) for k in ('^foo^', 'foo 1')}:
-      with self.assertRaisesRegexp(
-          ValueError, 'Dataset key .* does not match allowed pattern:'):
+      with self.assertRaisesRegex(
+          ValueError, 'Dataset key .* does not match allowed pattern:'
+      ):
         analyzer_cache.validate_dataset_keys({key})
 
   @test_case.named_parameters(

tensorflow_transform/beam/bucketize_integration_test.py

Lines changed: 6 additions & 4 deletions
@@ -418,8 +418,9 @@ def no_assert():
 
     assertion = no_assert()
     if input_dtype == tf.float16:
-      assertion = self.assertRaisesRegexp(
-          TypeError, '.*DataType float16 not in list of allowed values.*')
+      assertion = self.assertRaisesRegex(
+          TypeError, '.*DataType float16 not in list of allowed values.*'
+      )
 
     with assertion:
       self.assertAnalyzeAndTransformResults(
@@ -504,8 +505,9 @@ def no_assert():
 
     assertion = no_assert()
     if input_dtype == tf.float16:
-      assertion = self.assertRaisesRegexp(
-          TypeError, '.*DataType float16 not in list of allowed values.*')
+      assertion = self.assertRaisesRegex(
+          TypeError, '.*DataType float16 not in list of allowed values.*'
+      )
 
     with assertion:
       self.assertAnalyzeAndTransformResults(

tensorflow_transform/beam/impl_test.py

Lines changed: 11 additions & 7 deletions
@@ -1463,8 +1463,9 @@ def preprocessing_fn(inputs):
     expected_data = [{'x_scaled': float('nan')}]
     expected_metadata = tft.DatasetMetadata.from_feature_spec(
         {'x_scaled': tf.io.FixedLenFeature([], tf.float32)})
-    with self.assertRaisesRegexp(  # pylint: disable=g-error-prone-assert-raises
-        ValueError, 'output_min must be less than output_max'):
+    with self.assertRaisesRegex(  # pylint: disable=g-error-prone-assert-raises
+        ValueError, 'output_min must be less than output_max'
+    ):
       self.assertAnalyzeAndTransformResults(input_data, input_metadata,
                                             preprocessing_fn, expected_data,
                                             expected_metadata)
@@ -4656,8 +4657,9 @@ def preprocessing_fn(inputs):
                                       preprocessing_fn, expected_outputs)
 
   def testEmptySchema(self):
-    with self.assertRaisesRegexp(  # pylint: disable=g-error-prone-assert-raises
-        ValueError, 'The input metadata is empty.'):
+    with self.assertRaisesRegex(  # pylint: disable=g-error-prone-assert-raises
+        ValueError, 'The input metadata is empty.'
+    ):
       self.assertAnalyzeAndTransformResults(
           input_data=[{'x': x} for x in range(5)],
           input_metadata=tft.DatasetMetadata.from_feature_spec({}),
@@ -4785,10 +4787,12 @@ def preprocessing_fn(inputs):
                                       preprocessing_fn, expected_outputs)
 
   def test_preprocessing_fn_returns_wrong_type(self):
-    with self.assertRaisesRegexp(  # pylint: disable=g-error-prone-assert-raises
-        ValueError, r'A `preprocessing_fn` must return a '
+    with self.assertRaisesRegex(  # pylint: disable=g-error-prone-assert-raises
+        ValueError,
+        r'A `preprocessing_fn` must return a '
         r'Dict\[str, Union\[tf.Tensor, tf.SparseTensor, tf.RaggedTensor\]\]. '
-        'Got: Tensor.*'):
+        'Got: Tensor.*',
+    ):
       self.assertAnalyzeAndTransformResults(
           input_data=[{'f1': 0}],
           input_metadata=tft.DatasetMetadata.from_feature_spec(

tensorflow_transform/beam/tft_beam_io/transform_fn_io_test.py

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ def mock_write_metadata_expand(unused_self, unused_metadata):
 
     with mock.patch.object(transform_fn_io.beam_metadata_io.WriteMetadata,
                            'expand', mock_write_metadata_expand):
-      with self.assertRaisesRegexp(ArithmeticError, 'Some error'):
+      with self.assertRaisesRegex(ArithmeticError, 'Some error'):
        _ = ((saved_model_dir_pcoll, object())
             | transform_fn_io.WriteTransformFn(transform_output_dir))
 

tensorflow_transform/coders/csv_coder_test.py

Lines changed: 2 additions & 2 deletions
@@ -253,7 +253,7 @@ def test_constructor_error(self,
                             error_type=ValueError,
                             **kwargs):
     schema = schema_utils.schema_from_feature_spec(feature_spec)
-    with self.assertRaisesRegexp(error_type, error_msg):
+    with self.assertRaisesRegex(error_type, error_msg):
       csv_coder.CsvCoder(columns, schema, **kwargs)
 
   @test_case.named_parameters(*_ENCODE_ERROR_CASES)
@@ -266,7 +266,7 @@ def test_encode_error(self,
                        **kwargs):
     schema = schema_utils.schema_from_feature_spec(feature_spec)
     coder = csv_coder.CsvCoder(columns, schema, **kwargs)
-    with self.assertRaisesRegexp(error_type, error_msg):
+    with self.assertRaisesRegex(error_type, error_msg):
       coder.encode(instance)
 
   def test_picklable(self):

tensorflow_transform/coders/example_proto_coder_test.py

Lines changed: 1 addition & 1 deletion
@@ -373,7 +373,7 @@ def test_encode_error(self,
                        error_type=ValueError,
                        **kwargs):
     schema = schema_utils.schema_from_feature_spec(feature_spec)
-    with self.assertRaisesRegexp(error_type, error_msg):
+    with self.assertRaisesRegex(error_type, error_msg):
       coder = example_proto_coder.ExampleProtoCoder(schema, **kwargs)
       coder.encode(instance)
 

tensorflow_transform/graph_tools_test.py

Lines changed: 2 additions & 2 deletions
@@ -602,7 +602,7 @@ def testInitializableGraphAnalyzerConstructorRaises(
     tensors = create_graph_fn()
     replaced_tensors_ready = [(tensors[name], ready)
                               for name, ready in replaced_tensors_ready.items()]
-    with self.assertRaisesRegexp(ValueError, error_msg_regex):
+    with self.assertRaisesRegex(ValueError, error_msg_regex):
       graph_tools.InitializableGraphAnalyzer(graph,
                                              {x: tensors[x] for x in feeds},
                                              replaced_tensors_ready)
@@ -639,7 +639,7 @@ def testInitializableGraphAnalyzerReadyToRunRaises(
         tensors[name], ready) for name, ready in replaced_tensors_ready.items()]
     graph_analyzer = graph_tools.InitializableGraphAnalyzer(
         graph, {x: tensors[x] for x in feeds}, replaced_tensors_ready)
-    with self.assertRaisesRegexp(ValueError, error_msg_regex):
+    with self.assertRaisesRegex(ValueError, error_msg_regex):
       tensor = tensors[fetch]
       graph_analyzer.ready_to_run(tensor)
 

tensorflow_transform/mappers_test.py

Lines changed: 5 additions & 4 deletions
@@ -162,9 +162,9 @@ def testNGramsWithRepeatedTokensPerRow(self):
   def testNGramsBadSizes(self):
     string_tensor = tf.constant(['abc', 'def', 'fghijklm', 'z', ''])
     tokenized_tensor = tf.compat.v1.string_split(string_tensor, delimiter='')
-    with self.assertRaisesRegexp(ValueError, 'Invalid ngram_range'):
+    with self.assertRaisesRegex(ValueError, 'Invalid ngram_range'):
       mappers.ngrams(tokenized_tensor, (0, 5), separator='')
-    with self.assertRaisesRegexp(ValueError, 'Invalid ngram_range'):
+    with self.assertRaisesRegex(ValueError, 'Invalid ngram_range'):
       mappers.ngrams(tokenized_tensor, (6, 5), separator='')
 
   def testNGramsBagOfWordsEmpty(self):
@@ -837,8 +837,9 @@ def testApplyBucketsWithInterpolationAllNanBoundariesRaises(self):
     with self.test_session() as sess:
       x = tf.constant([float('-inf'), float('nan'), 0.0, 1.0])
       boundaries = tf.constant([[float('nan'), float('nan'), float('nan')]])
-      with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
-                                   'num_boundaries'):
+      with self.assertRaisesRegex(
+          tf.errors.InvalidArgumentError, 'num_boundaries'
+      ):
         sess.run(mappers.apply_buckets_with_interpolation(x, boundaries))
 
   def testApplyBucketsWithInterpolationRaises(self):

tensorflow_transform/test_case_test.py

Lines changed: 7 additions & 5 deletions
@@ -64,15 +64,17 @@ def testAssertDataCloseOrEqual(self):
                                     'd': ('second', 2.0000001)},
                                    {'e': 2,
                                     'f': 3}])
-    with self.assertRaisesRegexp(AssertionError, r'len\(.*\) != len\(\[\]\)'):
+    with self.assertRaisesRegex(AssertionError, r'len\(.*\) != len\(\[\]\)'):
       self.assertDataCloseOrEqual([{'a': 1}], [])
-    with self.assertRaisesRegexp(
+    with self.assertRaisesRegex(
         AssertionError,
-        re.compile('Element counts were not equal.*: Row 0', re.DOTALL)):
+        re.compile('Element counts were not equal.*: Row 0', re.DOTALL),
+    ):
       self.assertDataCloseOrEqual([{'a': 1}], [{'b': 1}])
-    with self.assertRaisesRegexp(
+    with self.assertRaisesRegex(
         AssertionError,
-        re.compile('Not equal to tolerance.*: Row 0, key a', re.DOTALL)):
+        re.compile('Not equal to tolerance.*: Row 0, key a', re.DOTALL),
+    ):
       self.assertDataCloseOrEqual([{'a': 1}], [{'a': 2}])
 
   @test_case.parameters((1, 'a'), (2, 'b'))

tensorflow_transform/tf_utils_test.py

Lines changed: 3 additions & 3 deletions
@@ -774,7 +774,7 @@ def test_same_shape_exceptions(self, x_input, y_input, x_shape, y_shape,
     x = tf.compat.v1.placeholder(tf.int32, x_shape)
     y = tf.compat.v1.placeholder(tf.int32, y_shape)
     with tf.compat.v1.Session() as sess:
-      with self.assertRaisesRegexp(exception_cls, error_string):
+      with self.assertRaisesRegex(exception_cls, error_string):
        sess.run(tf_utils.assert_same_shape(x, y), {x: x_input, y: y_input})
 
   @test_case.named_parameters(test_case.FUNCTION_HANDLERS)
@@ -1965,7 +1965,7 @@ def test_sparse_indices(self):
     x = tf.compat.v1.sparse_placeholder(tf.int64, shape=[None, None])
     key = tf.compat.v1.sparse_placeholder(tf.string, shape=[None, None])
     with tf.compat.v1.Session() as sess:
-      with self.assertRaisesRegexp(exception_cls, error_string):
+      with self.assertRaisesRegex(exception_cls, error_string):
        sess.run(tf_utils.reduce_batch_minus_min_and_max_per_key(x, key),
                 feed_dict={x: value, key: key_value})
 
@@ -2000,7 +2000,7 @@ def test_convert_sparse_indices(self):
                                 dense_shape=[4, 2, 5])
 
     with tf.compat.v1.Session() as sess:
-      with self.assertRaisesRegexp(exception_cls, error_string):
+      with self.assertRaisesRegex(exception_cls, error_string):
        sess.run(tf_utils._validate_and_get_dense_value_key_inputs(sparse1,
                                                                   sparse2),
                 feed_dict={sparse1: sparse_value1, sparse2: sparse_value2})
