Skip to content

Commit 3c3e202

Browse files
authored
add some debug checks in test case and remove an extra line (#37237)
1 parent 8a90113 commit 3c3e202

File tree

2 files changed

+10
-3
lines changed

2 files changed

+10
-3
lines changed

sdks/python/apache_beam/dataframe/io.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -793,7 +793,6 @@ def __init__(
793793
if format == 'csv':
794794
kwargs['filename_column'] = filename_column
795795
self._reader = globals()['read_%s' % format](*args, **kwargs)
796-
self._reader = globals()['read_%s' % format](*args, **kwargs)
797796
self._include_indexes = include_indexes
798797
self._objects_as_strings = objects_as_strings
799798
self._filename_column = filename_column

sdks/python/apache_beam/yaml/yaml_transform_test.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,8 @@
3434
except ImportError:
3535
jsonschema = None
3636

37+
_LOGGER = logging.getLogger(__name__)
38+
3739

3840
class CreateTimestamped(beam.PTransform):
3941
_yaml_requires_inputs = False
@@ -244,6 +246,10 @@ def test_csv_to_json(self):
244246
input = os.path.join(tmpdir, 'input.csv')
245247
output = os.path.join(tmpdir, 'output.json')
246248
data.to_csv(input, index=False)
249+
with open(input, 'r') as f:
250+
lines = f.readlines()
251+
_LOGGER.debug("input.csv has these {lines} lines.")
252+
self.assertEqual(len(lines), len(data) + 1) # +1 for header
247253

248254
with beam.Pipeline() as p:
249255
result = p | YamlTransform(
@@ -256,9 +262,11 @@ def test_csv_to_json(self):
256262
- type: WriteToJson
257263
config:
258264
path: %s
259-
num_shards: 1
265+
num_shards: 1
266+
- type: LogForTesting
260267
''' % (repr(input), repr(output)))
261-
268+
all_output = list(glob.glob(output + "*"))
269+
self.assertEqual(len(all_output), 1)
262270
output_shard = list(glob.glob(output + "*"))[0]
263271
result = pd.read_json(
264272
output_shard, orient='records',

0 commit comments

Comments (0)