#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Run with:
#   python -m apache_beam.examples.unbounded_sinks.test_write
#
# This file contains several examples of writing an unbounded PCollection to
# files.

import argparse
import json
import logging
import pyarrow
import apache_beam as beam
from apache_beam.examples.unbounded_sinks.generate_event import GenerateEvent
from apache_beam.io.fileio import WriteToFiles
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.runners.runner import PipelineResult
from apache_beam.transforms.trigger import AccumulationMode
from apache_beam.transforms.trigger import AfterWatermark
from apache_beam.transforms.util import LogElements
from apache_beam.transforms.window import FixedWindows
from apache_beam.utils.timestamp import Duration


class CountEvents(beam.PTransform):
  """Counts events per fixed five-second window."""
  def expand(self, events):
    return (
        events
        | beam.WindowInto(
            FixedWindows(5),
            trigger=AfterWatermark(),
            accumulation_mode=AccumulationMode.DISCARDING,
            allowed_lateness=Duration(seconds=0))
        | beam.CombineGlobally(
            beam.combiners.CountCombineFn()).without_defaults())
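
# A minimal usage sketch (not applied anywhere in this file):
#   counts = pipeline | GenerateEvent.sample_data() | CountEvents()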


def run(argv=None, save_main_session=True) -> PipelineResult:
  """Main entry point; defines and runs the unbounded-sink write pipeline."""
parser = argparse.ArgumentParser()
_, pipeline_args = parser.parse_known_args(argv)
# We use the save_main_session option because one or more DoFn's in this
# workflow rely on global context (e.g., a module imported at module level).
pipeline_options = PipelineOptions(pipeline_args)
pipeline_options.view_as(SetupOptions).save_main_session = save_main_session
p = beam.Pipeline(options=pipeline_options)
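  # GenerateEvent.sample_data() produces the unbounded stream of events that
  # every sink below consumes.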
output = p | GenerateEvent.sample_data()
#TextIO
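  # For an unbounded PCollection, WriteToText needs an explicit num_shards,
  # and triggering_frequency (in seconds) controls how often files are cut.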
output2 = output | 'TextIO WriteToText' >> beam.io.WriteToText(
file_path_prefix="__output__/ouput_WriteToText",
file_name_suffix=".txt",
#shard_name_template='-V-SSSSS-of-NNNNN',
num_shards=2,
triggering_frequency=5,
)
_ = output2 | 'LogElements after WriteToText' >> LogElements(
prefix='after WriteToText ', with_window=True, level=logging.INFO)
#FileIO
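  # fileio.WriteToFiles has no triggering_frequency, so an unbounded input
  # must be windowed first; each window firing is flushed to files.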
_ = (
output
| 'FileIO window' >> beam.WindowInto(
FixedWindows(5),
trigger=AfterWatermark(),
accumulation_mode=AccumulationMode.DISCARDING,
allowed_lateness=Duration(seconds=0))
| 'Serialize' >> beam.Map(json.dumps)
| 'FileIO WriteToFiles' >>
WriteToFiles(path="__output__/output_WriteToFiles"))
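
  # A variant sketch (not in the original example): WriteToFiles can also take
  # a file_naming callable to control output file names. Left commented out so
  # the pipeline's outputs stay unchanged.
  # from apache_beam.io import fileio
  # _ = (
  #     output
  #     | 'FileIO window named' >> beam.WindowInto(FixedWindows(5))
  #     | 'Serialize named' >> beam.Map(json.dumps)
  #     | 'WriteToFiles named' >> fileio.WriteToFiles(
  #         path="__output__/output_WriteToFiles_named",
  #         file_naming=fileio.default_file_naming('out', '.json')))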
#ParquetIO
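  # The pyarrow schema assumes each event is a dict with an integer 'age'
  # field, matching the Avro schema used further below.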
pyschema = pyarrow.schema([('age', pyarrow.int64())])
output4a = output | 'WriteToParquet' >> beam.io.WriteToParquet(
file_path_prefix="__output__/output_parquet",
#shard_name_template='-V-SSSSS-of-NNNNN',
file_name_suffix=".parquet",
num_shards=2,
triggering_frequency=5,
schema=pyschema)
_ = output4a | 'LogElements after WriteToParquet' >> LogElements(
prefix='after WriteToParquet 4a ', with_window=True, level=logging.INFO)
output4aw = (
output
| 'ParquetIO window' >> beam.WindowInto(
FixedWindows(20),
trigger=AfterWatermark(),
accumulation_mode=AccumulationMode.DISCARDING,
allowed_lateness=Duration(seconds=0))
| 'WriteToParquet windowed' >> beam.io.WriteToParquet(
file_path_prefix="__output__/output_parquet",
shard_name_template='-W-SSSSS-of-NNNNN',
file_name_suffix=".parquet",
num_shards=2,
schema=pyschema))
_ = output4aw | 'LogElements after WriteToParquet windowed' >> LogElements(
prefix='after WriteToParquet 4aw ', with_window=True, level=logging.INFO)
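  # WriteToParquetBatched consumes pyarrow Tables instead of dicts, so each
  # event is first wrapped in a one-row Table.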
output4b = (
output
| 'To PyArrow Table' >>
beam.Map(lambda x: pyarrow.Table.from_pylist([x], schema=pyschema))
| 'WriteToParquetBatched to parquet' >> beam.io.WriteToParquetBatched(
file_path_prefix="__output__/output_parquet_batched",
shard_name_template='-V-SSSSS-of-NNNNN',
file_name_suffix=".parquet",
num_shards=2,
triggering_frequency=5,
schema=pyschema))
_ = output4b | 'LogElements after WriteToParquetBatched' >> LogElements(
prefix='after WriteToParquetBatched 4b ',
with_window=True,
level=logging.INFO)
#AvroIO
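  # WriteToAvro takes a fastavro-style schema dict describing each record.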
  avroschema = {
      'name': 'dummy',  # the Avro record name, not an output file name
      'type': 'record',  # 'record' is the usual top-level type; the Avro
                         # spec defines several other complex types
      'fields': [  # the record's fields and their types
          {'name': 'age', 'type': 'int'},
      ],
  }
output5 = output | 'WriteToAvro' >> beam.io.WriteToAvro(
file_path_prefix="__output__/output_avro",
#shard_name_template='-V-SSSSS-of-NNNNN',
file_name_suffix=".avro",
num_shards=2,
#triggering_frequency=5,
schema=avroschema)
_ = output5 | 'LogElements after WriteToAvro' >> LogElements(
prefix='after WriteToAvro 5 ', with_window=True, level=logging.INFO)
#TFrecordIO
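  # TFRecord files hold raw byte strings, so each event is JSON-encoded to
  # bytes before writing.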
output6 = (
output
| "encode" >> beam.Map(lambda s: json.dumps(s).encode('utf-8'))
| 'WriteToTFRecord' >> beam.io.WriteToTFRecord(
file_path_prefix="__output__/output_tfrecord",
#shard_name_template='-V-SSSSS-of-NNNNN',
file_name_suffix=".tfrecord",
num_shards=2,
triggering_frequency=5))
_ = output6 | 'LogElements after WriteToTFRecord' >> LogElements(
prefix='after WriteToTFRecord 6 ', with_window=True, level=logging.INFO)
# Execute the pipeline and return the result.
result = p.run()
result.wait_until_finish()
return result


if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()