This repository was archived by the owner on Dec 16, 2021. It is now read-only.

Commit 0d7e227

Upgrade beam to 2.16.0 (#28)

1 parent c92ae1c
File tree: 4 files changed, +61 −69 lines

README.md

Lines changed: 3 additions & 3 deletions

````diff
@@ -46,7 +46,7 @@ Apache Beam's `DatastoreIO` doesn't allow us to write same key at once.
 mvn clean package
 
 # Run bigquery-to-datastore via the compiled JAR file
-java -cp $(pwd)/target/bigquery-to-datastore-bundled-0.5.2.jar \
+java -cp $(pwd)/target/bigquery-to-datastore-bundled-0.6.0.jar \
   com.github.yuiskw.beam.BigQuery2Datastore \
   --project=your-gcp-project \
   --runner=DataflowRunner \
@@ -71,7 +71,7 @@ mvn clean package
 make package
 
 # run
-java -cp $(pwd)/target/bigquery-to-datastore-bundled-0.5.2.jar --help
+java -cp $(pwd)/target/bigquery-to-datastore-bundled-0.6.0.jar --help
 # or
 ./bin/bigquery-to-datastore --help
 ```
@@ -80,7 +80,7 @@ java -cp $(pwd)/target/bigquery-to-datastore-bundled-0.5.2.jar --help
 We also offers docker images for this project in [yuiskw/bigquery\-to\-datastore \- Docker Hub](https://hub.docker.com/r/yuiskw/bigquery-to-datastore/).
 We have several docker images based on Apache Beam versions.
 ```
-docker run yuiskw/bigquery-to-datastore:0.5.2-beam-2.1 --help
+docker run yuiskw/bigquery-to-datastore:0.6.0-beam-2.16.0 --help
 ```
 
 ### How to install it with homebrew
````

pom.xml

Lines changed: 45 additions & 56 deletions

```diff
@@ -22,28 +22,31 @@
 
   <groupId>com.github.yuiskw</groupId>
   <artifactId>bigquery-to-datastore</artifactId>
-  <version>0.5.2</version>
+  <version>0.6.0</version>
 
   <packaging>jar</packaging>
 
   <properties>
-    <beam.version>2.1.0</beam.version>
+    <beam.version>2.16.0</beam.version>
 
-    <bigquery.version>v2-rev295-1.22.0</bigquery.version>
-    <google-clients.version>1.22.0</google-clients.version>
-    <guava.version>20.0</guava.version>
-    <hamcrest.version>1.3</hamcrest.version>
-    <jackson.version>2.8.9</jackson.version>
-    <joda.version>2.4</joda.version>
-    <junit.version>4.12</junit.version>
-    <maven-compiler-plugin.version>3.6.1</maven-compiler-plugin.version>
-    <maven-exec-plugin.version>1.4.0</maven-exec-plugin.version>
+    <bigquery.version>v2-rev20181104-1.27.0</bigquery.version>
+    <google-clients.version>1.27.0</google-clients.version>
+    <hamcrest.version>2.1</hamcrest.version>
+    <jackson.version>2.9.10</jackson.version>
+    <joda.version>2.10.3</joda.version>
+    <junit.version>4.13-beta-3</junit.version>
+    <maven-compiler-plugin.version>3.7.0</maven-compiler-plugin.version>
+    <maven-exec-plugin.version>1.6.0</maven-exec-plugin.version>
     <maven-jar-plugin.version>3.0.2</maven-jar-plugin.version>
-    <maven-shade-plugin.version>3.0.0</maven-shade-plugin.version>
-    <pubsub.version>v1-rev10-1.22.0</pubsub.version>
-    <slf4j.version>1.7.14</slf4j.version>
-    <spark.version>1.6.3</spark.version>
-    <surefire-plugin.version>2.20</surefire-plugin.version>
+    <maven-shade-plugin.version>3.1.0</maven-shade-plugin.version>
+    <mockito.version>3.0.0</mockito.version>
+    <pubsub.version>v1-rev20181105-1.27.0</pubsub.version>
+    <slf4j.version>1.7.25</slf4j.version>
+    <spark.version>2.4.4</spark.version>
+    <hadoop.version>2.7.3</hadoop.version>
+    <maven-surefire-plugin.version>2.21.0</maven-surefire-plugin.version>
+    <nemo.version>0.1</nemo.version>
+    <flink.artifact.name>beam-runners-flink-1.8</flink.artifact.name>
   </properties>
 
   <repositories>
@@ -75,7 +78,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>${surefire-plugin.version}</version>
+        <version>${maven-surefire-plugin.version}</version>
         <configuration>
           <parallel>all</parallel>
           <threadCount>4</threadCount>
@@ -85,7 +88,7 @@
           <dependency>
             <groupId>org.apache.maven.surefire</groupId>
             <artifactId>surefire-junit47</artifactId>
-            <version>${surefire-plugin.version}</version>
+            <version>${maven-surefire-plugin.version}</version>
           </dependency>
         </dependencies>
       </plugin>
@@ -166,54 +169,34 @@
     </profile>
 
     <profile>
-      <id>dataflow-runner</id>
-      <!-- Makes the DataflowRunner available when running a pipeline. -->
+      <id>portable-runner</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <!-- Makes the PortableRunner available when running a pipeline. -->
       <dependencies>
         <dependency>
           <groupId>org.apache.beam</groupId>
-          <artifactId>beam-runners-google-cloud-dataflow-java</artifactId>
+          <artifactId>beam-runners-reference-java</artifactId>
           <version>${beam.version}</version>
+          <scope>runtime</scope>
         </dependency>
       </dependencies>
     </profile>
 
     <profile>
-      <id>spark-runner</id>
-      <!-- Makes the SparkRunner available when running a pipeline. Additionally,
-           overrides some Spark dependencies to Beam-compatible versions. -->
+      <id>dataflow-runner</id>
+      <!-- Makes the DataflowRunner available when running a pipeline. -->
       <dependencies>
         <dependency>
           <groupId>org.apache.beam</groupId>
-          <artifactId>beam-runners-spark</artifactId>
-          <version>${beam.version}</version>
-          <scope>runtime</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.beam</groupId>
-          <artifactId>beam-sdks-java-io-hadoop-file-system</artifactId>
+          <artifactId>beam-runners-google-cloud-dataflow-java</artifactId>
           <version>${beam.version}</version>
           <scope>runtime</scope>
         </dependency>
-        <dependency>
-          <groupId>org.apache.spark</groupId>
-          <artifactId>spark-streaming_2.10</artifactId>
-          <version>${spark.version}</version>
-          <scope>runtime</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>org.slf4j</groupId>
-              <artifactId>jul-to-slf4j</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>com.fasterxml.jackson.module</groupId>
-          <artifactId>jackson-module-scala_2.10</artifactId>
-          <version>${jackson.version}</version>
-          <scope>runtime</scope>
-        </dependency>
       </dependencies>
     </profile>
+
   </profiles>
 
   <dependencies>
@@ -300,12 +283,6 @@
       <version>${joda.version}</version>
     </dependency>
 
-    <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-      <version>${guava.version}</version>
-    </dependency>
-
     <!-- Add slf4j API frontend binding with JUL backend -->
     <dependency>
       <groupId>org.slf4j</groupId>
@@ -325,7 +302,13 @@
          which is used in the main code of DebuggingWordCount example. -->
     <dependency>
       <groupId>org.hamcrest</groupId>
-      <artifactId>hamcrest-all</artifactId>
+      <artifactId>hamcrest-core</artifactId>
+      <version>${hamcrest.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-library</artifactId>
       <version>${hamcrest.version}</version>
     </dependency>
 
@@ -343,5 +326,11 @@
       <scope>test</scope>
     </dependency>
 
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>${mockito.version}</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>
```
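Note on the profile changes above: the Maven profiles only decide which runner artifacts get bundled onto the classpath; the runner itself is still picked at launch time through Beam's `--runner` pipeline option (as the README's `--runner=DataflowRunner` example shows). A minimal sketch of that mechanism, assuming `beam-sdks-java-core` 2.16.0 plus one runner artifact on the classpath; `RunnerSelectionSketch` is a hypothetical illustration, not code from this repository:

```java
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

public class RunnerSelectionSketch {
  public static void main(String[] args) {
    // Parses flags such as --runner=DataflowRunner or --runner=PortableRunner.
    // The named runner class is resolved reflectively at run time, so its
    // artifact must be on the classpath -- which is exactly what the
    // portable-runner / dataflow-runner profiles above control.
    PipelineOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().create();
    Pipeline pipeline = Pipeline.create(options);
    // A real pipeline would add transforms here before running.
    pipeline.run().waitUntilFinish();
  }
}
```

Making `portable-runner` active by default means a plain `mvn clean package` now bundles the portable reference runner, while the Dataflow profile must be selected explicitly.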

src/main/java/com/github/yuiskw/beam/BigQuery2Datastore.java

Lines changed: 4 additions & 1 deletion

```diff
@@ -72,7 +72,10 @@ public static void main(String[] args) {
     List<String> indexedColumns = parseIndexedColumns(options.getIndexedColumns());
 
     // Input
-    TableReference tableRef = new TableReference().setDatasetId(datasetId).setTableId(tableId);
+    TableReference tableRef = new TableReference()
+        .setProjectId(projectId)
+        .setDatasetId(datasetId)
+        .setTableId(tableId);
     BigQueryIO.Read reader = BigQueryIO.read().from(tableRef);
 
     // Output
```
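The fix above makes the source table fully qualified: without `setProjectId`, the read has to rely on the pipeline's default GCP project, which breaks when the input table lives in a different project. A self-contained sketch of the qualified reference (the ids are hypothetical placeholders and `TableRefSketch` is not a class from this repository):

```java
import com.google.api.services.bigquery.model.TableReference;

public class TableRefSketch {
  public static void main(String[] args) {
    // project:dataset.table spelled out explicitly, so the read no longer
    // depends on whichever default project the pipeline happens to run under.
    TableReference tableRef = new TableReference()
        .setProjectId("your-gcp-project") // hypothetical placeholder
        .setDatasetId("your_dataset")     // hypothetical placeholder
        .setTableId("your_table");        // hypothetical placeholder
    System.out.println(tableRef);
  }
}
```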

src/main/java/com/github/yuiskw/beam/TableRow2EntityFn.java

Lines changed: 9 additions & 9 deletions

```diff
@@ -13,7 +13,6 @@
 
 import com.google.api.services.bigquery.model.TableRow;
 import com.google.datastore.v1.*;
-import com.google.protobuf.Timestamp;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.joda.time.LocalDate;
 import org.joda.time.LocalDateTime;
@@ -54,19 +53,20 @@ public TableRow2EntityFn(
 
   /**
    * Convert Date to Timestamp
+   * @return
    */
-  public static Timestamp toTimestamp(Date date) {
+  public static com.google.protobuf.Timestamp toTimestamp(Date date) {
     long millis = date.getTime();
-    Timestamp timestamp = Timestamp.newBuilder()
+    com.google.protobuf.Timestamp timestamp = com.google.protobuf.Timestamp.newBuilder()
         .setSeconds(millis / 1000)
         .setNanos((int) ((millis % 1000) * 1000000))
         .build();
     return timestamp;
   }
 
-  public static Timestamp toTimestamp(Instant instant) {
+  public static com.google.protobuf.Timestamp toTimestamp(Instant instant) {
     long second = instant.getEpochSecond();
-    Timestamp timestamp = Timestamp.newBuilder()
+    com.google.protobuf.Timestamp timestamp = com.google.protobuf.Timestamp.newBuilder()
         .setSeconds(second)
         .build();
     return timestamp;
@@ -130,24 +130,24 @@ else if (value instanceof java.lang.Double) {
     // }
     // TIMESTAMP
     else if (value instanceof org.joda.time.LocalDateTime) {
-      Timestamp timestamp = toTimestamp(((LocalDateTime) value).toLocalDate().toDate());
+      com.google.protobuf.Timestamp timestamp = toTimestamp(((LocalDateTime) value).toLocalDate().toDate());
       v = Value.newBuilder().setTimestampValue(timestamp)
           .setExcludeFromIndexes(isExcluded).build();
     }
     else if (value instanceof String && parseTimestamp((String) value) != null) {
       Instant instant = parseTimestamp((String) value);
-      Timestamp timestamp = toTimestamp(instant);
+      com.google.protobuf.Timestamp timestamp = toTimestamp(instant);
       v = Value.newBuilder().setTimestampValue(timestamp)
           .setExcludeFromIndexes(isExcluded).build();
     }
     // DATE
     else if (value instanceof org.joda.time.LocalDate) {
-      Timestamp timestamp = toTimestamp(((LocalDate) value).toDate());
+      com.google.protobuf.Timestamp timestamp = toTimestamp(((LocalDate) value).toDate());
       v = Value.newBuilder().setTimestampValue(timestamp)
           .setExcludeFromIndexes(isExcluded).build();
     } else if (value instanceof String && parseDate((String) value) != null) {
       Instant instant = parseDate((String) value);
-      Timestamp timestamp = toTimestamp(instant);
+      com.google.protobuf.Timestamp timestamp = toTimestamp(instant);
       v = Value.newBuilder().setTimestampValue(timestamp)
          .setExcludeFromIndexes(isExcluded).build();
     }
```
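The only change in this file is dropping the `import com.google.protobuf.Timestamp` and fully qualifying every use, presumably to keep the name unambiguous next to the `com.google.datastore.v1.*` wildcard import after the upgrade; the conversion logic itself is untouched. A self-contained sketch of that millis-to-`Timestamp` split (assuming `protobuf-java` on the classpath; `TimestampSketch` is a hypothetical class, not from this repository):

```java
import com.google.protobuf.Timestamp;

public class TimestampSketch {
  /** Split epoch milliseconds into protobuf's seconds + nanos pair. */
  static Timestamp fromMillis(long millis) {
    return Timestamp.newBuilder()
        .setSeconds(millis / 1000)                    // whole seconds
        .setNanos((int) ((millis % 1000) * 1000000))  // leftover millis as nanos
        .build();
  }

  public static void main(String[] args) {
    // 1572000123456 ms -> seconds=1572000123, nanos=456000000
    System.out.println(fromMillis(1572000123456L));
  }
}
```

Like the original, this assumes a non-negative epoch: for instants before 1970 the `%` remainder turns negative and the seconds/nanos pair would need adjusting.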
