Skip to content

Commit efd75e1

Browse files
authored
deps: update versions and allow disabling client side metrics (#4350)
* deps: update client to latest version and allow disabling client side metrics * update * update pom * update pom
1 parent 33facf5 commit efd75e1

File tree

16 files changed

+86
-56
lines changed

16 files changed

+86
-56
lines changed

bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/BigtableOptionsFactory.java

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -337,4 +337,8 @@ public class BigtableOptionsFactory {
337337
/** Override idle timeout, for testing only. */
338338
@VisibleForTesting
339339
public static final String BIGTABLE_TEST_IDLE_TIMEOUT_MS = "google.bigtable.idle.timeout.ms";
340+
341+
/** Sets if client side metrics should be enabled. Client side metrics is enabled by default. */
342+
public static final String BIGTABLE_ENABLE_CLIENT_SIDE_METRICS =
343+
"google.bigtable.enable.client.side.metrics";
340344
}

bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/wrappers/DataClientWrapper.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,14 +17,14 @@
1717

1818
import com.google.api.core.ApiFuture;
1919
import com.google.api.core.InternalApi;
20+
import com.google.api.gax.rpc.ResponseObserver;
2021
import com.google.cloud.bigtable.data.v2.models.ConditionalRowMutation;
2122
import com.google.cloud.bigtable.data.v2.models.Filters;
2223
import com.google.cloud.bigtable.data.v2.models.KeyOffset;
2324
import com.google.cloud.bigtable.data.v2.models.Query;
2425
import com.google.cloud.bigtable.data.v2.models.ReadModifyWriteRow;
2526
import com.google.cloud.bigtable.data.v2.models.RowMutation;
2627
import com.google.protobuf.ByteString;
27-
import io.grpc.stub.StreamObserver;
2828
import java.io.IOException;
2929
import java.util.List;
3030
import javax.annotation.Nullable;
@@ -87,7 +87,7 @@ ApiFuture<Result> readRowAsync(
8787

8888
/** Read {@link Result} asynchronously, and pass them to a stream observer to be processed. */
8989
// TODO: once veneer is implemented update this with gax's ResponseObserver.
90-
void readRowsAsync(Query request, StreamObserver<Result> observer);
90+
void readRowsAsync(Query request, ResponseObserver<Result> observer);
9191

9292
@Override
9393
void close() throws IOException;

bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/wrappers/veneer/BigtableHBaseVeneerSettings.java

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_DATA_CHANNEL_COUNT_KEY;
3131
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_EMULATOR_HOST_KEY;
3232
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_ENABLE_BULK_MUTATION_FLOW_CONTROL;
33+
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_ENABLE_CLIENT_SIDE_METRICS;
3334
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_HOST_KEY;
3435
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_MUTATE_RPC_ATTEMPT_TIMEOUT_MS_KEY;
3536
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_MUTATE_RPC_TIMEOUT_MS_KEY;
@@ -81,6 +82,7 @@
8182
import com.google.cloud.bigtable.data.v2.models.Row;
8283
import com.google.cloud.bigtable.data.v2.stub.BigtableBatchingCallSettings;
8384
import com.google.cloud.bigtable.data.v2.stub.BigtableBulkReadRowsCallSettings;
85+
import com.google.cloud.bigtable.data.v2.stub.metrics.NoopMetricsProvider;
8486
import com.google.cloud.bigtable.hbase.BigtableConfiguration;
8587
import com.google.cloud.bigtable.hbase.BigtableExtendedConfiguration;
8688
import com.google.cloud.bigtable.hbase.BigtableHBaseVersion;
@@ -340,6 +342,10 @@ private BigtableDataSettings buildBigtableDataSettings(ClientOperationTimeouts c
340342
configureRetryableCallSettings(
341343
dataBuilder.stubSettings().sampleRowKeysSettings(), clientTimeouts.getUnaryTimeouts());
342344

345+
if (!configuration.getBoolean(BIGTABLE_ENABLE_CLIENT_SIDE_METRICS, true)) {
346+
dataBuilder.setMetricsProvider(NoopMetricsProvider.INSTANCE);
347+
}
348+
343349
return dataBuilder.build();
344350
}
345351

bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/wrappers/veneer/DataClientVeneerApi.java

Lines changed: 2 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
import com.google.api.gax.rpc.ApiCallContext;
2424
import com.google.api.gax.rpc.ResponseObserver;
2525
import com.google.api.gax.rpc.ServerStream;
26-
import com.google.api.gax.rpc.StateCheckingResponseObserver;
2726
import com.google.api.gax.rpc.StreamController;
2827
import com.google.cloud.bigtable.data.v2.BigtableDataClient;
2928
import com.google.cloud.bigtable.data.v2.models.ConditionalRowMutation;
@@ -48,7 +47,6 @@
4847
import com.google.protobuf.ByteString;
4948
import io.grpc.CallOptions;
5049
import io.grpc.Deadline;
51-
import io.grpc.stub.StreamObserver;
5250
import java.util.ArrayDeque;
5351
import java.util.ArrayList;
5452
import java.util.Iterator;
@@ -162,10 +160,8 @@ public ApiFuture<List<Result>> readRowsAsync(Query request) {
162160
}
163161

164162
@Override
165-
public void readRowsAsync(Query request, StreamObserver<Result> observer) {
166-
delegate
167-
.readRowsCallable(RESULT_ADAPTER)
168-
.call(request, new StreamObserverAdapter<>(observer), createScanCallContext());
163+
public void readRowsAsync(Query request, ResponseObserver<Result> observer) {
164+
delegate.readRowsCallable(RESULT_ADAPTER).call(request, observer, createScanCallContext());
169165
}
170166

171167
// Point reads are implemented using a streaming ReadRows RPC. So timeouts need to be managed
@@ -218,29 +214,6 @@ public void close() {
218214
delegate.close();
219215
}
220216

221-
/** wraps {@link StreamObserver} onto GCJ {@link com.google.api.gax.rpc.ResponseObserver}. */
222-
private static class StreamObserverAdapter<T> extends StateCheckingResponseObserver<T> {
223-
private final StreamObserver<T> delegate;
224-
225-
StreamObserverAdapter(StreamObserver<T> delegate) {
226-
this.delegate = delegate;
227-
}
228-
229-
protected void onStartImpl(StreamController controller) {}
230-
231-
protected void onResponseImpl(T response) {
232-
this.delegate.onNext(response);
233-
}
234-
235-
protected void onErrorImpl(Throwable t) {
236-
this.delegate.onError(t);
237-
}
238-
239-
protected void onCompleteImpl() {
240-
this.delegate.onCompleted();
241-
}
242-
}
243-
244217
/**
245218
* wraps {@link ServerStream} onto HBase {@link ResultScanner}. {@link PaginatedRowResultScanner}
246219
* gets a paginator and a {@link Query.QueryPaginator} used to get a {@link ServerStream}<{@link

bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/wrappers/veneer/SharedDataClientWrapper.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
package com.google.cloud.bigtable.hbase.wrappers.veneer;
1717

1818
import com.google.api.core.ApiFuture;
19+
import com.google.api.gax.rpc.ResponseObserver;
1920
import com.google.cloud.bigtable.data.v2.models.ConditionalRowMutation;
2021
import com.google.cloud.bigtable.data.v2.models.Filters.Filter;
2122
import com.google.cloud.bigtable.data.v2.models.KeyOffset;
@@ -27,7 +28,6 @@
2728
import com.google.cloud.bigtable.hbase.wrappers.DataClientWrapper;
2829
import com.google.cloud.bigtable.hbase.wrappers.veneer.SharedDataClientWrapperFactory.Key;
2930
import com.google.protobuf.ByteString;
30-
import io.grpc.stub.StreamObserver;
3131
import java.io.IOException;
3232
import java.util.List;
3333
import javax.annotation.Nullable;
@@ -99,7 +99,7 @@ public ApiFuture<List<Result>> readRowsAsync(Query request) {
9999
}
100100

101101
@Override
102-
public void readRowsAsync(Query request, StreamObserver<Result> observer) {
102+
public void readRowsAsync(Query request, ResponseObserver<Result> observer) {
103103
delegate.readRowsAsync(request, observer);
104104
}
105105

bigtable-client-core-parent/bigtable-hbase/src/test/java/com/google/cloud/bigtable/hbase/wrappers/veneer/TestDataClientVeneerApi.java

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,6 @@
5353
import com.google.common.collect.Lists;
5454
import com.google.protobuf.ByteString;
5555
import io.grpc.Status.Code;
56-
import io.grpc.stub.StreamObserver;
5756
import java.io.IOException;
5857
import java.util.Iterator;
5958
import java.util.List;
@@ -470,11 +469,14 @@ public void testReadRowsAsync() throws Exception {
470469
public void testReadRowsAsyncWithStreamOb() {
471470
final Exception readException = new Exception();
472471
Query request = Query.create(TABLE_ID).rowKey(ROW_KEY);
473-
StreamObserver<Result> resultStreamOb =
474-
new StreamObserver<Result>() {
472+
ResponseObserver<Result> resultStreamOb =
473+
new ResponseObserver<Result>() {
475474
@Override
476-
public void onNext(Result result) {
477-
assertResult(EXPECTED_RESULT, result);
475+
public void onStart(StreamController controller) {}
476+
477+
@Override
478+
public void onResponse(Result response) {
479+
assertResult(EXPECTED_RESULT, response);
478480
}
479481

480482
@Override
@@ -483,7 +485,7 @@ public void onError(Throwable throwable) {
483485
}
484486

485487
@Override
486-
public void onCompleted() {}
488+
public void onComplete() {}
487489
};
488490
when(mockDataClient.readRowsCallable(Mockito.<RowResultAdapter>any()))
489491
.thenReturn(mockStreamingCallable);

bigtable-dataflow-parent/bigtable-beam-import/pom.xml

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,6 @@ limitations under the License.
4747
<groupId>org.apache.beam</groupId>
4848
<artifactId>beam-sdks-java-core</artifactId>
4949
</dependency>
50-
<dependency>
51-
<groupId>org.apache.beam</groupId>
52-
<artifactId>beam-sdks-java-io-google-cloud-platform</artifactId>
53-
</dependency>
5450
<dependency>
5551
<groupId>org.apache.beam</groupId>
5652
<artifactId>beam-sdks-java-io-hadoop-format</artifactId>

bigtable-dataflow-parent/bigtable-beam-import/src/test/java/com/google/cloud/bigtable/beam/test_env/EnvSetup.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,13 +20,13 @@
2020
import java.io.UncheckedIOException;
2121
import java.util.List;
2222
import java.util.stream.Collectors;
23-
import org.apache.beam.runners.core.construction.Environments;
24-
import org.apache.beam.runners.core.construction.resources.PipelineResources;
2523
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
2624
import org.apache.beam.runners.dataflow.util.GcsStager;
2725
import org.apache.beam.runners.dataflow.util.PackageUtil;
2826
import org.apache.beam.sdk.io.FileSystems;
2927
import org.apache.beam.sdk.options.PipelineOptionsFactory;
28+
import org.apache.beam.sdk.util.construction.Environments;
29+
import org.apache.beam.sdk.util.construction.resources.PipelineResources;
3030
import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.hash.HashCode;
3131
import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.hash.Hashing;
3232
import org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.io.Files;

bigtable-dataflow-parent/bigtable-hbase-beam/pom.xml

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,9 +33,18 @@ limitations under the License.
3333
<dependency>
3434
<groupId>com.google.errorprone</groupId>
3535
<artifactId>error_prone_annotations</artifactId>
36-
<version>2.14.0</version>
36+
<version>2.20.0</version>
3737
</dependency>
3838

39+
<!-- fix the version conflict where beam is using 1.8.2 and google-cloud-shared-config
40+
is using a newer version -->
41+
<dependency>
42+
<groupId>com.google.auto.value</groupId>
43+
<artifactId>auto-value-annotations</artifactId>
44+
<version>1.8.2</version>
45+
</dependency>
46+
47+
3948
<!-- fix conflict between hbase slf & beam -->
4049
<dependency>
4150
<groupId>org.slf4j</groupId>

bigtable-hbase-1.x-parent/bigtable-hbase-1.x-hadoop/pom.xml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -186,6 +186,7 @@ limitations under the License.
186186
<classEntry>org/apache/hadoop/hbase/client/AbstractBigtableConnection</classEntry>
187187
<classEntry>org/apache/hadoop/hbase/client/CommonConnection</classEntry>
188188
<classEntry>org/apache/hadoop/hbase/client/BigtableAsyncRegistry</classEntry>
189+
<classEntry>META-INF/versions/9/com/google/cloud/bigtable/repackaged</classEntry>
189190
</allowedJarClassEntries>
190191
</configuration>
191192
</execution>

0 commit comments

Comments (0)