Skip to content

Commit 888b74d

Browse files
authored
deps: update shared config (#4492)
* deps: update shared config
* update
1 parent 9a5b087 commit 888b74d

File tree

10 files changed

+14
-19
lines changed

10 files changed

+14
-19
lines changed

bigtable-client-core-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/wrappers/BigtableHBaseSettings.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ public abstract class BigtableHBaseSettings {
4949
"bulk.mutation.close.timeout.milliseconds";
5050

5151
// Must be non-negative. Set to 0 to disable timeout.
52-
private final long bulkMutationCloseTimeoutMilliseconds;;
52+
private final long bulkMutationCloseTimeoutMilliseconds;
5353

5454
public static BigtableHBaseSettings create(Configuration configuration) throws IOException {
5555
return BigtableHBaseVeneerSettings.create(configuration);

bigtable-dataflow-parent/bigtable-beam-import/src/main/java/com/google/cloud/bigtable/beam/validation/BufferedHadoopHashTableSource.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,8 @@ class BufferedHadoopHashTableSource extends BoundedSource<KV<String, List<RangeH
4949

5050
private static final int DEFAULT_BATCH_SIZE = 50;
5151
private static final Coder<KV<String, List<RangeHash>>> CODER =
52-
KvCoder.of(StringUtf8Coder.of(), ListCoder.of(RangeHashCoder.of()));;
52+
KvCoder.of(StringUtf8Coder.of(), ListCoder.of(RangeHashCoder.of()));
53+
;
5354

5455
// Max number of RangeHashes to buffer.
5556
private final int maxBufferSize;

bigtable-dataflow-parent/bigtable-beam-import/src/test/java/com/google/cloud/bigtable/beam/hbasesnapshots/EndToEndIT.java

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -179,9 +179,7 @@ private static void uploadFixture(GcsUtil gcsUtil, String fixtureName, String de
179179
}
180180

181181
// Upload to GCS in parallel
182-
filesToUpload
183-
.entrySet()
184-
.parallelStream()
182+
filesToUpload.entrySet().parallelStream()
185183
.forEach(
186184
e -> {
187185
GcsPath path = GcsPath.fromUri(destPath + e.getKey());

bigtable-dataflow-parent/bigtable-beam-import/src/test/java/com/google/cloud/bigtable/beam/sequencefiles/EndToEndIT.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ public class EndToEndIT {
7272

7373
private Connection connection;
7474

75-
private GcsUtil gcsUtil;;
75+
private GcsUtil gcsUtil;
7676

7777
@Before
7878
public void setup() throws Exception {

bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/batch/common/CloudBigtableServiceImpl.java

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -46,11 +46,9 @@ public List<KeyOffset> getSampleRowKeys(CloudBigtableTableConfiguration config)
4646
builder
4747
.stubSettings()
4848
.setTransportChannelProvider(
49-
oldTransportProvider
50-
.toBuilder()
49+
oldTransportProvider.toBuilder()
5150
.setChannelPoolSettings(
52-
ChannelPoolSettings.staticallySized(1)
53-
.toBuilder()
51+
ChannelPoolSettings.staticallySized(1).toBuilder()
5452
.setPreemptiveRefreshEnabled(false)
5553
.build())
5654
.build());

bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableIO.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -762,8 +762,7 @@ private void resetScanner() throws IOException {
762762
// start key to be inclusive and end key to be exclusive.
763763
byte[] newStartKey = Arrays.copyOf(rowKey, rowKey.length + 1);
764764
scan =
765-
scanConfiguration
766-
.toBuilder()
765+
scanConfiguration.toBuilder()
767766
.withKeys(newStartKey, scanConfiguration.getStopRow())
768767
.build()
769768
.getScanValueProvider()

bigtable-dataflow-parent/bigtable-hbase-beam/src/test/java/com/google/cloud/bigtable/beam/CloudBigtableScanConfigurationTest.java

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -109,8 +109,7 @@ public void testRegularAndRuntimeParametersAreEqualWithScan() {
109109
public void testRegularAndRuntimeParametersAreEqualWithRequest() {
110110
ReadRowsRequest request = ReadRowsRequest.newBuilder().setRowsLimit(10).build();
111111
CloudBigtableScanConfiguration withRegularParameters =
112-
config
113-
.toBuilder()
112+
config.toBuilder()
114113
.withRequest(request)
115114
.withKeys(START_ROW, STOP_ROW)
116115
.withConfiguration("somekey", "somevalue")
@@ -130,8 +129,7 @@ public void testRegularAndRuntimeParametersAreEqualWithRequest() {
130129
ReadRowsRequest updatedRequest = withRegularParameters.getRequest();
131130
withRegularParameters = withRegularParameters.toBuilder().withRequest(updatedRequest).build();
132131
withRuntimeParameters =
133-
withRuntimeParameters
134-
.toBuilder()
132+
withRuntimeParameters.toBuilder()
135133
.withRequest(StaticValueProvider.of(updatedRequest))
136134
.build();
137135
Assert.assertEquals(withRegularParameters, withRuntimeParameters);

bigtable-hbase-2.x-parent/bigtable-hbase-2.x/src/main/java/org/apache/hadoop/hbase/client/BigtableAsyncConnection.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -395,7 +395,8 @@ public List<HRegionInfo> getAllRegionInfos(TableName tableName) throws IOExcepti
395395
FutureUtil.unwrap(
396396
this.bigtableApi.getDataClient().sampleRowKeysAsync(tableName.getNameAsString()));
397397

398-
return getSampledRowKeysAdapter(tableName, serverName).adaptResponse(sampleRowKeyResponse)
398+
return getSampledRowKeysAdapter(tableName, serverName)
399+
.adaptResponse(sampleRowKeyResponse)
399400
.stream()
400401
.map(HRegionLocation::getRegionInfo)
401402
.collect(Collectors.toCollection(CopyOnWriteArrayList::new));

hbase-migration-tools/bigtable-hbase-replication/bigtable-hbase-2.x-replication/src/test/java/com/google/cloud/bigtable/hbase2_x/replication/HbaseToCloudBigtableReplicationEndpointTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,7 @@ public Entry filter(Entry entry) {
103103
}
104104
});
105105
}
106-
};
106+
}
107107

108108
private static final Logger LOG =
109109
LoggerFactory.getLogger(HbaseToCloudBigtableReplicationEndpointTest.class);

pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ limitations under the License.
3232
<parent>
3333
<groupId>com.google.cloud</groupId>
3434
<artifactId>google-cloud-shared-config</artifactId>
35-
<version>1.15.0</version>
35+
<version>1.15.4</version>
3636
</parent>
3737

3838
<licenses>

0 commit comments

Comments (0)