
Commit be6cb77

Author: elasticsearchmachine
Commit message: [CI] Auto commit changes from spotless
1 parent 64f9cda, commit be6cb77

File tree

2 files changed: +21 -20 lines changed

benchmarks/src/main/java/org/elasticsearch/benchmark/common/compress/FSSTCompressBenchmark.java

Lines changed: 17 additions & 16 deletions
@@ -48,6 +48,7 @@ public static class CompressionMetrics {
     }
 
     private static final int MB_8 = 8 * 1024 * 1024;
+
     private byte[] concatenateTo8mb(byte[] contentBytes) {
         byte[] bytes = new byte[MB_8 + 8];
         int i = 0;
@@ -66,7 +67,7 @@ public void setup() throws IOException {
 
         byte[] contentBytes = FSST.toBytes(content);
         input = concatenateTo8mb(contentBytes);
-        offsets = new int[]{0, MB_8};
+        offsets = new int[] { 0, MB_8 };
         outBuf = new byte[MB_8];
         outOffsets = new int[2];
     }
@@ -100,19 +101,19 @@ public void compressLZ4Fast(Blackhole bh, CompressionMetrics metrics) throws IOE
         metrics.compressionRatio = compressedSize / (double) inputSize;
     }
 
-// @Benchmark
-// public void compressLZ4High(Blackhole bh, CompressionMetrics metrics) throws IOException {
-// int inputSize = offsets[1];
-//
-// var dataInput = new ByteBuffersDataInput(List.of(ByteBuffer.wrap(input)));
-// var dataOutput = new ByteArrayDataOutput(outBuf);
-//
-// Compressor compressor = CompressionMode.HIGH_COMPRESSION.newCompressor();
-// compressor.compress(dataInput, dataOutput);
-//
-// long compressedSize = dataOutput.getPosition();
-// bh.consume(dataOutput);
-//
-// metrics.compressionRatio = compressedSize / (double) inputSize;
-// }
+    // @Benchmark
+    // public void compressLZ4High(Blackhole bh, CompressionMetrics metrics) throws IOException {
+    // int inputSize = offsets[1];
+    //
+    // var dataInput = new ByteBuffersDataInput(List.of(ByteBuffer.wrap(input)));
+    // var dataOutput = new ByteArrayDataOutput(outBuf);
+    //
+    // Compressor compressor = CompressionMode.HIGH_COMPRESSION.newCompressor();
+    // compressor.compress(dataInput, dataOutput);
+    //
+    // long compressedSize = dataOutput.getPosition();
+    // bh.consume(dataOutput);
+    //
+    // metrics.compressionRatio = compressedSize / (double) inputSize;
+    // }
 }
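
The commented-out compressLZ4High block above is only reformatted by this commit; it stays disabled. For reference, re-enabling it would give roughly the following JMH benchmark. This is a sketch assembled from the comment text in the hunk: the class name CompressLZ4HighSketch, the import list, the @State scaffolding, and the minimal setup() are assumptions standing in for the parts of FSSTCompressBenchmark.java that this diff does not show.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.lucene.codecs.compressing.CompressionMode;
import org.apache.lucene.codecs.compressing.Compressor;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.ByteBuffersDataInput;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;

// Sketch only: the benchmark body is copied from the commented-out lines above;
// everything else (state fields, setup, imports) is an assumption about the real class.
@State(Scope.Benchmark)
public class CompressLZ4HighSketch {

    private static final int MB_8 = 8 * 1024 * 1024;

    private byte[] input;   // ~8 MB of sample data, prepared by setup()
    private int[] offsets;  // { 0, inputSize }
    private byte[] outBuf;  // scratch buffer for the compressed output

    // Assumed stand-in for the CompressionMetrics state class referenced in the diff.
    @State(Scope.Thread)
    public static class CompressionMetrics {
        public double compressionRatio;
    }

    @Setup
    public void setup() {
        // Minimal stand-in for the real setup(): tile a short sample string to 8 MB.
        input = new byte[MB_8];
        byte[] sample = "sample payload for the compression benchmark ".getBytes(StandardCharsets.UTF_8);
        for (int i = 0; i < input.length; i++) {
            input[i] = sample[i % sample.length];
        }
        offsets = new int[] { 0, input.length };
        outBuf = new byte[input.length];
    }

    @Benchmark
    public void compressLZ4High(Blackhole bh, CompressionMetrics metrics) throws IOException {
        int inputSize = offsets[1];

        var dataInput = new ByteBuffersDataInput(List.of(ByteBuffer.wrap(input)));
        var dataOutput = new ByteArrayDataOutput(outBuf);

        // Lucene's HIGH_COMPRESSION stored-fields mode; compress the whole input in one call.
        Compressor compressor = CompressionMode.HIGH_COMPRESSION.newCompressor();
        compressor.compress(dataInput, dataOutput);

        long compressedSize = dataOutput.getPosition();
        bh.consume(dataOutput);

        // Ratio of compressed bytes to original bytes, as in the commented-out code.
        metrics.compressionRatio = compressedSize / (double) inputSize;
    }
}

The Blackhole consumption and the compressionRatio assignment mirror the compressLZ4Fast benchmark named in the third hunk's header.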

benchmarks/src/main/java/org/elasticsearch/benchmark/common/compress/FSSTDecompressBenchmark.java

Lines changed: 4 additions & 4 deletions
@@ -25,7 +25,6 @@
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
@@ -37,8 +36,8 @@
 @State(Scope.Benchmark)
 public class FSSTDecompressBenchmark {
 
-// @Param({ "fsst", "lz4_high", "lz4_fast" })
-    @Param({"fsst", "lz4_fast"})
+    // @Param({ "fsst", "lz4_high", "lz4_fast" })
+    @Param({ "fsst", "lz4_fast" })
     public String compressionType;
 
     @Param("")
@@ -61,6 +60,7 @@ public class FSSTDecompressBenchmark {
     private FSST.SymbolTable symbolTable;
 
    private static final int MB_8 = 8 * 1024 * 1024;
+
     private byte[] concatenateTo8mb(byte[] contentBytes) {
         byte[] bytes = new byte[MB_8 + 8];
         int i = 0;
@@ -79,7 +79,7 @@ public void setup() throws IOException {
         byte[] contentBytes = FSST.toBytes(content);
         originalSize = MB_8;
         input = concatenateTo8mb(contentBytes);
-        offsets = new int[]{0, originalSize};
+        offsets = new int[] { 0, originalSize };
 
         outBuf = new byte[input.length];
         outOffsets = new int[2];
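
Both benchmark files rely on the concatenateTo8mb helper whose signature and first two statements appear in the hunks above; its loop body lies outside the diff context. Below is a minimal sketch of how such a helper could tile the sample content into the 8 MB input buffer. The loop, the class name ConcatenateTo8mbSketch, and the treatment of the 8 bytes of slack are assumptions, not the actual implementation.

import java.nio.charset.StandardCharsets;

// Sketch only: the real concatenateTo8mb body is not part of this diff.
public class ConcatenateTo8mbSketch {

    private static final int MB_8 = 8 * 1024 * 1024;

    // Assumed behaviour: repeat contentBytes back-to-back until MB_8 bytes are filled.
    // The extra 8 bytes of slack mirror the allocation shown in the diff context.
    static byte[] concatenateTo8mb(byte[] contentBytes) {
        byte[] bytes = new byte[MB_8 + 8];
        int i = 0;
        while (i < MB_8) {
            int toCopy = Math.min(contentBytes.length, MB_8 - i);
            System.arraycopy(contentBytes, 0, bytes, i, toCopy);
            i += toCopy;
        }
        return bytes;
    }

    public static void main(String[] args) {
        byte[] content = "hello fsst".getBytes(StandardCharsets.UTF_8);
        byte[] tiled = concatenateTo8mb(content);
        System.out.println("buffer length = " + tiled.length); // 8 MB plus 8 bytes of slack
    }
}

Tiling the corpus to a fixed 8 MB keeps the input size comparable across compressionType values, which is consistent with offsets = new int[] { 0, MB_8 } and the ratio being computed against offsets[1] in the hunks above.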
