
Commit 51ca480

draft implementation of tests
1 parent 1fcd888 commit 51ca480

3 files changed: +86 −11 lines changed

performance/src/test/com/clickhouse/benchmark/BenchmarkRunner.java

Lines changed: 6 additions & 6 deletions
@@ -30,20 +30,20 @@ public static void main(String[] args) throws Exception {
         Map<String, String> argMap = parseArguments(args);
 
         Options opt = new OptionsBuilder()
-                .include(QueryClient.class.getSimpleName())
-                .include(InsertClient.class.getSimpleName())
-//                .include(Components.class.getSimpleName())
+//                .include(QueryClient.class.getSimpleName())
+//                .include(InsertClient.class.getSimpleName())
+                .include(Components.class.getSimpleName())
                 .forks(1) // must be a fork. No fork only for debugging
                 .mode(Mode.SampleTime)
                 .timeUnit(TimeUnit.MILLISECONDS)
                 .threads(1)
                 .addProfiler(GCProfiler.class)
                 .addProfiler(MemPoolProfiler.class)
-                .warmupIterations(3)
+                .warmupIterations(0)
                 .warmupTime(TimeValue.seconds(10))
-                .measurementIterations(10)
+                .measurementIterations(1)
                 .jvmArgs("-Xms8g", "-Xmx8g")
-                .measurementTime(TimeValue.seconds(isCloud() ? 30 : 10))
+                .measurementTime(TimeValue.seconds(isCloud() ? 30 : 120))
                 .resultFormat(ResultFormatType.JSON)
 //                .output(String.format("jmh-results-%s-%s.out", isCloud() ? "cloud" : "local", System.currentTimeMillis()))
                 .result(String.format("jmh-results-%s-%s.json", isCloud() ? "cloud" : "local", System.currentTimeMillis()))
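For context, an Options object built like the one above is normally handed straight to the JMH Runner. Below is a minimal, trimmed-down sketch of such a driver; the class name and the reduced option set are assumptions for illustration, not part of this commit.

import java.util.concurrent.TimeUnit;

import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.results.format.ResultFormatType;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

// Hypothetical stand-alone driver mirroring the configuration in the diff:
// only the Components benchmarks, a single fork, no warmup, one measurement iteration.
public class ComponentsRunSketch {
    public static void main(String[] args) throws Exception {
        Options opt = new OptionsBuilder()
                .include("Components")            // same effect as Components.class.getSimpleName()
                .forks(1)
                .mode(Mode.SampleTime)
                .timeUnit(TimeUnit.MILLISECONDS)
                .warmupIterations(0)
                .measurementIterations(1)
                .resultFormat(ResultFormatType.JSON)
                .build();
        new Runner(opt).run();                    // runs the selected benchmarks and writes JSON results
    }
}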

performance/src/test/com/clickhouse/benchmark/clients/BenchmarkBase.java

Lines changed: 4 additions & 0 deletions
@@ -30,6 +30,7 @@
 
 import java.io.ByteArrayOutputStream;
 import java.io.InputStream;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.math.BigInteger;
@@ -72,6 +73,7 @@ public void tearDownIteration() {
 
     @State(Scope.Benchmark)
     public static class DataState {
+
         @Param({"file://dataset_500k.csv"})
         String datasetSourceName;
         @Param({"300000", "220000", "100000", "10000"})
@@ -84,6 +86,8 @@ public static class DataState {
 
         DataSet dataSet;
 
+        ByteBuffer datasetAsRowBinaryWithNamesAndTypes;
+
         public void setDataSet(DataSet dataSet) {
             this.dataSet = dataSet;
         }
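The new ByteBuffer field rides on the usual JMH state-injection pattern: a @State(Scope.Benchmark) object is populated once per iteration and then passed into every benchmark invocation. A generic, self-contained illustration of that pattern follows; the names are illustrative only and not project code.

import java.nio.ByteBuffer;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;

// Illustrative example of the JMH pattern used by BenchmarkBase/Components:
// shared state holds @Param-driven data plus a pre-encoded buffer, and JMH
// injects it into each benchmark method.
public class StateInjectionSketch {

    @State(Scope.Benchmark)
    public static class Data {
        @Param({"10000", "100000"})
        int rows;                      // the benchmark is repeated for every @Param value

        ByteBuffer encoded;            // refreshed once per iteration, read by the benchmark body

        @Setup(Level.Iteration)
        public void fill() {
            encoded = ByteBuffer.wrap(new byte[rows]); // placeholder payload
        }
    }

    @Benchmark
    public void readBuffer(Data data, Blackhole bh) {
        bh.consume(data.encoded.remaining());
    }
}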

performance/src/test/com/clickhouse/benchmark/clients/Components.java

Lines changed: 76 additions & 5 deletions
@@ -1,31 +1,62 @@
 package com.clickhouse.benchmark.clients;
 
 import com.clickhouse.benchmark.data.DataSet;
+import com.clickhouse.client.ClickHouseConfig;
+import com.clickhouse.client.api.Client;
 import com.clickhouse.client.api.data_formats.RowBinaryFormatWriter;
+import com.clickhouse.client.api.data_formats.RowBinaryWithNamesAndTypesFormatReader;
+import com.clickhouse.client.api.data_formats.internal.BinaryStreamReader;
 import com.clickhouse.client.api.insert.InsertResponse;
 import com.clickhouse.client.api.internal.ClickHouseLZ4OutputStream;
+import com.clickhouse.client.api.query.QueryResponse;
+import com.clickhouse.client.api.query.QuerySettings;
+import com.clickhouse.client.config.ClickHouseClientOption;
 import com.clickhouse.data.ClickHouseColumn;
 import com.clickhouse.data.ClickHouseDataProcessor;
+import com.clickhouse.data.ClickHouseDeserializer;
 import com.clickhouse.data.ClickHouseFormat;
+import com.clickhouse.data.ClickHouseInputStream;
 import com.clickhouse.data.ClickHouseOutputStream;
 import com.clickhouse.data.ClickHousePassThruStream;
 import com.clickhouse.data.ClickHouseRecord;
 import com.clickhouse.data.ClickHouseSerializer;
+import com.clickhouse.data.format.ClickHouseRowBinaryProcessor;
 import com.clickhouse.data.stream.Lz4OutputStream;
 import net.jpountz.lz4.LZ4Factory;
 import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.Level;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.infra.Blackhole;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
 public class Components extends BenchmarkBase {
     private static final Logger LOGGER = LoggerFactory.getLogger(Components.class);
 
-    @Benchmark
+    @Setup(Level.Iteration)
+    public void setUpIteration(DataState dataState) {
+        super.setUpIteration();
+
+        try (Client c = getClientV2(); QueryResponse r = c.query("SELECT * FROM " + dataState.tableNameFilled, new QuerySettings()
+                .setFormat(ClickHouseFormat.RowBinaryWithNamesAndTypes)).get()) {
+            dataState.datasetAsRowBinaryWithNamesAndTypes = ByteBuffer.wrap(r.getInputStream().readAllBytes());
+            LOGGER.info("Loaded {} from dataset", dataState.datasetAsRowBinaryWithNamesAndTypes.capacity());
+        } catch (Exception e) {
+            LOGGER.error("Failed to init data for components benchmark", e);
+        }
+    }
+
+
+    // @Benchmark
     public void CompressingOutputStreamV1(DataState dataState) {
         DataSet dataSet = dataState.dataSet;
         try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); ClickHouseOutputStream out =
@@ -40,12 +71,12 @@ public void CompressingOutputStreamV1(DataState dataState) {
 
     private static final LZ4Factory factory = LZ4Factory.fastestInstance();
 
-    @Benchmark
+    // @Benchmark
     public void CompressingOutputStreamV2(DataState dataState) {
         DataSet dataSet = dataState.dataSet;
         try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
              ClickHouseLZ4OutputStream out = new ClickHouseLZ4OutputStream(baos,
-                    factory.fastCompressor(), 8196)) {
+                     factory.fastCompressor(), 8196)) {
             for (byte[] bytes : dataSet.getBytesList(dataSet.getFormat())) {
                 out.write(bytes);
             }
@@ -83,7 +114,8 @@ public void close() {
             }
         };
     }
-    @Benchmark
+
+    // @Benchmark
     public void SerializerOutputStreamV1(DataState dataState) {
         OutputStream empty = createEmptyOutputStream();
         try {
@@ -101,7 +133,7 @@ public void SerializerOutputStreamV1(DataState dataState) {
         }
     }
 
-    @Benchmark
+    // @Benchmark
     public void SerializerOutputStreamV2(DataState dataState) {
         OutputStream empty = createEmptyOutputStream();
         try {
@@ -119,4 +151,43 @@ public void SerializerOutputStreamV2(DataState dataState) {
             LOGGER.error("Error: ", e);
         }
     }
+
+    @Benchmark
+    public void DeserializerOutputStreamV1(DataState dataState, Blackhole blackhole) {
+        InputStream input = new ByteArrayInputStream(dataState.datasetAsRowBinaryWithNamesAndTypes.array());
+        try {
+            ClickHouseConfig config = new ClickHouseConfig(Collections.singletonMap(ClickHouseClientOption.FORMAT, ClickHouseFormat.RowBinaryWithNamesAndTypes));
+            ClickHouseDataProcessor p = new ClickHouseRowBinaryProcessor(config,
+                    ClickHouseInputStream.of(input), null, null, Collections.emptyMap());
+            List<ClickHouseColumn> columns = p.getColumns();
+            for (ClickHouseRecord record : p.records()) {
+                for (int i = 0; i < columns.size(); i++) {
+                    blackhole.consume(record.getValue(i).asObject());
+                }
+            }
+        } catch (Exception e) {
+            LOGGER.error("Error: ", e);
+        }
+    }
+
+    @Benchmark
+    public void DeserializerOutputStreamV2(DataState dataState, Blackhole blackhole) {
+        InputStream input = new ByteArrayInputStream(dataState.datasetAsRowBinaryWithNamesAndTypes.array());
+        try {
+            RowBinaryWithNamesAndTypesFormatReader r = new RowBinaryWithNamesAndTypesFormatReader(input,
+                    new QuerySettings()
+                            .setUseTimeZone("UTC")
+                            .setFormat(ClickHouseFormat.RowBinaryWithNamesAndTypes), new BinaryStreamReader.DefaultByteBufferAllocator());
+
+            Map<String, Object> row;
+            while ((row = r.next()) != null) {
+                for (String column : row.keySet()) {
+                    blackhole.consume(row.get(column));
+                }
+            }
+
+        } catch (Exception e) {
+            LOGGER.error("Error: ", e);
+        }
+    }
 }
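Since both new benchmarks consume the same in-memory RowBinaryWithNamesAndTypes payload, a quick way to sanity-check them before trusting the timings is to confirm that the two read paths agree on the row count. The condensed sketch below reuses only calls that appear in the diff; how the byte[] payload is produced is out of scope here, and the class and method names are illustrative.

import java.io.ByteArrayInputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import com.clickhouse.client.ClickHouseConfig;
import com.clickhouse.client.api.data_formats.RowBinaryWithNamesAndTypesFormatReader;
import com.clickhouse.client.api.data_formats.internal.BinaryStreamReader;
import com.clickhouse.client.api.query.QuerySettings;
import com.clickhouse.client.config.ClickHouseClientOption;
import com.clickhouse.data.ClickHouseColumn;
import com.clickhouse.data.ClickHouseDataProcessor;
import com.clickhouse.data.ClickHouseFormat;
import com.clickhouse.data.ClickHouseInputStream;
import com.clickhouse.data.ClickHouseRecord;
import com.clickhouse.data.format.ClickHouseRowBinaryProcessor;

// Sketch of the two read paths the new benchmarks compare, plus a row-count
// cross-check. The byte[] is assumed to already hold RowBinaryWithNamesAndTypes data.
public class DeserializerPathsSketch {

    // V1 path: client-v1 data processor iterating ClickHouseRecord values.
    static long countRowsV1(byte[] rbwntBytes) throws Exception {
        ClickHouseConfig config = new ClickHouseConfig(
                Collections.singletonMap(ClickHouseClientOption.FORMAT, ClickHouseFormat.RowBinaryWithNamesAndTypes));
        ClickHouseDataProcessor p = new ClickHouseRowBinaryProcessor(config,
                ClickHouseInputStream.of(new ByteArrayInputStream(rbwntBytes)), null, null, Collections.emptyMap());
        List<ClickHouseColumn> columns = p.getColumns();
        long rows = 0;
        for (ClickHouseRecord record : p.records()) {
            for (int i = 0; i < columns.size(); i++) {
                record.getValue(i).asObject(); // materialize every value, as the benchmark does
            }
            rows++;
        }
        return rows;
    }

    // V2 path: client-v2 format reader returning each row as a Map.
    static long countRowsV2(byte[] rbwntBytes) throws Exception {
        RowBinaryWithNamesAndTypesFormatReader reader = new RowBinaryWithNamesAndTypesFormatReader(
                new ByteArrayInputStream(rbwntBytes),
                new QuerySettings()
                        .setUseTimeZone("UTC")
                        .setFormat(ClickHouseFormat.RowBinaryWithNamesAndTypes),
                new BinaryStreamReader.DefaultByteBufferAllocator());
        long rows = 0;
        Map<String, Object> row;
        while ((row = reader.next()) != null) {
            rows++;
        }
        return rows;
    }

    // Both paths should see exactly the same number of rows.
    static void sanityCheck(byte[] rbwntBytes) throws Exception {
        if (countRowsV1(rbwntBytes) != countRowsV2(rbwntBytes)) {
            throw new IllegalStateException("v1 and v2 readers disagree on the row count");
        }
    }
}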

0 commit comments
