package com.clickhouse.benchmark.clients;

import com.clickhouse.benchmark.data.SyntheticDataSet;
import com.clickhouse.client.api.Client;
import com.clickhouse.client.api.data_formats.internal.BinaryStreamReader;
import com.clickhouse.client.api.data_formats.internal.SerializerUtils;
import com.clickhouse.client.api.query.QueryResponse;
import com.clickhouse.client.api.query.QuerySettings;
import com.clickhouse.data.ClickHouseColumn;
import com.clickhouse.data.ClickHouseFormat;
import com.clickhouse.data.ClickHouseInputStream;
import com.clickhouse.data.ClickHouseOutputStream;
import com.clickhouse.data.format.BinaryDataProcessor;
import com.clickhouse.data.format.BinaryStreamUtils;
import com.clickhouse.data.value.ClickHouseDateTimeValue;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.infra.Blackhole;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.EOFException;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.time.LocalDateTime;
import java.util.TimeZone;
29+
30+ public class DataTypes extends BenchmarkBase {
31+
32+ private static final Logger LOGGER = LoggerFactory .getLogger (DataTypes .class );
33+
34+ @ Setup (Level .Iteration )
35+ public void setUpIteration (DataState dataState ) {
36+ super .setUpIteration ();
37+
38+ try (Client c = getClientV2 (); QueryResponse r = c .query ("SELECT * FROM " + dataState .tableNameFilled , new QuerySettings ()
39+ .setFormat (ClickHouseFormat .RowBinaryWithNamesAndTypes )).get ()) {
40+ dataState .datasetAsRowBinaryWithNamesAndTypes = ByteBuffer .wrap (r .getInputStream ().readAllBytes ());
41+ LOGGER .info ("Loaded {} from dataset" , dataState .datasetAsRowBinaryWithNamesAndTypes .capacity ());
42+ } catch (Exception e ) {
43+ LOGGER .error ("Failed to init data for components benchmark" , e );
44+ }
45+
46+ if (dataState .syntheticDataSet != null ) {
47+ dataState .syntheticDataSet = new SyntheticDataSet (dataState .limit );
48+ }
49+ }
50+
51+ @ Benchmark
52+ public void readDateTimeV1 (DataState dataState , Blackhole blackhole ) {
53+ ClickHouseInputStream input = ClickHouseInputStream .of (dataState .syntheticDataSet .getDateTimeValuesRowBinaryStream ());
54+ BinaryDataProcessor .DateTime64SerDe serDe = new BinaryDataProcessor .DateTime64SerDe (3 , TimeZone .getTimeZone ("UTC" ));
55+
56+ ClickHouseDateTimeValue valueHolder = ClickHouseDateTimeValue .ofNull (3 , TimeZone .getTimeZone ("UTC" ));
57+
58+ int valueCount = 0 ;
59+ while (valueCount <= dataState .limit ) {
60+ try {
61+ serDe .deserialize (valueHolder , input );
62+ blackhole .consume (valueHolder );
63+ valueCount ++;
64+ } catch (IOException ex ) {
65+ if (valueCount < dataState .limit ) {
66+ throw new RuntimeException ("Failed to read all values" , ex );
67+ }
68+ break ;
69+ }
70+ }
71+ }
72+
73+ @ Benchmark
74+ public void readDateTimeV2 (DataState dataState , Blackhole blackhole ) {
75+ ClickHouseInputStream input = ClickHouseInputStream .of (dataState .syntheticDataSet .getDateTimeValuesRowBinaryStream ());
76+
77+ byte [] buffer = new byte [8 ];
78+ TimeZone zoneId = TimeZone .getTimeZone ("UTC" );
79+
80+ int valueCount = 0 ;
81+ while (valueCount <= dataState .limit ) {
82+ try {
83+ blackhole .consume (BinaryStreamReader .readDateTime64 (input , buffer , 3 , zoneId ));
84+ valueCount ++;
85+ } catch (EOFException ex ) {
86+ if (valueCount < dataState .limit ) {
87+ throw new RuntimeException ("Failed to read all values" , ex );
88+ }
89+ break ;
90+ } catch (IOException ex ) {
91+ throw new RuntimeException ("Failed to read all values" , ex );
92+ }
93+ }
94+ }
95+
96+
97+ @ Benchmark
98+ public void DateTimeSerializerV1 (DataState dataState , Blackhole blackhole ) {
99+ OutputStream empty = new BlackholeOutputStream (blackhole );
100+ BinaryDataProcessor .DateTime64SerDe serDe =
101+ new BinaryDataProcessor .DateTime64SerDe (3 , TimeZone .getTimeZone ("UTC" ));
102+
103+ ClickHouseOutputStream chos = ClickHouseOutputStream .of (empty );
104+ TimeZone tz = TimeZone .getTimeZone ("UTC" );
105+
106+ for (LocalDateTime dateTime : dataState .syntheticDataSet .getDateTimeValues ()) {
107+ try {
108+ BinaryStreamUtils .writeDateTime64 (chos , dateTime , 3 , tz );
109+ // App should wrap a value with a value object if it wants to use a data processor
110+ // serDe.serialize(ClickHouseDateTimeValue.of(dateTime, 3, tz) , chos);
111+ } catch (Exception e ) {
112+ LOGGER .error ("Error: " , e );
113+ }
114+ }
115+ try {
116+ chos .flush ();
117+ } catch (Exception e ) {
118+ LOGGER .error ("Error: " , e );
119+ }
120+ }
121+
122+ @ Benchmark
123+ public void DateTimeSerializerV2 (DataState dataState , Blackhole blackhole ) {
124+ OutputStream empty = new BlackholeOutputStream (blackhole );
125+ ClickHouseColumn column = ClickHouseColumn .of ("a" , "DateTime64(3, 'UTC')" );
126+
127+ for (LocalDateTime dateTime : dataState .syntheticDataSet .getDateTimeValues ()) {
128+ try {
129+ SerializerUtils .serializeData (empty , dateTime , column );
130+ } catch (Exception e ) {
131+ LOGGER .error ("Error: " , e );
132+ }
133+ }
134+ }
135+
136+ private static class BlackholeOutputStream extends OutputStream {
137+
138+ private final Blackhole blackhole ;
139+ public long count = 0 ;
140+
141+ public BlackholeOutputStream (Blackhole blackhole ) {
142+ this .blackhole = blackhole ;
143+ }
144+
145+ @ Override
146+ public void write (int b ) {
147+ blackhole .consume (b );
148+ count ++;
149+ }
150+
151+ @ Override
152+ public void write (byte [] b ) {
153+ write (b , 0 , b .length );
154+ }
155+
156+ @ Override
157+ public void write (byte [] b , int off , int len ) {
158+ blackhole .consume (b );
159+ count += len ;
160+ }
161+
162+ @ Override
163+ public void flush () {
164+
165+ }
166+
167+ @ Override
168+ public void close () {
169+ }
170+ }
171+ }