Skip to content

Commit 073ec4b

Browse files
committed
Merge remote-tracking branch 'origin/main' into refactor-shrink-bloom-index-meta-cache-size
2 parents bb6834a + 9e554b4 commit 073ec4b

File tree

154 files changed

+3514
-2365
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

154 files changed

+3514
-2365
lines changed

.github/actions/benchmark_cloud/action.yml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -69,11 +69,9 @@ runs:
6969
case ${{ inputs.source }} in
7070
pr)
7171
BENCHMARK_SYSTEM="Databend(PR#${{ inputs.source_id }})"
72-
BENCHMARK_TYPE="PR"
7372
;;
7473
release)
7574
BENCHMARK_SYSTEM="Databend(Release@${{ inputs.source_id }})"
76-
BENCHMARK_TYPE="Release"
7775
;;
7876
*)
7977
echo "Unsupported benchmark source: ${{ inputs.source }}"
@@ -94,7 +92,7 @@ runs:
9492
esac
9593
jq ".cluster_size = \"${BENCHMARK_CLUSTER_SIZE}\"" <result.json >result.json.tmp && mv result.json.tmp result.json
9694
jq ".machine = \"${BENCHMARK_CLUSTER_SIZE}×${{ inputs.size }}\"" <result.json >result.json.tmp && mv result.json.tmp result.json
97-
jq ".tags = [\"${BENCHMARK_TYPE}\", \"s3\"]" <result.json >result.json.tmp && mv result.json.tmp result.json
95+
jq ".tags = [\"s3\"]" <result.json >result.json.tmp && mv result.json.tmp result.json
9896
jq ".comment = \"commit:${{ inputs.sha }}\"" <result.json >result.json.tmp && mv result.json.tmp result.json
9997
mv result.json result-${{ inputs.dataset }}-cloud-${{ inputs.size }}.json
10098

.github/actions/benchmark_local/action.yml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -41,11 +41,9 @@ runs:
4141
case ${{ inputs.source }} in
4242
pr)
4343
BENCHMARK_SYSTEM="Databend(PR#${{ inputs.source_id }})"
44-
BENCHMARK_TYPE="PR"
4544
;;
4645
release)
4746
BENCHMARK_SYSTEM="Databend(Release@${{ inputs.source_id }})"
48-
BENCHMARK_TYPE="Release"
4947
;;
5048
*)
5149
echo "Unsupported benchmark source: ${{ inputs.source }}"
@@ -56,7 +54,7 @@ runs:
5654
jq ".machine = \"c5.4xlarge\"" <result.json >result.json.tmp && mv result.json.tmp result.json
5755
jq ".cluster_size = 1" <result.json >result.json.tmp && mv result.json.tmp result.json
5856
jq ".comment = \"commit:${{ inputs.sha }}\"" <result.json >result.json.tmp && mv result.json.tmp result.json
59-
jq ".tags = [\"${BENCHMARK_TYPE}\", \"gp3\"]" <result.json >result.json.tmp && mv result.json.tmp result.json
57+
jq ".tags = [\"gp3\"]" <result.json >result.json.tmp && mv result.json.tmp result.json
6058
jq ".extra.${{ inputs.source }} = \"${{ inputs.source_id }}\"" <result.json >result.json.tmp && mv result.json.tmp result.json
6159
mv result.json result-${{ inputs.dataset }}-local.json
6260

.github/workflows/trusted-benchmark.yml

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -373,6 +373,11 @@ jobs:
373373
with:
374374
name: benchmark-${{ matrix.dataset }}
375375
path: benchmark/clickbench/results/${{ matrix.dataset }}
376+
- name: Generate report
377+
working-directory: benchmark/clickbench
378+
run: |
379+
aws s3 sync ${{ env.BENCHMARK_S3_PREFIX }}/release/${{ matrix.dataset }}/$(date -u +%Y)/$(date -u +%m)/ ./results/${{ matrix.dataset }}/ --no-progress
380+
./update-results.sh ${{ matrix.dataset }}
376381
- name: Upload nightly results to repo.databend.rs
377382
working-directory: benchmark/clickbench/
378383
run: |
@@ -382,10 +387,6 @@ jobs:
382387
aws s3 cp $file "${RESULT_PEFIX}-$(basename $file)"
383388
aws s3 cp $file "${LATEST_PREFIX}-$(basename $file)"
384389
done
385-
- name: Generate report
386-
working-directory: benchmark/clickbench
387-
run: |
388-
./update-results.sh ${{ matrix.dataset }}
389390
- name: Upload PR clickbench report to repo.databend.rs
390391
working-directory: benchmark/clickbench
391392
run: |

Cargo.lock

Lines changed: 4 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

benchmark/clickbench/hits/index.html

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -350,8 +350,7 @@
350350
<span class="nowrap themes"><span id="toggle-dark">🌚</span><span id="toggle-light">🌞</span></span>
351351
<h1>ClickBench — a Benchmark For Analytical DBMS</h1>
352352
<a href="https://github.com/ClickHouse/ClickBench/">Add a System</a> | <a
353-
href="https://benchmark.clickhouse.com/hardware/">Hardware Benchmark</a> | <a
354-
href="https://benchmark.clickhouse.com/versions/">Versions Benchmark</a>
353+
href="https://repo.databend.rs/benchmark/clickbench/release/hits.html">Release Versions Benchmark</a>
355354
</div>
356355

357356
<table class="selectors-container stick-left">

benchmark/clickbench/tpch/index.html

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -329,8 +329,7 @@
329329
<span class="nowrap themes"><span id="toggle-dark">🌚</span><span id="toggle-light">🌞</span></span>
330330
<h1>ClickBench — a Benchmark For Analytical DBMS</h1>
331331
<a href="https://github.com/ClickHouse/ClickBench/">Add a System</a> | <a
332-
href="https://benchmark.clickhouse.com/hardware/">Hardware Benchmark</a> | <a
333-
href="https://benchmark.clickhouse.com/versions/">Versions Benchmark</a>
332+
href="https://repo.databend.rs/benchmark/clickbench/release/tpch.html">Release Versions Benchmark</a>
334333
</div>
335334

336335
<table class="selectors-container stick-left">

benchmark/clickbench/tpch/load.sql

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,40 +1,40 @@
11
COPY INTO customer
2-
FROM 's3://repo.databend.rs/datasets/tpch10/customer/' pattern = 'customer.*' file_format =(
3-
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 0
2+
FROM 's3://repo.databend.rs/tpch100/customer/' pattern = 'customer.tbl.*' file_format =(
3+
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 1
44
);
55
ANALYZE TABLE customer;
66
COPY INTO lineitem
7-
FROM 's3://repo.databend.rs/datasets/tpch10/lineitem/' pattern = 'lineitem.*' file_format =(
7+
FROM 's3://repo.databend.rs/tpch100/lineitem/' pattern = 'lineitem.tbl.*' file_format =(
88
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 0
99
);
1010
ANALYZE TABLE lineitem;
1111
COPY INTO nation
12-
FROM 's3://repo.databend.rs/datasets/tpch10/nation/' pattern = 'nation.*' file_format =(
12+
FROM 's3://repo.databend.rs/tpch100/nation.tbl' file_format =(
1313
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 0
1414
);
1515
ANALYZE TABLE nation;
1616
COPY INTO orders
17-
FROM 's3://repo.databend.rs/datasets/tpch10/orders/' pattern = 'orders.*' file_format =(
17+
FROM 's3://repo.databend.rs/tpch100/orders/' pattern = 'orders.tbl.*' file_format =(
1818
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 0
1919
);
2020
ANALYZE TABLE orders;
2121
COPY INTO partsupp
22-
FROM 's3://repo.databend.rs/datasets/tpch10/partsupp/' pattern = 'partsupp.*' file_format =(
22+
FROM 's3://repo.databend.rs/tpch100/partsupp/' pattern = 'partsupp.tbl.*' file_format =(
2323
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 0
2424
);
2525
ANALYZE TABLE partsupp;
2626
COPY INTO part
27-
FROM 's3://repo.databend.rs/datasets/tpch10/part/' pattern = 'part.*' file_format =(
27+
FROM 's3://repo.databend.rs/tpch100/part/' pattern = 'part.tbl.*' file_format =(
2828
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 0
2929
);
3030
ANALYZE TABLE part;
3131
COPY INTO region
32-
FROM 's3://repo.databend.rs/datasets/tpch10/region/' pattern = 'region.*' file_format =(
32+
FROM 's3://repo.databend.rs/tpch100/region.tbl' file_format =(
3333
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 0
3434
);
3535
ANALYZE TABLE region;
3636
COPY INTO supplier
37-
FROM 's3://repo.databend.rs/datasets/tpch10/supplier/' pattern = 'supplier.*' file_format =(
37+
FROM 's3://repo.databend.rs/tpch100/supplier/' pattern = 'supplier.tbl.*' file_format =(
3838
type = 'CSV' field_delimiter = '|' record_delimiter = '\n' skip_header = 0
3939
);
4040
ANALYZE TABLE supplier;

docs/doc/10-deploy/01-installing-databend.md

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,11 @@ Databend offers you these options for downloading the installation packages:
1313

1414
## Manual Download
1515

16-
The primary distribution packages for Databend are `.tar.gz` archives containing single executable files that you can download from the [Download](https://databend.rs/download) page and extract them anywhere on your system.
16+
The primary distribution packages for Databend are `.tar.gz` archives containing single executable files that you can download from the [Download](https://databend.rs/download) page and extract them anywhere on your system.
17+
18+
:::note
19+
**Linux Generic (ARM, 64-bit)** is suitable for Linux distributions that use musl as the standard C library; **Linux Generic (x86, 64-bit)** is suitable for Linux distributions that use GNU C with a minimum version 2.29 of GLIBC.
20+
:::
1721

1822
## APT Package Manager
1923

docs/doc/10-deploy/07-query/10-query-config.md

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -104,12 +104,6 @@ You can find [sample configuration files](https://github.com/datafuselabs/databe
104104
* Default: `3307`
105105
* Env variable: `QUERY_MYSQL_HANDLER_PORT`
106106

107-
### clickhouse_handler_host
108-
109-
* The IP address to listen on for ClickHouse handler, e.g., `0.0.0.0`.
110-
* Default: `"127.0.0.1"`
111-
* Env variable: `QUERY_CLICKHOUSE_HANDLER_HOST`
112-
113107
### clickhouse_http_handler_host
114108

115109
* The IP address to listen on for ClickHouse HTTP handler, e.g., `0.0.0.0`.
@@ -317,9 +311,9 @@ flight_api_address = "0.0.0.0:9091"
317311
mysql_handler_host = "0.0.0.0"
318312
mysql_handler_port = 3307
319313

320-
# Query ClickHouse Handler.
321-
clickhouse_handler_host = "0.0.0.0"
322-
clickhouse_handler_port = 9001
314+
# Query ClickHouse HTTP Handler.
315+
clickhouse_http_handler_host = "0.0.0.0"
316+
clickhouse_http_handler_port = 9001
323317

324318
# Query HTTP Handler.
325319
http_handler_host = "0.0.0.0"

docs/doc/13-sql-reference/20-system-tables/system-configs.md

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,9 +43,6 @@ mysql> SELECT * FROM system.configs;
4343
| query | management_mode | false | |
4444
| query | jwt_key_file | | |
4545
| query | jwt_key_files | | |
46-
| query | async_insert_max_data_size | 10000 | |
47-
| query | async_insert_busy_timeout | 200 | |
48-
| query | async_insert_stale_timeout | 0 | |
4946
| query | users | | |
5047
| query | share_endpoint_address | | |
5148
| query | share_endpoint_auth_token_file | | |

0 commit comments

Comments
 (0)