Skip to content

Commit de7ee0c

Browse files
authored
Merge branch 'master' into druid_case_sensitivity_fix
2 parents a89f8ad + aab9e8f commit de7ee0c

File tree

1,199 files changed

+284071
-47947
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

1,199 files changed

+284071
-47947
lines changed

.dockerignore

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,13 @@
1111
!rust/cubestore/bin
1212
!rust/cubesql/package.json
1313

14+
# Ignoring builds for native from local machine to prevent a problem with different architecture
15+
packages/cubejs-backend-native/index.node
16+
packages/cubejs-backend-native/native/
17+
# Caches
1418
packages/cubejs-backend-native/target
1519
packages/*/node_modules/
1620
packages/*/dist/
1721
packages/*/coverage/
22+
# Other
1823
packages/cubejs-server-core/playground/
19-
packages/cubejs-serverless
20-
packages/cubejs-serverless-aws
21-
packages/cubejs-serverless-google

.editorconfig

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,3 +11,6 @@ insert_final_newline = true
1111

1212
[*.rs]
1313
indent_size = 4
14+
15+
[rust/cubesql/cubesql/egraph-debug-template/**/*.{js,jsx,ts,tsx}]
16+
indent_size = 4

.github/ISSUE_TEMPLATE/sql_api_query_issue.md

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
name: SQL API Query Issue
33
about: Create a report to help us improve
44
title: ''
5-
labels: ''
5+
labels: 'api:sql'
66
assignees: ''
77

88
---
@@ -13,8 +13,11 @@ Search for `Failed SQL` log message.
1313
**Logical Plan**
1414
Search for `Can't rewrite plan` log message.
1515

16+
**Tool**
17+
Was the SQL query above generated by some BI tool or any other tool? Did you write it yourself?
18+
1619
**Version:**
17-
[e.g. 0.4.5]
20+
E.g., v1.1.0.
1821

1922
**Additional context**
2023
Add any other context about the problem here.
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
#!/bin/bash
2+
set -eo pipefail
3+
4+
# Debug log for test containers
5+
export DEBUG=testcontainers
6+
7+
echo "::group::Dremio [cloud]"
8+
yarn lerna run --concurrency 1 --stream --no-prefix integration:dremio
9+
10+
echo "::endgroup::"
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
#!/bin/bash
2+
set -eo pipefail
3+
4+
# Debug log for test containers
5+
export DEBUG=testcontainers
6+
7+
echo "::group::Firebolt [cloud]"
8+
yarn lerna run --concurrency 1 --stream --no-prefix integration:firebolt
9+
10+
echo "::endgroup::"
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
#!/bin/bash
2+
set -eo pipefail
3+
4+
# Debug log for test containers
5+
export DEBUG=testcontainers
6+
7+
export TEST_VERTICA_VERSION=12.0.4-0
8+
9+
echo "::group::Vertica ${TEST_VERTICA_VERSION}"
10+
docker pull vertica/vertica-ce:${TEST_VERTICA_VERSION}
11+
yarn lerna run --concurrency 1 --stream --no-prefix integration:vertica
12+
echo "::endgroup::"

.github/actions/smoke.sh

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,4 +55,12 @@ echo "::endgroup::"
5555

5656
echo "::group::MongoBI"
5757
yarn lerna run --concurrency 1 --stream --no-prefix smoke:mongobi
58-
echo "::endgroup::"
58+
echo "::endgroup::"
59+
60+
echo "::group::Vertica"
61+
yarn lerna run --concurrency 1 --stream --no-prefix smoke:vertica
62+
echo "::endgroup::"
63+
64+
echo "::group::RBAC"
65+
yarn lerna run --concurrency 1 --stream --no-prefix smoke:rbac
66+
echo "::endgroup::"

.github/pull_request_template.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
**Check List**
2-
- [ ] Tests has been run in packages where changes made if available
2+
- [ ] Tests have been run in packages where changes made if available
33
- [ ] Linter has been run for changed code
44
- [ ] Tests for the changes have been added if not covered yet
55
- [ ] Docs have been added / updated if required

.github/workflows/cloud.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ jobs:
5757

5858
strategy:
5959
matrix:
60-
node-version: [ 18.x ]
60+
node-version: [ 20.x ]
6161
db: [ 'athena', 'bigquery', 'snowflake' ]
6262
fail-fast: false
6363

@@ -80,7 +80,7 @@ jobs:
8080
restore-keys: |
8181
${{ runner.os }}-yarn-
8282
- name: Set Yarn version
83-
run: yarn policies set-version v1.22.19
83+
run: yarn policies set-version v1.22.22
8484
- name: Yarn install
8585
run: CUBESTORE_SKIP_POST_INSTALL=true yarn install --frozen-lockfile
8686
- name: Build client

.github/workflows/drivers-tests.yml

Lines changed: 91 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ on:
2424
- 'packages/cubejs-mysql-driver/**'
2525
- 'packages/cubejs-postgres-driver/**'
2626
- 'packages/cubejs-snowflake-driver/**'
27+
- 'packages/cubejs-vertica-driver/**'
2728

2829
# To test SQL API Push down
2930
- 'packages/cubejs-backend-native/**'
@@ -49,6 +50,7 @@ on:
4950
- 'packages/cubejs-mysql-driver/**'
5051
- 'packages/cubejs-postgres-driver/**'
5152
- 'packages/cubejs-snowflake-driver/**'
53+
- 'packages/cubejs-vertica-driver/**'
5254

5355
# To test SQL API Push down
5456
- 'packages/cubejs-backend-native/**'
@@ -84,7 +86,7 @@ jobs:
8486
name: Build native Linux ${{ matrix.node-version }} ${{ matrix.target }} Python ${{ matrix.python-version }}
8587
strategy:
8688
matrix:
87-
node-version: [ 18 ]
89+
node-version: [ 20 ]
8890
python-version: [ "fallback" ]
8991
target: [ "x86_64-unknown-linux-gnu" ]
9092
fail-fast: false
@@ -109,7 +111,7 @@ jobs:
109111
- name: Install Yarn
110112
run: npm install -g yarn
111113
- name: Set Yarn version
112-
run: yarn policies set-version v1.22.19
114+
run: yarn policies set-version v1.22.22
113115
- name: Install cargo-cp-artifact
114116
run: npm install -g [email protected]
115117
- uses: Swatinem/rust-cache@v2
@@ -144,6 +146,8 @@ jobs:
144146
if: (needs['latest-tag-sha'].outputs.sha != github.sha)
145147
runs-on: ubuntu-20.04
146148
timeout-minutes: 30
149+
env:
150+
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
147151
steps:
148152
- name: Check out the repo
149153
uses: actions/checkout@v4
@@ -154,6 +158,7 @@ jobs:
154158
with:
155159
username: ${{ secrets.DOCKERHUB_USERNAME }}
156160
password: ${{ secrets.DOCKERHUB_TOKEN }}
161+
if: (env.DOCKERHUB_USERNAME != '')
157162
- name: Set up QEMU
158163
uses: docker/setup-qemu-action@v3
159164
- name: Download native build
@@ -167,40 +172,79 @@ jobs:
167172
context: .
168173
file: ./packages/cubejs-docker/testing-drivers.Dockerfile
169174
tags: cubejs/cube:testing-drivers
170-
push: true
175+
push: ${{ (env.DOCKERHUB_USERNAME != '') }}
176+
- name: Save Docker image as artifact
177+
run: |
178+
IMAGE_TAG=cubejs/cube:testing-drivers
179+
docker save -o image.tar $IMAGE_TAG
180+
gzip image.tar
181+
continue-on-error: true
182+
- name: Upload Docker image artifact
183+
uses: actions/upload-artifact@v4
184+
with:
185+
name: docker-image
186+
path: image.tar.gz
171187

172188
tests:
173189
runs-on: ubuntu-20.04
174190
timeout-minutes: 30
175191
needs: [latest-tag-sha, build]
176192
if: (needs['latest-tag-sha'].outputs.sha != github.sha)
193+
env:
194+
CLOUD_DATABASES: >
195+
athena-export-bucket-s3
196+
bigquery-export-bucket-gcs
197+
clickhouse-export-bucket-s3
198+
databricks-jdbc
199+
databricks-jdbc-export-bucket-s3
200+
databricks-jdbc-export-bucket-azure
201+
redshift
202+
redshift-export-bucket-s3
203+
snowflake
204+
snowflake-export-bucket-s3
205+
snowflake-export-bucket-azure
206+
snowflake-export-bucket-azure-via-storage-integration
207+
snowflake-export-bucket-gcs
208+
# As per docs:
209+
# Secrets cannot be directly referenced in if: conditionals. Instead, consider setting
210+
# secrets as job-level environment variables, then referencing the environment variables
211+
# to conditionally run steps in the job.
212+
DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY: ${{ secrets.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY }}
177213
strategy:
178214
matrix:
179215
node:
180-
- 18.x
216+
- 20.x
181217
database:
182-
- athena
183-
- bigquery
218+
- athena-export-bucket-s3
219+
- bigquery-export-bucket-gcs
184220
- clickhouse
221+
- clickhouse-export-bucket-s3
185222
- databricks-jdbc
186-
- databricks-jdbc-export-bucket
223+
- databricks-jdbc-export-bucket-s3
224+
- databricks-jdbc-export-bucket-azure
187225
- mssql
188226
- mysql
189227
- postgres
228+
- redshift
229+
- redshift-export-bucket-s3
190230
- snowflake
231+
- snowflake-export-bucket-s3
232+
- snowflake-export-bucket-azure
233+
- snowflake-export-bucket-azure-via-storage-integration
234+
- snowflake-export-bucket-gcs
191235
fail-fast: false
192236

193237
steps:
194238
- name: Checkout
195239
uses: actions/checkout@v4
196240

197-
- name: Install Node.js 18.x
241+
- name: Install Node.js 20.x
198242
uses: actions/setup-node@v4
199243
with:
200-
node-version: 18.x
244+
node-version: 20.x
201245

202246
- name: Configure `yarn`
203-
run: yarn policies set-version v1.22.19
247+
run: yarn policies set-version v1.22.22
204248

205249
- name: Get yarn cache directory path
206250
id: yarn-cache-dir-path
@@ -237,7 +281,22 @@ jobs:
237281
cd packages/cubejs-testing-drivers
238282
yarn tsc
239283
284+
- name: Download Docker image artifact
285+
uses: actions/download-artifact@v4
286+
with:
287+
name: docker-image
288+
289+
- name: Load Docker image into Docker Daemon
290+
run: |
291+
gunzip image.tar.gz
292+
docker load -i image.tar
293+
240294
- name: Run tests
295+
uses: nick-fields/retry@v3
296+
# It's enough to test for any one secret because they are set all at once or not set all
297+
if: |
298+
(contains(env.CLOUD_DATABASES, matrix.database) && env.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY != '') ||
299+
(!contains(env.CLOUD_DATABASES, matrix.database))
241300
env:
242301
# Athena
243302
DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY: ${{ secrets.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY }}
@@ -246,16 +305,34 @@ jobs:
246305
# BigQuery
247306
DRIVERS_TESTS_CUBEJS_DB_BQ_CREDENTIALS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_BQ_CREDENTIALS }}
248307

308+
#GCS
309+
DRIVERS_TESTS_CUBEJS_DB_EXPORT_GCS_CREDENTIALS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_GCS_CREDENTIALS }}
310+
311+
# Azure
312+
DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY }}
313+
DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AZURE_SAS_TOKEN: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AZURE_SAS_TOKEN }}
314+
249315
# Databricks
250316
DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_URL: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_URL }}
251317
DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_TOKEN: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_TOKEN }}
252318
DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY }}
253319
DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET }}
254320

321+
# Redshift
322+
DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_HOST: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_HOST }}
323+
DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_USER: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_USER }}
324+
DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_PASS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_PASS }}
325+
255326
# Snowflake
256327
DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_USER: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_USER }}
257328
DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_PASS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_PASS }}
258-
run: |
259-
cd ./packages/cubejs-testing-drivers
260-
export DEBUG=testcontainers
261-
yarn ${{ matrix.database }}-full
329+
with:
330+
max_attempts: 3
331+
retry_on: error
332+
retry_wait_seconds: 15
333+
timeout_minutes: 20
334+
command: |
335+
cd ./packages/cubejs-testing-drivers
336+
export DEBUG=testcontainers
337+
yarn ${{ matrix.database }}-full
338+

0 commit comments

Comments
 (0)