Skip to content

Commit a552ad6

Browse files
author
ramalingamt
committed
Merge remote-tracking branch 'upstream/master'
2 parents 53f5f22 + 1beba2d commit a552ad6

File tree

658 files changed

+178892
-26946
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

658 files changed

+178892
-26946
lines changed

.editorconfig

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,3 +11,6 @@ insert_final_newline = true
1111

1212
[*.rs]
1313
indent_size = 4
14+
15+
[rust/cubesql/cubesql/egraph-debug-template/**/*.{js,jsx,ts,tsx}]
16+
indent_size = 4

.github/ISSUE_TEMPLATE/sql_api_query_issue.md

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
name: SQL API Query Issue
33
about: Create a report to help us improve
44
title: ''
5-
labels: ''
5+
labels: 'api:sql'
66
assignees: ''
77

88
---
@@ -13,8 +13,11 @@ Search for `Failed SQL` log message.
1313
**Logical Plan**
1414
Search for `Can't rewrite plan` log message.
1515

16+
**Tool**
17+
Was the SQL query above generated by some BI tool or any other tool? Did you write it yourself?
18+
1619
**Version:**
17-
[e.g. 0.4.5]
20+
E.g., v1.1.0.
1821

1922
**Additional context**
2023
Add any other context about the problem here.

.github/actions/smoke.sh

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,4 +55,8 @@ echo "::endgroup::"
5555

5656
echo "::group::MongoBI"
5757
yarn lerna run --concurrency 1 --stream --no-prefix smoke:mongobi
58-
echo "::endgroup::"
58+
echo "::endgroup::"
59+
60+
echo "::group::RBAC"
61+
yarn lerna run --concurrency 1 --stream --no-prefix smoke:rbac
62+
echo "::endgroup::"

.github/workflows/drivers-tests.yml

Lines changed: 83 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -144,6 +144,8 @@ jobs:
144144
if: (needs['latest-tag-sha'].outputs.sha != github.sha)
145145
runs-on: ubuntu-20.04
146146
timeout-minutes: 30
147+
env:
148+
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
147149
steps:
148150
- name: Check out the repo
149151
uses: actions/checkout@v4
@@ -154,6 +156,7 @@ jobs:
154156
with:
155157
username: ${{ secrets.DOCKERHUB_USERNAME }}
156158
password: ${{ secrets.DOCKERHUB_TOKEN }}
159+
if: (env.DOCKERHUB_USERNAME != '')
157160
- name: Set up QEMU
158161
uses: docker/setup-qemu-action@v3
159162
- name: Download native build
@@ -167,27 +170,66 @@ jobs:
167170
context: .
168171
file: ./packages/cubejs-docker/testing-drivers.Dockerfile
169172
tags: cubejs/cube:testing-drivers
170-
push: true
173+
push: ${{ (env.DOCKERHUB_USERNAME != '') }}
174+
- name: Save Docker image as artifact
175+
run: |
176+
IMAGE_TAG=cubejs/cube:testing-drivers
177+
docker save -o image.tar $IMAGE_TAG
178+
gzip image.tar
179+
continue-on-error: true
180+
- name: Upload Docker image artifact
181+
uses: actions/upload-artifact@v4
182+
with:
183+
name: docker-image
184+
path: image.tar.gz
171185

172186
tests:
173187
runs-on: ubuntu-20.04
174188
timeout-minutes: 30
175189
needs: [latest-tag-sha, build]
176190
if: (needs['latest-tag-sha'].outputs.sha != github.sha)
191+
env:
192+
CLOUD_DATABASES: >
193+
athena-export-bucket-s3
194+
bigquery-export-bucket-gcs
195+
clickhouse-export-bucket-s3
196+
databricks-jdbc
197+
databricks-jdbc-export-bucket-s3
198+
databricks-jdbc-export-bucket-azure
199+
redshift
200+
redshift-export-bucket-s3
201+
snowflake
202+
snowflake-export-bucket-s3
203+
snowflake-export-bucket-azure
204+
snowflake-export-bucket-azure-via-storage-integration
205+
snowflake-export-bucket-gcs
206+
# As per docs:
207+
# Secrets cannot be directly referenced in if: conditionals. Instead, consider setting
208+
# secrets as job-level environment variables, then referencing the environment variables
209+
# to conditionally run steps in the job.
210+
DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY: ${{ secrets.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY }}
177211
strategy:
178212
matrix:
179213
node:
180214
- 20.x
181215
database:
182-
- athena
183-
- bigquery
216+
- athena-export-bucket-s3
217+
- bigquery-export-bucket-gcs
184218
- clickhouse
219+
- clickhouse-export-bucket-s3
185220
- databricks-jdbc
186-
- databricks-jdbc-export-bucket
221+
- databricks-jdbc-export-bucket-s3
222+
- databricks-jdbc-export-bucket-azure
187223
- mssql
188224
- mysql
189225
- postgres
226+
- redshift
227+
- redshift-export-bucket-s3
190228
- snowflake
229+
- snowflake-export-bucket-s3
230+
- snowflake-export-bucket-azure
231+
- snowflake-export-bucket-azure-via-storage-integration
232+
- snowflake-export-bucket-gcs
191233
fail-fast: false
192234

193235
steps:
@@ -237,7 +279,22 @@ jobs:
237279
cd packages/cubejs-testing-drivers
238280
yarn tsc
239281
282+
- name: Download Docker image artifact
283+
uses: actions/download-artifact@v4
284+
with:
285+
name: docker-image
286+
287+
- name: Load Docker image into Docker Daemon
288+
run: |
289+
gunzip image.tar.gz
290+
docker load -i image.tar
291+
240292
- name: Run tests
293+
uses: nick-fields/retry@v3
294+
# It's enough to test for any one secret because they are set all at once or not set all
295+
if: |
296+
(contains(env.CLOUD_DATABASES, matrix.database) && env.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY != '') ||
297+
(!contains(env.CLOUD_DATABASES, matrix.database))
241298
env:
242299
# Athena
243300
DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY: ${{ secrets.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY }}
@@ -246,16 +303,34 @@ jobs:
246303
# BigQuery
247304
DRIVERS_TESTS_CUBEJS_DB_BQ_CREDENTIALS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_BQ_CREDENTIALS }}
248305

306+
#GCS
307+
DRIVERS_TESTS_CUBEJS_DB_EXPORT_GCS_CREDENTIALS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_GCS_CREDENTIALS }}
308+
309+
# Azure
310+
DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AZURE_KEY }}
311+
DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AZURE_SAS_TOKEN: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AZURE_SAS_TOKEN }}
312+
249313
# Databricks
250314
DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_URL: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_URL }}
251315
DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_TOKEN: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_TOKEN }}
252316
DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY }}
253317
DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET }}
254318

319+
# Redshift
320+
DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_HOST: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_HOST }}
321+
DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_USER: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_USER }}
322+
DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_PASS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_REDSHIFT_PASS }}
323+
255324
# Snowflake
256325
DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_USER: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_USER }}
257326
DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_PASS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_PASS }}
258-
run: |
259-
cd ./packages/cubejs-testing-drivers
260-
export DEBUG=testcontainers
261-
yarn ${{ matrix.database }}-full
327+
with:
328+
max_attempts: 3
329+
retry_on: error
330+
retry_wait_seconds: 15
331+
timeout_minutes: 20
332+
command: |
333+
cd ./packages/cubejs-testing-drivers
334+
export DEBUG=testcontainers
335+
yarn ${{ matrix.database }}-full
336+

.github/workflows/master.yml

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -137,12 +137,19 @@ jobs:
137137
with:
138138
github-token: ${{ secrets.GH_TRIGGER_TOKEN }}
139139
script: |
140+
const prUrl = context.payload.pull_request ? context.payload.pull_request.html_url : '';
141+
const commitUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/commit/${context.sha}`;
142+
140143
await github.rest.actions.createWorkflowDispatch({
141144
owner: 'cubedevinc',
142145
repo: 'sql-api-test-suite',
143146
workflow_id: 'test_and_run_test_suites.yml',
144147
ref: 'main',
145148
inputs: {
146-
'cube-image': 'cubejs/cube:dev'
149+
'cube-image': 'cubejs/cube:dev',
150+
'source-repo': context.repo.repo,
151+
'source-pr-url': prUrl,
152+
'source-commit-url': commitUrl,
153+
'initiator': '${{ github.actor }}'
147154
}
148155
})

.github/workflows/post-release.yml

Lines changed: 0 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -205,22 +205,3 @@ jobs:
205205
color: danger
206206
env:
207207
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
208-
209-
trigger-repo-sync:
210-
runs-on: ubuntu-20.04
211-
name: 'Sync runtime repo'
212-
timeout-minutes: 60
213-
steps:
214-
- name: Checkout
215-
uses: actions/checkout@v4
216-
- name: Trigger runtime
217-
uses: actions/github-script@v6
218-
with:
219-
github-token: ${{ secrets.GH_TRIGGER_TOKEN }}
220-
script: |
221-
await github.rest.actions.createWorkflowDispatch({
222-
owner: 'cubedevinc',
223-
repo: 'cube-runtime',
224-
workflow_id: 'sync.yml',
225-
ref: 'master'
226-
})

0 commit comments

Comments
 (0)