# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

name: C# Databricks E2E Tests

on:
  push:
    branches: [main]
    paths:
      - '.github/workflows/csharp_databricks_e2e.yml'
      - 'ci/scripts/csharp_databricks_e2e.sh'
      - 'csharp/src/Apache.Arrow.Adbc/**'
      - 'csharp/src/Client/**'
      - 'csharp/src/Drivers/Apache/Hive2/**'
      - 'csharp/src/Drivers/Apache/Spark/**'
      - 'csharp/src/Drivers/Databricks/**'
      - 'csharp/test/Drivers/Databricks/**'
  pull_request:
    paths:
      - '.github/workflows/csharp_databricks_e2e.yml'
      - 'ci/scripts/csharp_databricks_e2e.sh'
      - 'csharp/src/Apache.Arrow.Adbc/**'
      - 'csharp/src/Client/**'
      - 'csharp/src/Drivers/Apache/Hive2/**'
      - 'csharp/src/Drivers/Apache/Spark/**'
      - 'csharp/src/Drivers/Databricks/**'
      - 'csharp/test/Drivers/Databricks/**'

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true

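# id-token: write lets the job request a GitHub OIDC token, which the Databricks
# setup step below exchanges for a workspace OAuth token.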
permissions:
  contents: read
  id-token: write # Required for OIDC token exchange

defaults:
  run:
    # 'bash' will expand to -eo pipefail
    shell: bash

jobs:
  csharp-databricks-e2e:
    name: "C# ${{ matrix.os }} ${{ matrix.dotnet }}"
    runs-on: ${{ matrix.os }}
    environment: databricks-e2e
    if: ${{ !contains(github.event.pull_request.title, 'WIP') }}
    timeout-minutes: 15
    strategy:
      fail-fast: false
      matrix:
        dotnet: ['8.0.x']
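        # macos-13 is an Intel (x86_64) image; macos-latest currently resolves to an
        # arm64 (Apple Silicon) image, so both macOS architectures are exercised.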
        os: [ubuntu-latest, windows-2022, macos-13, macos-latest]
    steps:
      - name: Install C#
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: ${{ matrix.dotnet }}
      - name: Checkout ADBC
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: recursive
      - name: Build
        shell: bash
        run: ci/scripts/csharp_build.sh $(pwd)
      - name: Set up Databricks testing
        shell: bash
        env:
          DATABRICKS_WORKSPACE_URL: 'adb-6436897454825492.12.azuredatabricks.net'
          DATABRICKS_WAREHOUSE_PATH: '/sql/1.0/warehouses/2f03dd43e35e2aa0'
          DATABRICKS_SP_CLIENT_ID: '8335020c-9ba9-4821-92bb-0e8657759cda'
        run: |
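          # This script exchanges the GitHub OIDC token for a Databricks OAuth token and
          # writes a connection.json that the E2E tests read via DATABRICKS_TEST_CONFIG_FILE.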
          # Set up cross-platform variables
          if [[ "$RUNNER_OS" == "Windows" ]]; then
            DATABRICKS_DIR="$USERPROFILE/.databricks"
            DATABRICKS_CONFIG_FILE="$USERPROFILE/.databricks/connection.json"
          else
            DATABRICKS_DIR="$HOME/.databricks"
            DATABRICKS_CONFIG_FILE="$HOME/.databricks/connection.json"
          fi

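          # ACTIONS_ID_TOKEN_REQUEST_TOKEN and ACTIONS_ID_TOKEN_REQUEST_URL are injected by
          # the runner because the workflow grants `id-token: write`; the audience value is
          # assumed to match the service principal's federation policy.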
          # Get GitHub OIDC token
          GITHUB_TOKEN=$(curl -H "Authorization: bearer $ACTIONS_ID_TOKEN_REQUEST_TOKEN" \
            "$ACTIONS_ID_TOKEN_REQUEST_URL&audience=https://github.com/apache" | jq -r '.value')

          if [ "$GITHUB_TOKEN" = "null" ] || [ -z "$GITHUB_TOKEN" ]; then
            echo "Failed to get GitHub OIDC token"
            exit 1
          fi

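          # OAuth 2.0 token exchange (RFC 8693): the GitHub-issued JWT is presented as the
          # subject token, and the workspace returns an access token for the service principal.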
          # Exchange OIDC token for Databricks OAuth token
          OAUTH_RESPONSE=$(curl -X POST https://$DATABRICKS_WORKSPACE_URL/oidc/v1/token \
            -H "Content-Type: application/x-www-form-urlencoded" \
            -d "grant_type=urn:ietf:params:oauth:grant-type:token-exchange" \
            -d "client_id=$DATABRICKS_SP_CLIENT_ID" \
            -d "subject_token=$GITHUB_TOKEN" \
            -d "subject_token_type=urn:ietf:params:oauth:token-type:jwt" \
            -d "scope=sql")

          DATABRICKS_TOKEN=$(echo "$OAUTH_RESPONSE" | jq -r '.access_token')

          if [ "$DATABRICKS_TOKEN" = "null" ] || [ -z "$DATABRICKS_TOKEN" ]; then
            echo "Failed to get Databricks access token. Response:"
            echo "$OAUTH_RESPONSE"
            exit 1
          fi

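          # The test project locates this file through the DATABRICKS_TEST_CONFIG_FILE
          # variable exported below.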
          # Create Databricks configuration file
          mkdir -p "$DATABRICKS_DIR"
          cat > "$DATABRICKS_CONFIG_FILE" << EOF
          {
            "hostName": "$DATABRICKS_WORKSPACE_URL",
            "port": "443",
            "path": "$DATABRICKS_WAREHOUSE_PATH",
            "auth_type": "oauth",
            "access_token": "$DATABRICKS_TOKEN"
          }
          EOF

          echo "DATABRICKS_TEST_CONFIG_FILE=$DATABRICKS_CONFIG_FILE" >> "$GITHUB_ENV"

          echo "Databricks configuration created successfully at $DATABRICKS_CONFIG_FILE"
      - name: Test Databricks
        shell: bash
        run: ci/scripts/csharp_test_databricks_e2e.sh $(pwd)