name: Test Bats Unix (Adaptive Encoding)

on:
  pull_request:
    branches: [ main ]
    paths:
      - 'go/**'
      - 'integration-tests/**'
      - '.github/workflows/ci-bats-unix-adaptive.yaml'

# One run per PR (or per ref for non-PR events); newer pushes cancel older runs.
concurrency:
  group: ci-bats-unix-adaptive-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  test:
    name: Bats tests (adaptive encoding)
    defaults:
      run:
        shell: bash
    runs-on: ubuntu-22.04
    env:
      # Quoted so it reaches the process as the string "true", not a YAML boolean.
      DOLT_USE_ADAPTIVE_ENCODING: "true"
      # Secrets expand to '' on forked PRs, so this expression yields the string
      # "true" only when real AWS credentials are available to the run.
      use_credentials: ${{ secrets.AWS_SECRET_ACCESS_KEY != '' && secrets.AWS_ACCESS_KEY_ID != '' }}
    steps:
      - name: Free disk space
        # Deletions are backgrounded with '&' so the step returns immediately;
        # cleanup proceeds while later steps run.
        run: |
          NAME="DISK-CLEANUP"
          echo "[${NAME}] Starting background cleanup..."
          [ -d /usr/share/dotnet ] && sudo rm -rf /usr/share/dotnet &
          [ -d /usr/local/lib/android ] && sudo rm -rf /usr/local/lib/android &
          [ -d /opt/ghc ] && sudo rm -rf /opt/ghc &
          [ -d /usr/local/share/boost ] && sudo rm -rf /usr/local/share/boost &
      - name: Conditionally Set ENV VARS for AWS tests
        run: |
          if [[ "$use_credentials" == "true" ]]; then
            echo "AWS_SDK_LOAD_CONFIG=1" >> $GITHUB_ENV
            echo "AWS_REGION=us-west-2" >> $GITHUB_ENV
            echo "DOLT_BATS_AWS_TABLE=dolt-ci-bats-manifests-us-west-2" >> $GITHUB_ENV
            echo "DOLT_BATS_AWS_BUCKET=dolt-ci-bats-chunks-us-west-2" >> $GITHUB_ENV
            echo "DOLT_BATS_AWS_EXISTING_REPO=aws_remote_bats_tests__dolt__" >> $GITHUB_ENV
          fi
      - name: Configure filter tags for lambda bats
        if: ${{ env.use_credentials == 'true' }}
        run: |
          echo "BATS_FILTER=--filter-tags no_lambda" >> $GITHUB_ENV
      - name: Configure AWS Credentials
        if: ${{ env.use_credentials == 'true' }}
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
          role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }}
          role-duration-seconds: 10800 # 3 hours
      - uses: actions/checkout@v6
      - name: Setup Go 1.x
        uses: actions/setup-go@v5
        with:
          go-version-file: go/go.mod
        id: go
      - name: Setup Python 3.x
        uses: actions/setup-python@v5
        with:
          # Quoted: a bare 3.10 would parse as the float 3.1.
          python-version: "3.10"
      - uses: actions/setup-node@v4
        with:
          node-version: "^16"
      - name: Create CI Bin
        run: |
          mkdir -p ./.ci_bin
          echo "$(pwd)/.ci_bin" >> $GITHUB_PATH
      - name: Install Bats
        run: |
          npm i bats
          echo "$(pwd)/node_modules/.bin" >> $GITHUB_PATH
        working-directory: ./.ci_bin
      - name: Install Python Deps
        run: |
          pip install mysql-connector-python
          pip install pandas
          pip install pyarrow
      - name: Install Dolt
        working-directory: ./go
        run: |
          go build -mod=readonly -o ../.ci_bin/dolt ./cmd/dolt/.
          go build -mod=readonly -o ../.ci_bin/remotesrv ./utils/remotesrv/.
          go build -mod=readonly -o ../.ci_bin/noms ./store/cmd/noms/.
      - name: Setup Dolt Config
        run: |
          dolt config --global --add user.name 'Dolthub Actions'
          dolt config --global --add user.email 'actions@dolthub.com'
      - name: Install expect
        run: sudo apt-get install -y expect
      - name: Install pcre2grep
        run: sudo apt-get install -y pcre2-utils
      - name: Install Maven
        run: sudo apt-get install -y maven
      - name: Install Hadoop
        working-directory: ./.ci_bin
        run: |
          curl -LO https://downloads.apache.org/hadoop/common/hadoop-3.3.6/hadoop-3.3.6.tar.gz
          tar xvf hadoop-3.3.6.tar.gz
          echo "$(pwd)/hadoop-3.3.6/bin" >> $GITHUB_PATH
      - name: Install parquet-cli
        id: parquet_cli
        working-directory: ./.ci_bin
        run: |
          curl -OL https://github.com/apache/parquet-mr/archive/refs/tags/apache-parquet-1.12.3.tar.gz
          tar zxvf apache-parquet-1.12.3.tar.gz
          cd parquet-java-apache-parquet-1.12.3/parquet-cli
          mvn clean install -DskipTests
          runtime_jar="$(pwd)"/target/parquet-cli-1.12.3-runtime.jar
          echo "runtime_jar=$runtime_jar" >> $GITHUB_OUTPUT
      - name: Check expect
        run: expect -v
      - name: Test all Unix
        env:
          SQL_ENGINE: "local-engine"
          PARQUET_RUNTIME_JAR: ${{ steps.parquet_cli.outputs.runtime_jar }}
          BATS_TEST_RETRIES: "3"
        # $BATS_FILTER is deliberately unquoted: it expands to zero-or-more
        # words ("--filter-tags no_lambda" or nothing).
        run: |
          bats --print-output-on-failure --tap $BATS_FILTER .
        working-directory: ./integration-tests/bats
      - name: Test all Unix, SQL_ENGINE=remote-engine
        if: ${{ env.use_credentials == 'true' }}
        env:
          SQL_ENGINE: "remote-engine"
          PARQUET_RUNTIME_JAR: ${{ steps.parquet_cli.outputs.runtime_jar }}
          BATS_TEST_RETRIES: "3"
        run: |
          bats --print-output-on-failure --tap $BATS_FILTER .
        working-directory: ./integration-tests/bats