docs: Stop generating dynamic docs content in build (#3212) #362
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
name: PR Build (macOS)

concurrency:
  group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true
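# One run per repository/branch/workflow triple: pushing a new commit to a PR
# branch cancels the still-running build for the previous commit instead of
# queueing a duplicate, while direct pushes (no head_ref) fall back to the SHA
# so every run stays distinct. For illustration, a PR from a branch named
# "fix-shuffle" would resolve to a group like
# "apache/datafusion-comet-fix-shuffle-PR Build (macOS)" (hypothetical
# repo/branch values).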
on:
  push:
    paths-ignore:
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  pull_request:
    paths-ignore:
      - "doc/**"
      - "docs/**"
      - "**.md"
      - "native/core/benches/**"
      - "native/spark-expr/benches/**"
      - "spark/src/test/scala/org/apache/spark/sql/benchmark/**"
  # manual trigger
  # https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow
  workflow_dispatch:
env:
  RUST_VERSION: stable

jobs:
  # Fast lint check - gates all other jobs (runs on Linux for cost efficiency)
  lint:
    name: Lint
    runs-on: ubuntu-latest
    container:
      image: amd64/rust
    steps:
      - uses: actions/checkout@v6
      - name: Check Rust formatting
        run: |
          rustup component add rustfmt
          cd native && cargo fmt --all -- --check

  # Build native library once and share with all test jobs
  build-native:
    needs: lint
    name: Build Native Library (macOS)
    runs-on: macos-14
    steps:
      - uses: actions/checkout@v6
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-macos-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: 17
          jdk-architecture: aarch64
          protoc-architecture: aarch_64
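          # The two architecture inputs use different spellings on purpose:
          # JDK distributions call Apple Silicon "aarch64", while protoc
          # release archives name the same architecture "aarch_64"
          # (e.g. protoc-<version>-osx-aarch_64.zip), so each tool needs its
          # own value even though both target the same arm64 runner.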
      - name: Restore Cargo cache
        uses: actions/cache/restore@v4
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}
          restore-keys: |
            ${{ runner.os }}-cargo-ci-
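          # Lookup is exact-match on `key` first; on a miss, the
          # `restore-keys` prefix falls back to the most recent cache whose
          # key starts with "macOS-cargo-ci-" (runner.os evaluates to "macOS"
          # here), so a lockfile change still starts from a mostly-warm
          # target dir and only rebuilds the crates that actually changed.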
      - name: Build native library (CI profile)
        run: |
          cd native
          # CI profile: same overflow behavior as release, but faster compilation
          # (no LTO, parallel codegen)
          cargo build --profile ci
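      # The custom "ci" profile is defined in native/Cargo.toml. A minimal
      # sketch of what such a profile could look like, assuming it inherits
      # from release (the real definition may differ):
      #
      #   [profile.ci]
      #   inherits = "release"   # keep release overflow/optimization semantics
      #   lto = false            # skip link-time optimization
      #   codegen-units = 16     # allow parallel codegen for faster builds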
      - name: Upload native library
        uses: actions/upload-artifact@v4
        with:
          name: native-lib-macos
          path: native/target/ci/libcomet.dylib
          retention-days: 1
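          # One day of retention is plenty: the artifact is only consumed by
          # the test jobs inside this same workflow run.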
      - name: Save Cargo cache
        uses: actions/cache/save@v4
        if: github.ref == 'refs/heads/main'
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
            native/target
          key: ${{ runner.os }}-cargo-ci-${{ hashFiles('native/**/Cargo.lock', 'native/**/Cargo.toml') }}
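      # Saving only on main keeps the repository's shared cache quota (10 GB
      # on GitHub-hosted runners) from churning with per-PR entries; PR runs
      # still restore from it above, because caches created on the default
      # branch are visible to all branches.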

  macos-aarch64-test:
    needs: build-native
    strategy:
      matrix:
        os: [macos-14]
        # the goal with these profiles is to get coverage of all Java, Scala, and Spark
        # versions without testing all possible combinations, which would be overkill
        profile:
          - name: "Spark 3.4, JDK 11, Scala 2.12"
            java_version: "11"
            maven_opts: "-Pspark-3.4 -Pscala-2.12"
          - name: "Spark 3.5, JDK 17, Scala 2.13"
            java_version: "17"
            maven_opts: "-Pspark-3.5 -Pscala-2.13"
          - name: "Spark 4.0, JDK 17, Scala 2.13"
            java_version: "17"
            maven_opts: "-Pspark-4.0 -Pscala-2.13"
        suite:
          - name: "fuzz"
            value: |
              org.apache.comet.CometFuzzTestSuite
              org.apache.comet.CometFuzzAggregateSuite
              org.apache.comet.CometFuzzIcebergSuite
              org.apache.comet.CometFuzzMathSuite
              org.apache.comet.DataGeneratorSuite
          - name: "shuffle"
            value: |
              org.apache.comet.exec.CometShuffleSuite
              org.apache.comet.exec.CometShuffle4_0Suite
              org.apache.comet.exec.CometNativeShuffleSuite
              org.apache.comet.exec.CometShuffleEncryptionSuite
              org.apache.comet.exec.CometShuffleManagerSuite
              org.apache.comet.exec.CometAsyncShuffleSuite
              org.apache.comet.exec.DisableAQECometShuffleSuite
              org.apache.comet.exec.DisableAQECometAsyncShuffleSuite
              org.apache.spark.shuffle.sort.SpillSorterSuite
          - name: "parquet"
            value: |
              org.apache.comet.parquet.CometParquetWriterSuite
              org.apache.comet.parquet.ParquetReadV1Suite
              org.apache.comet.parquet.ParquetReadV2Suite
              org.apache.comet.parquet.ParquetReadFromFakeHadoopFsSuite
              org.apache.spark.sql.comet.ParquetDatetimeRebaseV1Suite
              org.apache.spark.sql.comet.ParquetDatetimeRebaseV2Suite
              org.apache.spark.sql.comet.ParquetEncryptionITCase
              org.apache.comet.exec.CometNativeReaderSuite
              org.apache.comet.CometIcebergNativeSuite
          - name: "csv"
            value: |
              org.apache.comet.csv.CometCsvNativeReadSuite
          - name: "exec"
            value: |
              org.apache.comet.exec.CometAggregateSuite
              org.apache.comet.exec.CometExec3_4PlusSuite
              org.apache.comet.exec.CometExecSuite
              org.apache.comet.exec.CometGenerateExecSuite
              org.apache.comet.exec.CometWindowExecSuite
              org.apache.comet.exec.CometJoinSuite
              org.apache.comet.CometNativeSuite
              org.apache.comet.CometSparkSessionExtensionsSuite
              org.apache.spark.CometPluginsSuite
              org.apache.spark.CometPluginsDefaultSuite
              org.apache.spark.CometPluginsNonOverrideSuite
              org.apache.spark.CometPluginsUnifiedModeOverrideSuite
              org.apache.comet.rules.CometScanRuleSuite
              org.apache.comet.rules.CometExecRuleSuite
              org.apache.spark.sql.CometTPCDSQuerySuite
              org.apache.spark.sql.CometTPCDSQueryTestSuite
              org.apache.spark.sql.CometTPCHQuerySuite
              org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite
              org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite
              org.apache.spark.sql.comet.CometTaskMetricsSuite
              org.apache.comet.objectstore.NativeConfigSuite
          - name: "expressions"
            value: |
              org.apache.comet.CometExpressionSuite
              org.apache.comet.CometExpressionCoverageSuite
              org.apache.comet.CometHashExpressionSuite
              org.apache.comet.CometTemporalExpressionSuite
              org.apache.comet.CometArrayExpressionSuite
              org.apache.comet.CometCastSuite
              org.apache.comet.CometMathExpressionSuite
              org.apache.comet.CometStringExpressionSuite
              org.apache.comet.CometBitwiseExpressionSuite
              org.apache.comet.CometMapExpressionSuite
              org.apache.comet.CometJsonExpressionSuite
              org.apache.comet.expressions.conditional.CometIfSuite
              org.apache.comet.expressions.conditional.CometCoalesceSuite
              org.apache.comet.expressions.conditional.CometCaseWhenSuite
          - name: "sql"
            value: |
              org.apache.spark.sql.CometToPrettyStringSuite
      fail-fast: false
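    # 3 profiles x 7 suites = 21 test jobs per run; fail-fast: false lets
    # the rest keep running when one combination fails, so a single flaky
    # suite does not hide results from the others.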
    name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v6
      - name: Setup Rust & Java toolchain
        uses: ./.github/actions/setup-macos-builder
        with:
          rust-version: ${{ env.RUST_VERSION }}
          jdk-version: ${{ matrix.profile.java_version }}
          jdk-architecture: aarch64
          protoc-architecture: aarch_64
      - name: Download native library
        uses: actions/download-artifact@v4
        with:
          name: native-lib-macos
          # Download to release/ since Maven's -Prelease expects libcomet.dylib there
          path: native/target/release/
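          # upload-artifact was given a single file, so the artifact root
          # holds just libcomet.dylib; extracting into native/target/release/
          # therefore yields native/target/release/libcomet.dylib, exactly
          # where the release Maven profile looks for it.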
      # Restore cargo registry cache (for any cargo commands that might run)
      - name: Cache Cargo registry
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry
            ~/.cargo/git
          key: ${{ runner.os }}-cargo-registry-${{ hashFiles('native/**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-cargo-registry-
      - name: Set thread threshold env vars for Spark tests on macOS
        # see: https://github.com/apache/datafusion-comet/issues/2965
        shell: bash
        run: |
          echo "SPARK_TEST_SQL_SHUFFLE_EXCHANGE_MAX_THREAD_THRESHOLD=256" >> $GITHUB_ENV
          echo "SPARK_TEST_SQL_RESULT_QUERY_STAGE_MAX_THREAD_THRESHOLD=256" >> $GITHUB_ENV
          echo "SPARK_TEST_HIVE_SHUFFLE_EXCHANGE_MAX_THREAD_THRESHOLD=48" >> $GITHUB_ENV
          echo "SPARK_TEST_HIVE_RESULT_QUERY_STAGE_MAX_THREAD_THRESHOLD=48" >> $GITHUB_ENV
      - name: Java test steps
        uses: ./.github/actions/java-test
        with:
          artifact_name: ${{ matrix.os }}-${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
          suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }}
          maven_opts: ${{ matrix.profile.maven_opts }}
          skip-native-build: true
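      # The `suites` expression uses the `cond && a || b` idiom to emulate a
      # ternary, since GitHub expressions have no conditional operator.
      # Caveat: '' is falsy in GitHub expressions, so when the condition
      # matches, `cond && ''` evaluates to '' and the `|| b` arm then yields
      # matrix.suite.value anyway; if the intent is to blank out the suite
      # list for that combination, the empty string would need a non-falsy
      # sentinel (e.g. a single space). This is an observation from the
      # documented expression semantics, worth verifying against how
      # ./.github/actions/java-test handles an empty `suites` input.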