diff --git a/.github/create_combined_ci_report.py b/.github/create_combined_ci_report.py
new file mode 100755
index 000000000000..19c7a6c05472
--- /dev/null
+++ b/.github/create_combined_ci_report.py
@@ -0,0 +1,294 @@
+#!/usr/bin/env python3
+import argparse
+import os
+from pathlib import Path
+from itertools import combinations
+import json
+
+import requests
+from clickhouse_driver import Client
+import boto3
+from botocore.exceptions import NoCredentialsError
+
+DATABASE_HOST_VAR = "CHECKS_DATABASE_HOST"
+DATABASE_USER_VAR = "CHECKS_DATABASE_USER"
+DATABASE_PASSWORD_VAR = "CHECKS_DATABASE_PASSWORD"
+S3_BUCKET = "altinity-build-artifacts"
+
+
+def get_checks_fails(client: Client, job_url: str):
+ """
+ Get tests that did not succeed for the given job URL.
+ Exclude checks that have status 'error' as they are counted in get_checks_errors.
+ """
+ columns = (
+ "check_status, check_name, test_status, test_name, report_url as results_link"
+ )
+ query = f"""SELECT {columns} FROM `gh-data`.checks
+ WHERE task_url='{job_url}'
+ AND test_status IN ('FAIL', 'ERROR')
+ AND check_status!='error'
+ ORDER BY check_name, test_name
+ """
+ return client.query_dataframe(query)
+
+
+def get_checks_known_fails(client: Client, job_url: str, known_fails: dict):
+ """
+ Get tests that are known to fail for the given job URL.
+ """
+ assert len(known_fails) > 0, "cannot query the database with empty known fails"
+ columns = (
+ "check_status, check_name, test_status, test_name, report_url as results_link"
+ )
+ query = f"""SELECT {columns} FROM `gh-data`.checks
+ WHERE task_url='{job_url}'
+ AND test_status='BROKEN'
+ AND test_name IN ({','.join(f"'{test}'" for test in known_fails.keys())})
+ ORDER BY test_name, check_name
+ """
+
+ df = client.query_dataframe(query)
+
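+ # With use_numpy=True the driver returns low-cardinality string columns as
+ # pandas categoricals, hence the .cat accessor on test_name below.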
+ df.insert(
+ len(df.columns) - 1,
+ "reason",
+ df["test_name"]
+ .cat.remove_unused_categories()
+ .apply(
+ lambda test_name: known_fails[test_name].get("reason", "No reason given")
+ ),
+ )
+
+ return df
+
+
+def get_checks_errors(client: Client, job_url: str):
+ """
+ Get checks that have status 'error' for the given job URL.
+ """
+ columns = (
+ "check_status, check_name, test_status, test_name, report_url as results_link"
+ )
+ query = f"""SELECT {columns} FROM `gh-data`.checks
+ WHERE task_url='{job_url}'
+ AND check_status=='error'
+ ORDER BY check_name, test_name
+ """
+ return client.query_dataframe(query)
+
+
+def drop_prefix_rows(df, column_to_clean):
+ """
+ Drop rows from the dataframe if:
+ - the row matches another row completely except for the specified column
+ - the specified column of that row is a prefix of the same column in another row
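+
+ Example: two rows identical except for test_name, "/alter" and
+ "/alter/attach partition" -> the shorter "/alter" row is dropped.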
+ """
+ to_drop = set()
+ reference_columns = [col for col in df.columns if col != column_to_clean]
+ for (i, row_1), (j, row_2) in combinations(df.iterrows(), 2):
+ if all(row_1[col] == row_2[col] for col in reference_columns):
+ if row_2[column_to_clean].startswith(row_1[column_to_clean]):
+ to_drop.add(i)
+ elif row_1[column_to_clean].startswith(row_2[column_to_clean]):
+ to_drop.add(j)
+ return df.drop(to_drop)
+
+
+def get_regression_fails(client: Client, job_url: str):
+ """
+ Get regression tests that did not succeed for the given job URL.
+ """
+ # If you rename the alias for report_url, also update the formatters in format_results_as_html_table
+ # Nested SELECT handles test reruns
+ query = f"""SELECT arch, job_name, status, test_name, results_link
+ FROM (
+ SELECT
+ architecture as arch,
+ test_name,
+ argMax(result, start_time) AS status,
+ job_url,
+ job_name,
+ report_url as results_link
+ FROM `gh-data`.clickhouse_regression_results
+ GROUP BY architecture, test_name, job_url, job_name, report_url, start_time
+ ORDER BY start_time DESC, length(test_name) DESC
+ )
+ WHERE job_url='{job_url}'
+ AND status IN ('Fail', 'Error')
+ """
+ df = client.query_dataframe(query)
+ df = drop_prefix_rows(df, "test_name")
+ df["job_name"] = df["job_name"].str.title()
+ return df
+
+
+def url_to_html_link(url: str) -> str:
+ if not url:
+ return ""
+ text = url.split("/")[-1]
+ if not text:
+ text = "results"
+ return f'<a href="{url}">{text}</a>'
+
+
+def format_test_name_for_linewrap(text: str) -> str:
+ """Tweak the test name to improve line wrapping."""
+ return text.replace(".py::", "/")
+
+
+def format_results_as_html_table(results) -> str:
+ if len(results) == 0:
+ return "
Nothing to report
"
+ results.columns = [col.replace("_", " ").title() for col in results.columns]
+ html = (
+ results.to_html(
+ index=False,
+ formatters={
+ "Results Link": url_to_html_link,
+ "Test Name": format_test_name_for_linewrap,
+ },
+ escape=False,
+ ) # tbody/thead tags interfere with the table sorting script
+ .replace("\n", "")
+ .replace("\n", "")
+ .replace("\n", "")
+ .replace("\n", "")
+ .replace(' argparse.Namespace:
+ parser = argparse.ArgumentParser(description="Create a combined CI report.")
+ parser.add_argument(
+ "--actions-run-url", required=True, help="URL of the actions run"
+ )
+ parser.add_argument(
+ "--pr-number", required=True, help="Pull request number for the S3 path"
+ )
+ parser.add_argument(
+ "--commit-sha", required=True, help="Commit SHA for the S3 path"
+ )
+ parser.add_argument(
+ "--no-upload", action="store_true", help="Do not upload the report"
+ )
+ parser.add_argument(
+ "--known-fails", type=str, help="Path to the file with known fails"
+ )
+ parser.add_argument(
+ "--mark-preview", action="store_true", help="Mark the report as a preview"
+ )
+ return parser.parse_args()
+
+
+def main():
+ args = parse_args()
+
+ db_client = Client(
+ host=os.getenv(DATABASE_HOST_VAR),
+ user=os.getenv(DATABASE_USER_VAR),
+ password=os.getenv(DATABASE_PASSWORD_VAR),
+ port=9440,
+ secure="y",
+ verify=False,
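+ # use_numpy=True enables query_dataframe (requires numpy and pandas)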
+ settings={"use_numpy": True},
+ )
+
+ s3_path = (
+ f"https://s3.amazonaws.com/{S3_BUCKET}/{args.pr_number}/{args.commit_sha}/"
+ )
+ report_destination_url = s3_path + "combined_report.html"
+ ci_running_report_url = s3_path + "ci_running.html"
+
+ response = requests.get(ci_running_report_url)
+ if response.status_code == 200:
+ ci_running_report: str = response.text
+ else:
+ print(
+ f"Failed to download CI running report. Status code: {response.status_code}, Response: {response.text}"
+ )
+ exit(1)
+
+ fail_results = {
+ "checks_fails": get_checks_fails(db_client, args.actions_run_url),
+ "checks_known_fails": [],
+ "checks_errors": get_checks_errors(db_client, args.actions_run_url),
+ "regression_fails": get_regression_fails(db_client, args.actions_run_url),
+ }
+
+ if args.known_fails:
+ if not os.path.exists(args.known_fails):
+ print(f"Known fails file {args.known_fails} not found.")
+ exit(1)
+
+ with open(args.known_fails) as f:
+ known_fails = json.load(f)
+
+ if known_fails:
+ fail_results["checks_known_fails"] = get_checks_known_fails(
+ db_client, args.actions_run_url, known_fails
+ )
+
+ combined_report = (
+ ci_running_report.replace("ClickHouse CI Running for", "Combined CI Report for")
+ .replace(
+ "",
+ f"""Table of Contents
+{'This is a preview. FinishCheck has not completed.
' if args.mark_preview else ""}
+
+
+CI Jobs Status
+""",
+ 1,
+ )
+ .replace(
+ "
",
+ f"""
+
+Checks Errors
+{format_results_as_html_table(fail_results['checks_errors'])}
+
+Checks New Fails
+{format_results_as_html_table(fail_results['checks_fails'])}
+
+Regression New Fails
+{format_results_as_html_table(fail_results['regression_fails'])}
+
+Checks Known Fails
+{format_results_as_html_table(fail_results['checks_known_fails'])}
+""",
+ 1,
+ )
+ )
+ report_path = Path("combined_report.html")
+ report_path.write_text(combined_report, encoding="utf-8")
+
+ if args.no_upload:
+ print(f"Report saved to {report_path}")
+ exit(0)
+
+ # Upload the report to S3
+ s3_client = boto3.client("s3")
+
+ try:
+ s3_client.put_object(
+ Bucket=S3_BUCKET,
+ Key=f"{args.pr_number}/{args.commit_sha}/combined_report.html",
+ Body=combined_report,
+ ContentType="text/html; charset=utf-8",
+ )
+ except NoCredentialsError:
+ print("Credentials not available for S3 upload.")
+
+ print(report_destination_url)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml
index b1749d27cef4..9897dfa13f7f 100644
--- a/.github/workflows/regression.yml
+++ b/.github/workflows/regression.yml
@@ -19,7 +19,7 @@ name: Regression test workflow - Release
default: 210
type: number
build_sha:
- description: commit sha of the workflow run for artifact upload.
+ description: commit sha of the workflow run for artifact upload.
required: true
type: string
checkout_depth:
@@ -93,6 +93,9 @@ env:
AWS_DEFAULT_REGION: ${{ secrets.AWS_REPORT_REGION }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
+ CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }}
+ CHECKS_DATABASE_USER: ${{ secrets.CHECKS_DATABASE_USER }}
+ CHECKS_DATABASE_PASSWORD: ${{ secrets.CHECKS_DATABASE_PASSWORD }}
args: --test-to-end
--no-colors
--local
@@ -100,6 +103,7 @@ env:
--output classic
--parallel 1
--log raw.log
+ --with-analyzer
artifacts: builds
artifact_paths: |
./report.html
@@ -170,17 +174,29 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/regression.py
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} ${{ matrix.SUITE }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
@@ -221,18 +237,30 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u alter/regression.py
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
--only "/alter/${{ matrix.ONLY }} partition/*"
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} Alter ${{ matrix.ONLY }} partition"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
@@ -273,10 +301,11 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/benchmark.py
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
--storage ${{ matrix.STORAGE }}
--gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
--gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }}
@@ -285,13 +314,24 @@ jobs:
--aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }}
--aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
--aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} Benchmark ${{ matrix.STORAGE }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
@@ -328,18 +368,30 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/regression.py
--ssl
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} Clickhouse Keeper SSL"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
@@ -379,17 +431,29 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/regression.py
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} LDAP ${{ matrix.SUITE }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
@@ -425,17 +489,29 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/regression.py
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} Parquet"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
@@ -476,22 +552,34 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/regression.py
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
--storage ${{ matrix.STORAGE }}
--aws-s3-bucket ${{ secrets.REGRESSION_AWS_S3_BUCKET }}
--aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }}
--aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
--aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} Parquet ${{ matrix.STORAGE }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
@@ -532,10 +620,11 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/regression.py
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
--storage ${{ matrix.STORAGE }}
--gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
--gcs-key-id ${{ secrets.REGRESSION_GCS_KEY_ID }}
@@ -544,13 +633,27 @@ jobs:
--aws-s3-region ${{ secrets.REGRESSION_AWS_S3_REGION }}
--aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
--aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --azure-account-name ${{ secrets.AZURE_ACCOUNT_NAME }}
+ --azure-storage-key ${{ secrets.AZURE_STORAGE_KEY }}
+ --azure-container ${{ secrets.AZURE_CONTAINER_NAME }}
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} S3 ${{ matrix.STORAGE }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
@@ -570,6 +673,7 @@ jobs:
uses: actions/checkout@v4
with:
repository: Altinity/clickhouse-regression
+ ref: ${{ inputs.commit }}
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
@@ -590,10 +694,11 @@ jobs:
- name: Get deb url
run: python3 .github/get-deb-url.py --reports-path ${{ env.REPORTS_PATH }} --github-env $GITHUB_ENV
- name: Run ${{ env.SUITE }} suite
+ id: run_suite
run: EXITCODE=0;
python3
-u ${{ env.SUITE }}/regression.py
- --clickhouse-binary-path ${{ env.clickhouse_binary_path }}
+ --clickhouse-binary-path ${{ env.clickhouse_path }}
--aws-s3-access-key ${{ secrets.REGRESSION_AWS_S3_SECRET_ACCESS_KEY }}
--aws-s3-key-id ${{ secrets.REGRESSION_AWS_S3_KEY_ID }}
--aws-s3-uri https://s3.${{ secrets.REGRESSION_AWS_S3_REGION}}.amazonaws.com/${{ secrets.REGRESSION_AWS_S3_BUCKET }}/data/
@@ -601,13 +706,24 @@ jobs:
--gcs-key-secret ${{ secrets.REGRESSION_GCS_KEY_SECRET }}
--gcs-uri ${{ secrets.REGRESSION_GCS_URI }}
--with-${{ matrix.STORAGE }}
- --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_binary_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.id="$GITHUB_RUN_ID" job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
+ --attr project="$GITHUB_REPOSITORY" project.id="$GITHUB_REPOSITORY_ID" package="${{ env.clickhouse_path }}" version="${{ env.version }}" user.name="$GITHUB_ACTOR" repository="https://github.com/Altinity/clickhouse-regression" commit.hash="$(git rev-parse HEAD)" job.name=$GITHUB_JOB job.url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" arch="$(uname -i)"
${{ env.args }} || EXITCODE=$?;
.github/add_link_to_logs.sh;
exit $EXITCODE
+ - name: Set Commit Status
+ if: always()
+ run: python3 .github/set_builds_status.py
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ JOB_OUTCOME: ${{ steps.run_suite.outcome }}
+ SUITE_NAME: "Regression ${{ inputs.arch }} Tiered Storage ${{ matrix.STORAGE }}"
- name: Create and upload logs
if: always()
run: .github/create_and_upload_logs.sh 1
+ - name: Upload logs to regression results database
+ if: always()
+ timeout-minutes: 20
+ run: .github/upload_results_to_database.sh 1
- uses: actions/upload-artifact@v4
if: always()
with:
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
index 1b444e227834..98036dedb38f 100644
--- a/.github/workflows/release_branches.yml
+++ b/.github/workflows/release_branches.yml
@@ -29,7 +29,7 @@ on: # yamllint disable-line rule:truthy
jobs:
RunConfig:
- runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04]
+ runs-on: [self-hosted, altinity-on-demand, altinity-type-cax41, altinity-image-arm-system-ubuntu-22.04]
outputs:
data: ${{ steps.runconfig.outputs.CI_DATA }}
steps:
@@ -85,7 +85,7 @@ jobs:
secrets: inherit
with:
test_name: Compatibility check (amd64)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
CompatibilityCheckAarch64:
needs: [RunConfig, BuilderDebAarch64]
@@ -94,7 +94,7 @@ jobs:
secrets: inherit
with:
test_name: Compatibility check (aarch64)
- runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
#########################################################################################
#################################### ORDINARY BUILDS ####################################
@@ -194,7 +194,7 @@ jobs:
secrets: inherit
with:
test_name: Docker server image
- runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx41, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
DockerKeeperImage:
needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
@@ -203,7 +203,7 @@ jobs:
secrets: inherit
with:
test_name: Docker keeper image
- runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx41, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
############################################################################################
##################################### BUILD REPORTER #######################################
@@ -224,7 +224,7 @@ jobs:
secrets: inherit
with:
test_name: ClickHouse build check
- runner_type: altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-image-arm-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
BuilderSpecialReport:
# run report check for failed builds to indicate the CI error
@@ -237,7 +237,7 @@ jobs:
secrets: inherit
with:
test_name: ClickHouse special build check
- runner_type: altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-image-arm-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
MarkReleaseReady:
if: ${{ !failure() && !cancelled() }}
@@ -246,7 +246,7 @@ jobs:
- BuilderBinDarwinAarch64
- BuilderDebRelease
- BuilderDebAarch64
- runs-on: [self-hosted, altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04]
+ runs-on: [self-hosted, altinity-on-demand, altinity-setup-reporter, altinity-type-cax11, altinity-image-arm-system-ubuntu-22.04]
steps:
- name: Debug
run: |
@@ -280,7 +280,7 @@ jobs:
secrets: inherit
with:
test_name: Install packages (amd64)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 install_check.py "$CHECK_NAME"
@@ -291,7 +291,7 @@ jobs:
secrets: inherit
with:
test_name: Install packages (arm64)
- runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 install_check.py "$CHECK_NAME"
@@ -305,7 +305,7 @@ jobs:
secrets: inherit
with:
test_name: Stateless tests (release)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestAarch64:
needs: [RunConfig, BuilderDebAarch64]
@@ -314,7 +314,7 @@ jobs:
secrets: inherit
with:
test_name: Stateless tests (aarch64)
- runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestAsan:
needs: [RunConfig, BuilderDebAsan]
@@ -323,7 +323,7 @@ jobs:
secrets: inherit
with:
test_name: Stateless tests (asan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestTsan:
needs: [RunConfig, BuilderDebTsan]
@@ -332,7 +332,7 @@ jobs:
secrets: inherit
with:
test_name: Stateless tests (tsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestMsan:
needs: [RunConfig, BuilderDebMsan]
@@ -341,7 +341,7 @@ jobs:
secrets: inherit
with:
test_name: Stateless tests (msan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestUBsan:
needs: [RunConfig, BuilderDebUBsan]
@@ -350,7 +350,7 @@ jobs:
secrets: inherit
with:
test_name: Stateless tests (ubsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestDebug:
needs: [RunConfig, BuilderDebDebug]
@@ -359,7 +359,7 @@ jobs:
secrets: inherit
with:
test_name: Stateless tests (debug)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
##############################################################################################
############################ FUNCTIONAl STATEFUL TESTS #######################################
@@ -371,7 +371,7 @@ jobs:
secrets: inherit
with:
test_name: Stateful tests (release)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestAarch64:
needs: [RunConfig, BuilderDebAarch64]
@@ -380,7 +380,7 @@ jobs:
secrets: inherit
with:
test_name: Stateful tests (aarch64)
- runner_type: altinity-on-demand, altinity-type-cax41, altinity-in-hel1, altinity-image-arm-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestAsan:
needs: [RunConfig, BuilderDebAsan]
@@ -389,7 +389,7 @@ jobs:
secrets: inherit
with:
test_name: Stateful tests (asan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestTsan:
needs: [RunConfig, BuilderDebTsan]
@@ -398,7 +398,7 @@ jobs:
secrets: inherit
with:
test_name: Stateful tests (tsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestMsan:
needs: [RunConfig, BuilderDebMsan]
@@ -407,7 +407,7 @@ jobs:
secrets: inherit
with:
test_name: Stateful tests (msan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestUBsan:
needs: [RunConfig, BuilderDebUBsan]
@@ -416,7 +416,7 @@ jobs:
secrets: inherit
with:
test_name: Stateful tests (ubsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestDebug:
needs: [RunConfig, BuilderDebDebug]
@@ -425,7 +425,7 @@ jobs:
secrets: inherit
with:
test_name: Stateful tests (debug)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
##############################################################################################
######################################### STRESS TESTS #######################################
@@ -437,7 +437,7 @@ jobs:
secrets: inherit
with:
test_name: Stress test (asan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
StressTestTsan:
needs: [RunConfig, BuilderDebTsan]
@@ -446,7 +446,7 @@ jobs:
secrets: inherit
with:
test_name: Stress test (tsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
StressTestMsan:
needs: [RunConfig, BuilderDebMsan]
@@ -455,7 +455,7 @@ jobs:
secrets: inherit
with:
test_name: Stress test (msan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
StressTestUBsan:
needs: [RunConfig, BuilderDebUBsan]
@@ -464,7 +464,7 @@ jobs:
secrets: inherit
with:
test_name: Stress test (ubsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
StressTestDebug:
needs: [RunConfig, BuilderDebDebug]
@@ -473,7 +473,7 @@ jobs:
secrets: inherit
with:
test_name: Stress test (debug)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
#############################################################################################
############################# INTEGRATION TESTS #############################################
@@ -485,7 +485,7 @@ jobs:
secrets: inherit
with:
test_name: Integration tests (asan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
IntegrationTestsAnalyzerAsan:
needs: [RunConfig, BuilderDebAsan]
@@ -494,7 +494,7 @@ jobs:
secrets: inherit
with:
test_name: Integration tests (asan, old analyzer)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
IntegrationTestsTsan:
needs: [RunConfig, BuilderDebTsan]
@@ -503,7 +503,7 @@ jobs:
secrets: inherit
with:
test_name: Integration tests (tsan)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
IntegrationTestsRelease:
needs: [RunConfig, BuilderDebRelease]
@@ -512,7 +512,7 @@ jobs:
secrets: inherit
with:
test_name: Integration tests (release)
- runner_type: altinity-on-demand, altinity-type-cpx51, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
#############################################################################################
##################################### REGRESSION TESTS ######################################
@@ -524,7 +524,7 @@ jobs:
secrets: inherit
with:
runner_type: altinity-on-demand, altinity-type-cpx51, altinity-image-x86-app-docker-ce, altinity-setup-regression
- commit: c5e1513a2214ee33696c29717935e0a94989ac2a
+ commit: 53d73ed32155a8a17ee0d0cdb15aee96c98010a2
arch: release
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
RegressionTestsAarch64:
@@ -534,7 +534,7 @@ jobs:
secrets: inherit
with:
runner_type: altinity-on-demand, altinity-type-cax41, altinity-image-arm-app-docker-ce, altinity-setup-regression
- commit: c5e1513a2214ee33696c29717935e0a94989ac2a
+ commit: 53d73ed32155a8a17ee0d0cdb15aee96c98010a2
arch: aarch64
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
SignRelease:
@@ -544,7 +544,7 @@ jobs:
secrets: inherit
with:
test_name: Sign release
- runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx41, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
SignAarch64:
needs: [RunConfig, BuilderDebAarch64]
@@ -553,10 +553,10 @@ jobs:
secrets: inherit
with:
test_name: Sign aarch64
- runner_type: altinity-on-demand, altinity-type-cpx41, altinity-in-ash, altinity-image-x86-system-ubuntu-22.04
+ runner_type: altinity-on-demand, altinity-type-cpx41, altinity-image-x86-system-ubuntu-22.04
data: ${{ needs.RunConfig.outputs.data }}
FinishCheck:
- if: ${{ !failure() && !cancelled() }}
+ if: ${{ !cancelled() }}
needs:
- DockerServerImage
- DockerKeeperImage
@@ -600,3 +600,24 @@ jobs:
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 finish_check.py
+ - name: Create and upload combined report
+ env:
+ CHECKS_DATABASE_HOST: ${{ secrets.CHECKS_DATABASE_HOST }}
+ CHECKS_DATABASE_USER: ${{ secrets.CHECKS_DATABASE_USER }}
+ CHECKS_DATABASE_PASSWORD: ${{ secrets.CHECKS_DATABASE_PASSWORD }}
+ COMMIT_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+ PR_NUMBER: ${{ github.event.number }}
+ ACTIONS_RUN_URL: ${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}
+ shell: bash
+ run: |
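+ # deps for create_combined_ci_report.py; pinned so the use_numpy dataframe path stays stable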
+ pip install clickhouse-driver==0.2.8 numpy==1.26.4 pandas==2.2.0
+
+ REPORT_LINK=$(python3 .github/create_combined_ci_report.py --pr-number $PR_NUMBER --commit-sha $COMMIT_SHA --actions-run-url $ACTIONS_RUN_URL --known-fails tests/broken_tests.json)
+
+ IS_VALID_URL=$(echo "$REPORT_LINK" | grep -E '^https?://' || true)  # grep exits nonzero on no match; don't fail the step under -e/pipefail
+ if [[ -n $IS_VALID_URL ]]; then
+ echo "Combined CI Report: [View Report]($REPORT_LINK)" >> $GITHUB_STEP_SUMMARY
+ else
+ echo "Error: $REPORT_LINK" >> $GITHUB_STEP_SUMMARY
+ exit 1
+ fi
\ No newline at end of file
diff --git a/docker/test/util/process_functional_tests_result.py b/docker/test/util/process_functional_tests_result.py
index fd4cc9f4bf76..30cba0d6690d 100755
--- a/docker/test/util/process_functional_tests_result.py
+++ b/docker/test/util/process_functional_tests_result.py
@@ -4,6 +4,7 @@
import logging
import argparse
import csv
+import json
OK_SIGN = "[ OK "
FAIL_SIGN = "[ FAIL "
@@ -206,6 +207,7 @@ def write_results(results_file, status_file, results, status):
parser.add_argument("--out-results-file", default="/test_output/test_results.tsv")
parser.add_argument("--out-status-file", default="/test_output/check_status.tsv")
parser.add_argument("--broken-tests", default="/analyzer_tech_debt.txt")
+ parser.add_argument("--broken-tests-json", default="/broken_tests.json")
args = parser.parse_args()
broken_tests = list()
@@ -213,6 +215,14 @@ def write_results(results_file, status_file, results, status):
logging.info(f"File {args.broken_tests} with broken tests found")
with open(args.broken_tests) as f:
broken_tests = f.read().splitlines()
+
+ if os.path.exists(args.broken_tests_json):
+ logging.info(f"File {args.broken_tests_json} with broken tests found")
+
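+ # the JSON maps test name -> {"message": ..., "reason": ...}; only the names are needed here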
+ with open(args.broken_tests_json) as f:
+ broken_tests.extend(json.load(f).keys())
+
+ if broken_tests:
logging.info(f"Broken tests in the list: {len(broken_tests)}")
state, description, test_results = process_result(args.in_results_dir, broken_tests)
diff --git a/tests/broken_tests.json b/tests/broken_tests.json
new file mode 100644
index 000000000000..d9d0008ef88f
--- /dev/null
+++ b/tests/broken_tests.json
@@ -0,0 +1,58 @@
+{
+ "test_postgresql_replica_database_engine_2/test.py::test_quoting_publication": {
+ "message": "DB::Exception: Syntax error:",
+ "reason": "syntax error"
+ },
+ "test_distributed_inter_server_secret/test.py::test_secure_cluster_distributed_over_distributed_different_users": {
+ "message": "DB::NetException: Connection reset by peer, while reading from socket",
+ "reason": "network issue"
+ },
+ "02920_alter_column_of_projections": {
+ "reason": "requires different settings"
+ },
+ "02888_system_tables_with_inaccsessible_table_function": {
+ "reason": "todo investigate"
+ },
+ "03094_grouparraysorted_memory": {
+ "reason": "flaky"
+ },
+ "02700_s3_part_INT_MAX": {
+ "reason": "fails with asan"
+ },
+ "test_backward_compatibility/test_aggregate_function_state.py::test_backward_compatability_for_avg":{
+ "reason": "not run by upstream"
+ },
+ "test_backward_compatibility/test_aggregate_function_state.py::test_backward_compatability_for_uniq_exact[1000]":{
+ "reason": "not run by upstream"
+ },
+ "test_backward_compatibility/test_aggregate_function_state.py::test_backward_compatability_for_uniq_exact[500000] ":{
+ "reason": "not run by upstream"
+ },
+ "test_backward_compatibility/test_aggregate_function_state.py::test_backward_compatability_for_uniq_exact_variadic[1000]":{
+ "reason": "not run by upstream"
+ },
+ "test_backward_compatibility/test_aggregate_function_state.py::test_backward_compatability_for_uniq_exact_variadic[500000]":{
+ "reason": "not run by upstream"
+ },
+ "test_dictionaries_all_layouts_separate_sources/test_mongo.py::test_simple_ssl[cache-True]": {
+ "reason": "errors upstream"
+ },
+ "test_dictionaries_all_layouts_separate_sources/test_mongo.py::test_simple_ssl[direct-True]":{
+ "reason": "errors upstream"
+ },
+ "test_dictionaries_all_layouts_separate_sources/test_mongo.py::test_simple_ssl[flat-True]":{
+ "reason": "errors upstream"
+ },
+ "test_dictionaries_all_layouts_separate_sources/test_mongo.py::test_simple_ssl[hashed-True]":{
+ "reason": "errors upstream"
+ },
+ "test_dictionaries_all_layouts_separate_sources/test_mongo_uri.py::test_simple_ssl[flat-True]":{
+ "reason": "errors upstream"
+ },
+ "test_storage_mongodb/test.py::test_secure_connection[True]":{
+ "reason": "fails upstream"
+ },
+ "test_table_function_mongodb/test.py::test_secure_connection[True]":{
+ "reason": "fails upstream"
+ }
+}
diff --git a/tests/ci/functional_test_check.py b/tests/ci/functional_test_check.py
index 8f1ffb05ac3c..4cd022c6bf81 100644
--- a/tests/ci/functional_test_check.py
+++ b/tests/ci/functional_test_check.py
@@ -97,6 +97,9 @@ def get_run_command(
if "analyzer" not in check_name
else ""
)
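+ # also mount the known-fails JSON where process_functional_tests_result.py expects it (/broken_tests.json)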
+ volume_with_broken_test += (
+ f"--volume={repo_path}/tests/broken_tests.json:/broken_tests.json "
+ )
return (
f"docker run --volume={builds_path}:/package_folder "
diff --git a/tests/ci/integration_tests_runner.py b/tests/ci/integration_tests_runner.py
index f10c0a190e2f..18725a509095 100755
--- a/tests/ci/integration_tests_runner.py
+++ b/tests/ci/integration_tests_runner.py
@@ -474,6 +474,19 @@ def _get_parallel_tests_skip_list(repo_path):
skip_list_tests = json.load(skip_list_file)
return list(sorted(skip_list_tests))
+ @staticmethod
+ def _get_broken_tests_list(repo_path: str) -> dict:
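+ """Load tests/broken_tests.json, mapping test name -> {"message", "reason"}."""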
+ skip_list_file_path = f"{repo_path}/tests/broken_tests.json"
+ if (
+ not os.path.isfile(skip_list_file_path)
+ or os.path.getsize(skip_list_file_path) == 0
+ ):
+ return {}
+
+ with open(skip_list_file_path, "r", encoding="utf-8") as skip_list_file:
+ skip_list_tests = json.load(skip_list_file)
+ return skip_list_tests
+
@staticmethod
def group_test_by_file(tests):
result = {} # type: Dict
@@ -891,6 +904,8 @@ def run_impl(self, repo_path, build_path):
" ".join(not_found_tests[:3]),
)
+ known_broken_tests = self._get_broken_tests_list(repo_path)
+
grouped_tests = self.group_test_by_file(filtered_sequential_tests)
i = 0
for par_group in chunks(filtered_parallel_tests, PARALLEL_GROUP_SIZE):
@@ -921,6 +936,26 @@ def run_impl(self, repo_path, build_path):
group_counters, group_test_times, log_paths = self.try_run_test_group(
repo_path, group, tests, MAX_RETRY, NUM_WORKERS
)
+
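+ # Reclassify known-broken failures as BROKEN. When the entry carries a
+ # "message", only reclassify if that message is found in the test logs.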
+ for fail_status in ("ERROR", "FAILED"):
+ for failed_test in list(group_counters[fail_status]):
+ if failed_test in known_broken_tests:
+ fail_message = known_broken_tests[failed_test].get("message")
+ if not fail_message:
+ mark_as_broken = True
+ else:
+ mark_as_broken = False
+ for log_path in log_paths:
+ if log_path.endswith(".log"):
+ with open(log_path) as log_file:
+ if fail_message in log_file.read():
+ mark_as_broken = True
+ break
+
+ if mark_as_broken:
+ group_counters[fail_status].remove(failed_test)
+ group_counters["BROKEN"].append(failed_test)
+
total_tests = 0
for counter, value in group_counters.items():
logging.info(
diff --git a/tests/integration/compose/docker_compose_hdfs.yml b/tests/integration/compose/docker_compose_hdfs.yml
index 1cae54ad9e1a..40a10df01f7c 100644
--- a/tests/integration/compose/docker_compose_hdfs.yml
+++ b/tests/integration/compose/docker_compose_hdfs.yml
@@ -1,7 +1,7 @@
version: '2.3'
services:
hdfs1:
- image: sequenceiq/hadoop-docker:2.7.0
+ image: prasanthj/docker-hadoop:2.6.0
hostname: hdfs1
restart: always
expose: