Skip to content
This repository was archived by the owner on Jun 13, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions graphql_api/tests/test_test_analytics.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
get_results,
)
from services.redis_configuration import get_redis_connection
from utils.test_results import dedup_table

from .helper import GraphQLTestHelper

Expand Down Expand Up @@ -218,7 +219,8 @@ def test_get_test_results(
):
results = get_results(repository.repoid, repository.branch, 30)
assert results is not None
assert results.equals(test_results_table)

assert results.equals(dedup_table(test_results_table))

def test_get_test_results_no_storage(
self, transactional_db, repository, mock_storage
Expand All @@ -231,7 +233,7 @@ def test_get_test_results_no_redis(
m = mocker.patch("services.task.TaskService.cache_test_results_redis")
results = get_results(repository.repoid, repository.branch, 30)
assert results is not None
assert results.equals(test_results_table)
assert results.equals(dedup_table(test_results_table))

m.assert_called_once_with(repository.repoid, repository.branch)

Expand Down
31 changes: 0 additions & 31 deletions graphql_api/types/test_analytics/test_analytics.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,37 +215,6 @@ def generate_test_results(
},
)

failure_rate_expr = (
pl.col("failure_rate")
* (pl.col("total_fail_count") + pl.col("total_pass_count"))
).sum() / (pl.col("total_fail_count") + pl.col("total_pass_count")).sum()

flake_rate_expr = (
pl.col("flake_rate") * (pl.col("total_fail_count") + pl.col("total_pass_count"))
).sum() / (pl.col("total_fail_count") + pl.col("total_pass_count")).sum()

avg_duration_expr = (
pl.col("avg_duration")
* (pl.col("total_pass_count") + pl.col("total_fail_count"))
).sum() / (pl.col("total_pass_count") + pl.col("total_fail_count")).sum()

# dedup
table = table.group_by("name").agg(
pl.col("test_id").first().alias("test_id"),
pl.col("testsuite").alias("testsuite"),
pl.col("flags").explode().unique().alias("flags"),
failure_rate_expr.alias("failure_rate"),
flake_rate_expr.alias("flake_rate"),
pl.col("updated_at").max().alias("updated_at"),
avg_duration_expr.alias("avg_duration"),
pl.col("total_fail_count").sum().alias("total_fail_count"),
pl.col("total_flaky_fail_count").sum().alias("total_flaky_fail_count"),
pl.col("total_pass_count").sum().alias("total_pass_count"),
pl.col("total_skip_count").sum().alias("total_skip_count"),
pl.col("commits_where_fail").sum().alias("commits_where_fail"),
pl.col("last_duration").max().alias("last_duration"),
)

if term:
table = table.filter(pl.col("name").str.contains(term))

Expand Down
41 changes: 41 additions & 0 deletions utils/test_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,45 @@ def storage_key(
return key


def dedup_table(table: pl.DataFrame) -> pl.DataFrame:
    """Collapse duplicate test-result rows that share the same test ``name``.

    Rows are grouped by ``name``: counts are summed, ``updated_at`` and
    ``last_duration`` take the max, ``flags`` becomes the union of all flags,
    and the rate/duration columns are recomputed as run-count-weighted
    averages so merged rows keep accurate aggregate statistics.

    Args:
        table: polars DataFrame of test results, possibly containing several
            rows per test name.

    Returns:
        A DataFrame with exactly one row per test name, sorted by ``name``
        for a deterministic row order.
    """
    # Weight used by all the weighted averages below: total runs in each row.
    run_count = pl.col("total_fail_count") + pl.col("total_pass_count")

    # Weighted mean of each per-row metric. fill_nan(0) guards the 0/0 case
    # when a group has no recorded pass/fail runs at all.
    failure_rate_expr = (
        (pl.col("failure_rate") * run_count).sum() / run_count.sum()
    ).fill_nan(0)
    flake_rate_expr = (
        (pl.col("flake_rate") * run_count).sum() / run_count.sum()
    ).fill_nan(0)
    avg_duration_expr = (
        (pl.col("avg_duration") * run_count).sum() / run_count.sum()
    ).fill_nan(0)

    table = (
        table.group_by("name")
        .agg(
            pl.col("test_id").first().alias("test_id"),
            pl.col("testsuite").alias("testsuite"),
            pl.col("flags").explode().unique().alias("flags"),
            failure_rate_expr.alias("failure_rate"),
            flake_rate_expr.alias("flake_rate"),
            pl.col("updated_at").max().alias("updated_at"),
            avg_duration_expr.alias("avg_duration"),
            pl.col("total_fail_count").sum().alias("total_fail_count"),
            pl.col("total_flaky_fail_count").sum().alias("total_flaky_fail_count"),
            pl.col("total_pass_count").sum().alias("total_pass_count"),
            pl.col("total_skip_count").sum().alias("total_skip_count"),
            pl.col("commits_where_fail").sum().alias("commits_where_fail"),
            pl.col("last_duration").max().alias("last_duration"),
        )
        .sort("name")
    )

    return table


def get_results(
repoid: int,
branch: str,
Expand Down Expand Up @@ -73,4 +112,6 @@ def get_results(
if table.height == 0:
return None

table = dedup_table(table)

return table
Loading