@@ -0,0 +1,77 @@
[
    {
        "cursor": "MC44fHRlc3Q0",
        "node": {
            "name": "test4",
            "failureRate": 0.8,
            "flakeRate": 0.0,
            "avgDuration": 100.0,
            "totalFailCount": 20,
            "totalFlakyFailCount": 0,
            "totalPassCount": 5,
            "totalSkipCount": 5,
            "commitsFailed": 5,
            "lastDuration": 100.0
        }
    },
    {
        "cursor": "MC43NXx0ZXN0Mw==",
        "node": {
            "name": "test3",
            "failureRate": 0.75,
            "flakeRate": 0.0,
            "avgDuration": 100.0,
            "totalFailCount": 15,
            "totalFlakyFailCount": 0,
            "totalPassCount": 5,
            "totalSkipCount": 5,
            "commitsFailed": 5,
            "lastDuration": 100.0
        }
    },
    {
        "cursor": "MC42NjY2NjY2NjY2NjY2NjY2fHRlc3Qy",
        "node": {
            "name": "test2",
            "failureRate": 0.6666666666666666,
            "flakeRate": 0.0,
            "avgDuration": 100.0,
            "totalFailCount": 10,
            "totalFlakyFailCount": 0,
            "totalPassCount": 5,
            "totalSkipCount": 5,
            "commitsFailed": 5,
            "lastDuration": 100.0
        }
    },
    {
        "cursor": "MC41fHRlc3Qx",
        "node": {
            "name": "test1",
            "failureRate": 0.5,
            "flakeRate": 0.5,
            "avgDuration": 100.0,
            "totalFailCount": 5,
            "totalFlakyFailCount": 5,
            "totalPassCount": 5,
            "totalSkipCount": 5,
            "commitsFailed": 5,
            "lastDuration": 100.0
        }
    },
    {
        "cursor": "MC4wfHRlc3Qw",
        "node": {
            "name": "test0",
            "failureRate": 0.0,
            "flakeRate": 0.0,
            "avgDuration": 100.0,
            "totalFailCount": 0,
            "totalFlakyFailCount": 0,
            "totalPassCount": 5,
            "totalSkipCount": 5,
            "commitsFailed": 5,
            "lastDuration": 100.0
        }
    }
]
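A note on the cursors in this snapshot: they appear to be base64-encoded "<ordering value>|<test name>" pairs (failureRate here, since the query orders by FAILURE_RATE). This is an inference from the values above rather than documented behaviour. A minimal check in Python:

import base64

# Decodes to "0.8|test4" and "0.75|test3", matching the failureRate and name fields above.
print(base64.b64decode("MC44fHRlc3Q0").decode())
print(base64.b64decode("MC43NXx0ZXN0Mw==").decode())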
65 changes: 65 additions & 0 deletions graphql_api/tests/test_test_analytics.py
@@ -808,3 +808,68 @@ def test_gql_query_with_new_ta(self, mocker, repository, snapshot):
            settings.GCS_BUCKET_NAME,
            f"test_analytics/branch_rollups/{repository.repoid}/{repository.branch}.arrow",
        )

    def test_gql_query_with_new_ta_all_branches(self, mocker, repository, snapshot):
        # set the feature flag
        mocker.patch("rollouts.READ_NEW_TA.check_value", return_value=True)

        # write the sample rollup table to storage so the query has data to read
        storage = get_appropriate_storage_service()
        try:
            storage.create_root_storage(settings.GCS_BUCKET_NAME)
        except BucketAlreadyExistsError:
            pass
        storage.write_file(
            settings.GCS_BUCKET_NAME,
            f"test_analytics/repo_rollups/{repository.repoid}.arrow",
            test_results_table_no_version.write_ipc(None).getvalue(),
        )

        # run the GQL query
        query = base_gql_query % (
            repository.author.username,
            repository.name,
            """
            testResults(filters: { branch: "All branches" }, ordering: { parameter: FAILURE_RATE, direction: DESC } ) {
                totalCount
                edges {
                    cursor
                    node {
                        name
                        failureRate
                        flakeRate
                        updatedAt
                        avgDuration
                        totalFailCount
                        totalFlakyFailCount
                        totalPassCount
                        totalSkipCount
                        commitsFailed
                        lastDuration
                    }
                }
            }
            """,
        )

        result = self.gql_request(query, owner=repository.author)

        # take a snapshot of the results; updatedAt is time-dependent, so it is
        # stripped from each node before comparing against the stored snapshot
        assert (
            result["owner"]["repository"]["testAnalytics"]["testResults"]["totalCount"]
            == 5
        )
        assert snapshot("json") == [
            {
                **edge,
                "node": {k: v for k, v in edge["node"].items() if k != "updatedAt"},
            }
            for edge in result["owner"]["repository"]["testAnalytics"]["testResults"][
                "edges"
            ]
        ]

        storage.delete_file(
            settings.GCS_BUCKET_NAME,
            f"test_analytics/repo_rollups/{repository.repoid}.arrow",
        )
3 changes: 1 addition & 2 deletions graphql_api/types/test_analytics/test_analytics.py
@@ -198,8 +198,7 @@ def generate_test_results(

:param repoid: repoid of the repository we want to calculate aggregates for
:param branch: optional name of the branch we want to filter on, if this is provided the aggregates calculated will only take into account
test instances generated on that branch. By default branches will not be filtered and test instances on all branches wil be taken into
account.
test instances generated on that branch.
:param interval: timedelta for filtering test instances used to calculate the aggregates by time, the test instances used will be
those with a created at larger than now - interval.
:param testsuites: optional list of testsuite names to filter by, this is done via a union
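For orientation, a hypothetical call sketch built only from the parameters named in the docstring above; the real signature of generate_test_results may take more arguments and different types, and all values shown are invented:

from datetime import timedelta

# Hypothetical invocation; argument names follow the docstring, values are made up.
results = generate_test_results(
    repoid=42,
    branch="main",                       # optional: restrict aggregates to one branch
    interval=timedelta(days=30),         # only instances created after now - interval
    testsuites=["unit", "integration"],  # optional: filtered via a union of suite names
)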
8 changes: 5 additions & 3 deletions utils/test_results.py
@@ -11,6 +11,8 @@
from rollouts import READ_NEW_TA
from services.task import TaskService

ALL_BRANCHES = "All branches"

get_results_summary = Summary(
"test_results_get_results", "Time it takes to download results from GCS", ["impl"]
)
@@ -144,10 +146,10 @@ def old_get_results(
    return table


def rollup_blob_path(repoid: int, branch: str | None = None) -> str:
def rollup_blob_path(repoid: int, branch: str) -> str:
    return (
        f"test_analytics/branch_rollups/{repoid}/{branch}.arrow"
        if branch
        if branch != ALL_BRANCHES
        else f"test_analytics/repo_rollups/{repoid}.arrow"
    )
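As a quick illustration of the new contract (the repoid 42 below is a made-up value), callers now pass the ALL_BRANCHES sentinel instead of None to select the repo-wide rollup:

# Branch-specific rollup: any branch name other than the ALL_BRANCHES sentinel.
assert rollup_blob_path(42, "main") == "test_analytics/branch_rollups/42/main.arrow"
# Repo-wide rollup: the "All branches" sentinel maps to the per-repo blob.
assert rollup_blob_path(42, ALL_BRANCHES) == "test_analytics/repo_rollups/42.arrow"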

@@ -223,7 +225,7 @@ def v1_agg_table(table: pl.LazyFrame) -> pl.LazyFrame:

def new_get_results(
    repoid: int,
    branch: str | None,
    branch: str,
    interval_start: int,
    interval_end: int | None = None,
) -> pl.DataFrame | None: