84 changes: 19 additions & 65 deletions graphql_api/tests/test_test_analytics.py
@@ -598,64 +598,36 @@ def test_desc_failure_rate_ordering_on_test_results_with_after(self) -> None:
pass_count=2,
fail_count=3,
)
res = self.fetch_test_analytics(
repo.name,
"""testResults(ordering: { parameter: FAILURE_RATE, direction: DESC }, first: 1) { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }""",
)

assert res["testResults"] == {
"edges": [
{"node": {"name": test_2.name, "failureRate": 0.6}},
],
"pageInfo": {
"endCursor": base64_encode_string(f"0.6|{test_2.name}"),
"hasNextPage": True,
"hasPreviousPage": False,
"startCursor": base64_encode_string(f"0.6|{test_2.name}"),
},
"totalCount": 2,
}

res = self.fetch_test_analytics(
repo.name,
"""testResults(ordering: { parameter: FAILURE_RATE, direction: DESC }, first: 1, after: "%s") { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }"""
% res["testResults"]["pageInfo"]["endCursor"],
test_3 = TestFactory(repository=repo)
_ = DailyTestRollupFactory(
test=test_3,
date=datetime.date.today(),
repoid=repo.repoid,
pass_count=1,
fail_count=4,
)

assert res["testResults"] == {
"edges": [
{"node": {"name": test.name, "failureRate": 0.2}},
],
"pageInfo": {
"endCursor": base64_encode_string(f"0.2|{test.name}"),
"hasNextPage": False,
"hasPreviousPage": False,
"startCursor": base64_encode_string(f"0.2|{test.name}"),
},
"totalCount": 2,
}

res = self.fetch_test_analytics(
repo.name,
"""testResults(ordering: { parameter: FAILURE_RATE, direction: ASC }, first: 1) { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }""",
"""testResults(ordering: { parameter: FAILURE_RATE, direction: DESC }, first: 1) { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }""",
)

assert res["testResults"] == {
"edges": [
{"node": {"name": test.name, "failureRate": 0.2}},
{"node": {"name": test_3.name, "failureRate": 0.8}},
],
"pageInfo": {
"endCursor": base64_encode_string(f"0.2|{test.name}"),
"endCursor": base64_encode_string(f"0.8|{test_3.name}"),
"hasNextPage": True,
"hasPreviousPage": False,
"startCursor": base64_encode_string(f"0.2|{test.name}"),
"startCursor": base64_encode_string(f"0.8|{test_3.name}"),
},
"totalCount": 2,
"totalCount": 3,
}

res = self.fetch_test_analytics(
repo.name,
"""testResults(ordering: { parameter: FAILURE_RATE, direction: ASC }, first: 1, after: "%s") { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }"""
"""testResults(ordering: { parameter: FAILURE_RATE, direction: DESC }, first: 1, after: "%s") { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }"""
% res["testResults"]["pageInfo"]["endCursor"],
)

@@ -665,48 +637,30 @@ def test_desc_failure_rate_ordering_on_test_results_with_after(self) -> None:
],
"pageInfo": {
"endCursor": base64_encode_string(f"0.6|{test_2.name}"),
"hasNextPage": False,
"hasNextPage": True,
"hasPreviousPage": False,
"startCursor": base64_encode_string(f"0.6|{test_2.name}"),
},
"totalCount": 2,
"totalCount": 3,
}

res = self.fetch_test_analytics(
repo.name,
"""testResults(ordering: { parameter: FAILURE_RATE, direction: ASC }, last: 2) { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }""",
"""testResults(ordering: { parameter: FAILURE_RATE, direction: DESC }, first: 1, after: "%s") { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }"""
% res["testResults"]["pageInfo"]["endCursor"],
)

assert res["testResults"] == {
"edges": [
{"node": {"name": test_2.name, "failureRate": 0.6}},
{"node": {"name": test.name, "failureRate": 0.2}},
],
"pageInfo": {
"endCursor": base64_encode_string(f"0.2|{test.name}"),
"hasNextPage": False,
"hasPreviousPage": False,
"startCursor": base64_encode_string(f"0.6|{test_2.name}"),
},
"totalCount": 2,
}

res = self.fetch_test_analytics(
repo.name,
"""testResults(ordering: { parameter: FAILURE_RATE, direction: ASC }, last: 1) { edges { node { name failureRate } }, pageInfo { hasNextPage, hasPreviousPage, startCursor, endCursor }, totalCount }""",
)

assert res["testResults"] == {
"edges": [
{"node": {"name": test_2.name, "failureRate": 0.6}},
],
"pageInfo": {
"endCursor": base64_encode_string(f"0.6|{test_2.name}"),
"hasNextPage": False,
"hasPreviousPage": True,
"startCursor": base64_encode_string(f"0.6|{test_2.name}"),
"startCursor": base64_encode_string(f"0.2|{test.name}"),
},
"totalCount": 2,
"totalCount": 3,
}

def test_flake_rate_filtering_on_test_results(self) -> None:
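For readers of this excerpt, the pageInfo assertions above treat each cursor as the base64 encoding of "<ordered value>|<test name>". A minimal sketch of that round trip, assuming only what the assertions show (the helper names are illustrative, not the project's base64_encode_string / decode_cursor):

import base64

def encode_cursor_example(ordered_value: str, name: str) -> str:
    # Cursor format inferred from the assertions above: "<ordered value>|<name>", base64-encoded.
    return base64.b64encode(f"{ordered_value}|{name}".encode()).decode()

def decode_cursor_example(cursor: str) -> tuple[str, str]:
    # Split on the first "|" only, so names containing "|" still decode cleanly.
    ordered_value, name = base64.b64decode(cursor).decode().split("|", 1)
    return ordered_value, name

assert decode_cursor_example(encode_cursor_example("0.8", "test_3")) == ("0.8", "test_3")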
17 changes: 14 additions & 3 deletions utils/test_results.py
@@ -242,6 +242,7 @@ def search_base_query(
rows: list[TestResultsRow],
ordering: str,
cursor: CursorValue | None,
descending: bool = False,
) -> list[TestResultsRow]:
"""
The reason we have to do this filtering in the application logic is because we need to get the total count of rows that
@@ -270,7 +271,12 @@ def compare(row: TestResultsRow) -> int:
row_value = getattr(row, ordering)
row_value_str = str(row_value)
cursor_value_str = cursor.ordered_value
return (row_value_str > cursor_value_str) - (row_value_str < cursor_value_str)
row_is_greater = row_value_str > cursor_value_str
row_is_less = row_value_str < cursor_value_str
if descending:
return row_is_less - row_is_greater
else:
return row_is_greater - row_is_less

left, right = 0, len(rows) - 1
while left <= right:
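The change above makes compare a direction-aware three-way comparator: subtracting the two boolean comparisons yields -1, 0, or 1, and swapping the operands of the subtraction flips the sign when descending is set, so the binary search that follows walks the rows in the requested order. A self-contained sketch of the idea (the helper name is illustrative, not from this PR):

def three_way(row_value: str, cursor_value: str, descending: bool = False) -> int:
    # Python bools subtract as ints: True - False == 1, False - True == -1, equal values give 0.
    greater = row_value > cursor_value
    less = row_value < cursor_value
    return (less - greater) if descending else (greater - less)

assert three_way("0.2", "0.15") == 1 and three_way("0.2", "0.15", descending=True) == -1
assert three_way("0.1", "0.15") == -1 and three_way("0.1", "0.15", descending=True) == 1
assert three_way("0.15", "0.15") == 0 == three_way("0.15", "0.15", descending=True)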
@@ -442,8 +448,13 @@ def generate_test_results(
page_size: int = first or last or 20

cursor_value = decode_cursor(after) if after else decode_cursor(before)

search_rows = search_base_query(rows, ordering, cursor_value)
descending = ordering_direction == "DESC"
search_rows = search_base_query(
rows,
ordering,
cursor_value,
descending=descending,
)

page: list[dict[str, str | TestResultsRow]] = [
{"cursor": encode_cursor(row, ordering), "node": row}
10 changes: 10 additions & 0 deletions utils/tests/unit/test_search_base_query.py
@@ -51,3 +51,13 @@ def test_search_base_query_with_missing_cursor_low_name_high_failure_rate():
cursor = CursorValue(name="0", ordered_value="0.15")
res = search_base_query(rows, "failure_rate", cursor)
assert res == rows[-1:]


def test_search_base_query_descending():
# [(2, "0.2"), (1, "0.1"), (0, "0.0")]
# ^
# here's where the cursor is pointing at
rows = [row_factory(str(i), float(i) * 0.1) for i in range(2, -1, -1)]
cursor = CursorValue(name="0", ordered_value="0.15")
res = search_base_query(rows, "failure_rate", cursor, descending=True)
assert res == rows[1:]
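The expectation in this test can also be read as a plain bisection problem: with the rows laid out in descending failure-rate order, the slice past the "0.15" cursor starts at the first row whose value is strictly below it. A self-contained illustration using the standard library (purely illustrative; this is not how search_base_query is implemented):

import bisect

failure_rates_desc = ["0.2", "0.1", "0.0"]              # the rows built by row_factory above
ascending_mirror = list(reversed(failure_rates_desc))   # bisect expects ascending order
i = bisect.bisect_left(ascending_mirror, "0.15")        # index of the first mirrored value >= "0.15"
first_past_cursor = len(failure_rates_desc) - i         # rows at or above the cursor come first when descending
assert failure_rates_desc[first_past_cursor:] == ["0.1", "0.0"]  # matches rows[1:] asserted above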