Skip to content

Commit 6d541bf

Browse files
authored
ref(discover): Move to using json for range functions (#21334)
1 parent 99edf04 commit 6d541bf

File tree

3 files changed

+284
-31
lines changed

3 files changed

+284
-31
lines changed

src/sentry/api/event_search.py

Lines changed: 78 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1360,7 +1360,7 @@ def normalize(self, value):
13601360
raise InvalidFunctionArgument(
13611361
u"{} is in the wrong format, expected a date like 2020-03-14T15:14:15".format(value)
13621362
)
1363-
return value
1363+
return u"'{}'".format(value)
13641364

13651365

13661366
class NumericColumn(FunctionArg):
@@ -1810,30 +1810,67 @@ def validate_argument_count(self, field, arguments):
18101810
Function(
18111811
"percentile_range",
18121812
required_args=[
1813-
DurationColumn("column"),
1813+
DurationColumnNoLookup("column"),
18141814
NumberRange("percentile", 0, 1),
18151815
DateArg("start"),
18161816
DateArg("end"),
18171817
NumberRange("index", 1, None),
18181818
],
18191819
aggregate=[
1820-
u"quantileIf({percentile:.2f})({column},and(greaterOrEquals(timestamp,toDateTime('{start}')),less(timestamp,toDateTime('{end}'))))",
1821-
None,
1820+
u"quantileIf({percentile:.2f})",
1821+
[
1822+
ArgValue("column"),
1823+
[
1824+
"and",
1825+
[
1826+
# NOTE: These conditions are written in this seemingly backwards way
1827+
# because of how snuba special cases the following syntax
1828+
# ["a", ["b", ["c", ["d"]]]
1829+
#
1830+
# This array can be interpreted 2 ways
1831+
# 1. a(b(c(d))) the way snuba interprets it
1832+
# - snuba special cases it when it detects an array where the first
1833+
# element is a literal, and the second element is an array and
1834+
# treats it as a function call rather than 2 separate arguments
1835+
# 2. a(b, c(d)) the way we want it to be interpreted
1836+
#
1837+
# Because of how snuba interprets this expression, it makes it impossible
1838+
# to specify a function with 2 arguments whose first argument is a literal
1839+
# and the second argument is an expression.
1840+
#
1841+
# Working with this limitation, we have to invert the conditions in
1842+
# order to express a function whose first argument is an expression while
1843+
# the second argument is a literal.
1844+
["lessOrEquals", [["toDateTime", [ArgValue("start")]], "timestamp"]],
1845+
["greater", [["toDateTime", [ArgValue("end")]], "timestamp"]],
1846+
],
1847+
],
1848+
],
18221849
"percentile_range_{index:g}",
18231850
],
18241851
result_type="duration",
18251852
),
18261853
Function(
18271854
"avg_range",
18281855
required_args=[
1829-
DurationColumn("column"),
1856+
DurationColumnNoLookup("column"),
18301857
DateArg("start"),
18311858
DateArg("end"),
18321859
NumberRange("index", 1, None),
18331860
],
18341861
aggregate=[
1835-
u"avgIf({column},and(greaterOrEquals(timestamp,toDateTime('{start}')),less(timestamp,toDateTime('{end}'))))",
1836-
None,
1862+
u"avgIf",
1863+
[
1864+
ArgValue("column"),
1865+
[
1866+
"and",
1867+
[
1868+
# see `percentile_range` for why the conditions are backwards
1869+
["lessOrEquals", [["toDateTime", [ArgValue("start")]], "timestamp"]],
1870+
["greater", [["toDateTime", [ArgValue("end")]], "timestamp"]],
1871+
],
1872+
],
1873+
],
18371874
"avg_range_{index:g}",
18381875
],
18391876
result_type="duration",
@@ -1848,8 +1885,29 @@ def validate_argument_count(self, field, arguments):
18481885
],
18491886
calculated_args=[{"name": "tolerated", "fn": lambda args: args["satisfaction"] * 4.0}],
18501887
aggregate=[
1851-
u"uniqIf(user,and(greater(duration,{tolerated:g}),and(greaterOrEquals(timestamp,toDateTime('{start}')),less(timestamp,toDateTime('{end}')))))",
1852-
None,
1888+
u"uniqIf",
1889+
[
1890+
"user",
1891+
[
1892+
"and",
1893+
[
1894+
# Currently, the column resolution on aggregates doesn't recurse, so we use
1895+
# `duration` (snuba name) rather than `transaction.duration` (sentry name).
1896+
["greater", ["duration", ArgValue("tolerated")]],
1897+
[
1898+
"and",
1899+
[
1900+
# see `percentile_range` for why the conditions are backwards
1901+
[
1902+
"lessOrEquals",
1903+
[["toDateTime", [ArgValue("start")]], "timestamp"],
1904+
],
1905+
["greater", [["toDateTime", [ArgValue("end")]], "timestamp"]],
1906+
],
1907+
],
1908+
],
1909+
],
1910+
],
18531911
"user_misery_range_{index:g}",
18541912
],
18551913
result_type="duration",
@@ -1858,8 +1916,17 @@ def validate_argument_count(self, field, arguments):
18581916
"count_range",
18591917
required_args=[DateArg("start"), DateArg("end"), NumberRange("index", 1, None)],
18601918
aggregate=[
1861-
u"countIf(and(greaterOrEquals(timestamp,toDateTime('{start}')),less(timestamp,toDateTime('{end}'))))",
1862-
None,
1919+
u"countIf",
1920+
[
1921+
[
1922+
"and",
1923+
[
1924+
# see `percentile_range` for why the conditions are backwards
1925+
["lessOrEquals", [["toDateTime", [ArgValue("start")]], "timestamp"]],
1926+
["greater", [["toDateTime", [ArgValue("end")]], "timestamp"]],
1927+
],
1928+
],
1929+
],
18631930
"count_range_{index:g}",
18641931
],
18651932
result_type="integer",

tests/sentry/api/test_event_search.py

Lines changed: 143 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -2391,8 +2391,20 @@ def test_percentile_range(self):
23912391
result = resolve_field_list(fields, eventstore.Filter())
23922392
assert result["aggregations"] == [
23932393
[
2394-
u"quantileIf(0.50)(duration,and(greaterOrEquals(timestamp,toDateTime('2020-05-01T01:12:34')),less(timestamp,toDateTime('2020-05-03T06:48:57'))))",
2395-
None,
2394+
"quantileIf(0.50)",
2395+
[
2396+
"transaction.duration",
2397+
[
2398+
"and",
2399+
[
2400+
[
2401+
"lessOrEquals",
2402+
[["toDateTime", ["'2020-05-01T01:12:34'"]], "timestamp"],
2403+
],
2404+
["greater", [["toDateTime", ["'2020-05-03T06:48:57'"]], "timestamp"]],
2405+
],
2406+
],
2407+
],
23962408
"percentile_range_1",
23972409
]
23982410
]
@@ -2414,8 +2426,20 @@ def test_average_range(self):
24142426
result = resolve_field_list(fields, eventstore.Filter())
24152427
assert result["aggregations"] == [
24162428
[
2417-
u"avgIf(duration,and(greaterOrEquals(timestamp,toDateTime('2020-05-01T01:12:34')),less(timestamp,toDateTime('2020-05-03T06:48:57'))))",
2418-
None,
2429+
"avgIf",
2430+
[
2431+
"transaction.duration",
2432+
[
2433+
"and",
2434+
[
2435+
[
2436+
"lessOrEquals",
2437+
[["toDateTime", ["'2020-05-01T01:12:34'"]], "timestamp"],
2438+
],
2439+
["greater", [["toDateTime", ["'2020-05-03T06:48:57'"]], "timestamp"]],
2440+
],
2441+
],
2442+
],
24192443
"avg_range_1",
24202444
]
24212445
]
@@ -2435,8 +2459,29 @@ def test_user_misery_range(self):
24352459
result = resolve_field_list(fields, eventstore.Filter())
24362460
assert result["aggregations"] == [
24372461
[
2438-
"uniqIf(user,and(greater(duration,1200),and(greaterOrEquals(timestamp,toDateTime('2020-05-01T01:12:34')),less(timestamp,toDateTime('2020-05-03T06:48:57')))))",
2439-
None,
2462+
"uniqIf",
2463+
[
2464+
"user",
2465+
[
2466+
"and",
2467+
[
2468+
["greater", ["duration", 1200]],
2469+
[
2470+
"and",
2471+
[
2472+
[
2473+
"lessOrEquals",
2474+
[["toDateTime", ["'2020-05-01T01:12:34'"]], "timestamp"],
2475+
],
2476+
[
2477+
"greater",
2478+
[["toDateTime", ["'2020-05-03T06:48:57'"]], "timestamp"],
2479+
],
2480+
],
2481+
],
2482+
],
2483+
],
2484+
],
24402485
u"user_misery_range_1",
24412486
]
24422487
]
@@ -2471,13 +2516,55 @@ def test_percentage(self):
24712516
result = resolve_field_list(fields, eventstore.Filter())
24722517
assert result["aggregations"] == [
24732518
[
2474-
"uniqIf(user,and(greater(duration,1200),and(greaterOrEquals(timestamp,toDateTime('2020-05-01T01:12:34')),less(timestamp,toDateTime('2020-05-03T06:48:57')))))",
2475-
None,
2519+
"uniqIf",
2520+
[
2521+
"user",
2522+
[
2523+
"and",
2524+
[
2525+
["greater", ["duration", 1200]],
2526+
[
2527+
"and",
2528+
[
2529+
[
2530+
"lessOrEquals",
2531+
[["toDateTime", ["'2020-05-01T01:12:34'"]], "timestamp"],
2532+
],
2533+
[
2534+
"greater",
2535+
[["toDateTime", ["'2020-05-03T06:48:57'"]], "timestamp"],
2536+
],
2537+
],
2538+
],
2539+
],
2540+
],
2541+
],
24762542
"user_misery_range_1",
24772543
],
24782544
[
2479-
"uniqIf(user,and(greater(duration,1200),and(greaterOrEquals(timestamp,toDateTime('2020-05-03T06:48:57')),less(timestamp,toDateTime('2020-05-05T01:12:34')))))",
2480-
None,
2545+
"uniqIf",
2546+
[
2547+
"user",
2548+
[
2549+
"and",
2550+
[
2551+
["greater", ["duration", 1200]],
2552+
[
2553+
"and",
2554+
[
2555+
[
2556+
"lessOrEquals",
2557+
[["toDateTime", ["'2020-05-03T06:48:57'"]], "timestamp"],
2558+
],
2559+
[
2560+
"greater",
2561+
[["toDateTime", ["'2020-05-05T01:12:34'"]], "timestamp"],
2562+
],
2563+
],
2564+
],
2565+
],
2566+
],
2567+
],
24812568
"user_misery_range_2",
24822569
],
24832570
[
@@ -2496,13 +2583,55 @@ def test_minus(self):
24962583
result = resolve_field_list(fields, eventstore.Filter())
24972584
assert result["aggregations"] == [
24982585
[
2499-
"uniqIf(user,and(greater(duration,1200),and(greaterOrEquals(timestamp,toDateTime('2020-05-01T01:12:34')),less(timestamp,toDateTime('2020-05-03T06:48:57')))))",
2500-
None,
2586+
"uniqIf",
2587+
[
2588+
"user",
2589+
[
2590+
"and",
2591+
[
2592+
["greater", ["duration", 1200]],
2593+
[
2594+
"and",
2595+
[
2596+
[
2597+
"lessOrEquals",
2598+
[["toDateTime", ["'2020-05-01T01:12:34'"]], "timestamp"],
2599+
],
2600+
[
2601+
"greater",
2602+
[["toDateTime", ["'2020-05-03T06:48:57'"]], "timestamp"],
2603+
],
2604+
],
2605+
],
2606+
],
2607+
],
2608+
],
25012609
"user_misery_range_1",
25022610
],
25032611
[
2504-
"uniqIf(user,and(greater(duration,1200),and(greaterOrEquals(timestamp,toDateTime('2020-05-03T06:48:57')),less(timestamp,toDateTime('2020-05-05T01:12:34')))))",
2505-
None,
2612+
"uniqIf",
2613+
[
2614+
"user",
2615+
[
2616+
"and",
2617+
[
2618+
["greater", ["duration", 1200]],
2619+
[
2620+
"and",
2621+
[
2622+
[
2623+
"lessOrEquals",
2624+
[["toDateTime", ["'2020-05-03T06:48:57'"]], "timestamp"],
2625+
],
2626+
[
2627+
"greater",
2628+
[["toDateTime", ["'2020-05-05T01:12:34'"]], "timestamp"],
2629+
],
2630+
],
2631+
],
2632+
],
2633+
],
2634+
],
25062635
"user_misery_range_2",
25072636
],
25082637
[

0 commit comments

Comments
 (0)