Skip to content

Commit dae9645

Browse files
Rachel Chen
authored and committed
new version
1 parent b3570ff commit dae9645

File tree

8 files changed

+17
-17
lines changed

8 files changed

+17
-17
lines changed

requirements-base.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ rfc3986-validator>=0.1.1
6868
sentry-arroyo>=2.19.9
6969
sentry-kafka-schemas>=0.1.128
7070
sentry-ophio==1.0.0
71-
sentry-protos>=0.1.44
71+
sentry-protos>=0.1.45
7272
sentry-redis-tools>=0.1.7
7373
sentry-relay>=0.9.4
7474
sentry-sdk[http2]>=2.19.2

requirements-dev-frozen.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -189,7 +189,7 @@ sentry-forked-djangorestframework-stubs==3.15.2.post1
189189
sentry-forked-email-reply-parser==0.5.12.post1
190190
sentry-kafka-schemas==0.1.128
191191
sentry-ophio==1.0.0
192-
sentry-protos==0.1.44
192+
sentry-protos==0.1.45
193193
sentry-redis-tools==0.1.7
194194
sentry-relay==0.9.4
195195
sentry-sdk==2.19.2

requirements-frozen.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ sentry-arroyo==2.19.9
128128
sentry-forked-email-reply-parser==0.5.12.post1
129129
sentry-kafka-schemas==0.1.128
130130
sentry-ophio==1.0.0
131-
sentry-protos==0.1.44
131+
sentry-protos==0.1.45
132132
sentry-redis-tools==0.1.7
133133
sentry-relay==0.9.4
134134
sentry-sdk==2.19.2

src/sentry/api/endpoints/organization_spans_fields.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ def get(self, request: Request, organization: Organization) -> Response:
117117
limit=max_span_tags,
118118
offset=0,
119119
type=(
120-
AttributeKey.Type.TYPE_FLOAT
120+
AttributeKey.Type.TYPE_DOUBLE
121121
if serialized["type"] == "number"
122122
else AttributeKey.Type.TYPE_STRING
123123
),

src/sentry/search/eap/columns.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -136,7 +136,7 @@ def proto_type(self) -> AttributeKey.Type.ValueType:
136136
137137
see: https://www.notion.so/sentry/Should-count-return-an-int-in-the-v1-RPC-API-1348b10e4b5d80498bfdead194cc304e
138138
"""
139-
return constants.FLOAT
139+
return constants.DOUBLE
140140

141141

142142
def simple_sentry_field(field) -> ResolvedColumn:

src/sentry/search/eap/spans.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -332,7 +332,7 @@ def _resolve_search_value(
332332
)
333333
elif isinstance(value, (float, int)):
334334
return AttributeValue(val_int=int(value))
335-
elif column_type == constants.FLOAT:
335+
elif column_type == constants.DOUBLE:
336336
if operator in constants.IN_OPERATORS:
337337
if isinstance(value, list):
338338
return AttributeValue(

src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ def make_eap_request(
9191
filter=rpc_filters,
9292
granularity_secs=interval,
9393
key=AttributeKey(
94-
name=ts.metric.mri.split("/")[1].split("@")[0], type=AttributeKey.TYPE_FLOAT
94+
name=ts.metric.mri.split("/")[1].split("@")[0], type=AttributeKey.TYPE_DOUBLE
9595
),
9696
)
9797
aggregate_resp = snuba_rpc.rpc(aggregate_req, AggregateBucketResponse)

tests/sentry/search/eap/test_spans.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ def test_numeric_query(self):
5151
query, _ = self.resolver.resolve_query("ai.total_tokens.used:123")
5252
assert query == TraceItemFilter(
5353
comparison_filter=ComparisonFilter(
54-
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_FLOAT),
54+
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_DOUBLE),
5555
op=ComparisonFilter.OP_EQUALS,
5656
value=AttributeValue(val_double=123),
5757
)
@@ -95,7 +95,7 @@ def test_in_numeric_filter(self):
9595
query, _ = self.resolver.resolve_query("ai.total_tokens.used:[123,456,789]")
9696
assert query == TraceItemFilter(
9797
comparison_filter=ComparisonFilter(
98-
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_FLOAT),
98+
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_DOUBLE),
9999
op=ComparisonFilter.OP_IN,
100100
value=AttributeValue(val_double_array=DoubleArray(values=[123, 456, 789])),
101101
)
@@ -105,7 +105,7 @@ def test_greater_than_numeric_filter(self):
105105
query, _ = self.resolver.resolve_query("ai.total_tokens.used:>123")
106106
assert query == TraceItemFilter(
107107
comparison_filter=ComparisonFilter(
108-
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_FLOAT),
108+
key=AttributeKey(name="ai_total_tokens_used", type=AttributeKey.Type.TYPE_DOUBLE),
109109
op=ComparisonFilter.OP_GREATER_THAN,
110110
value=AttributeValue(val_double=123),
111111
)
@@ -281,15 +281,15 @@ def test_simple_string_tag(self):
281281
def test_simple_number_tag(self):
282282
resolved_column, virtual_context = self.resolver.resolve_column("tags[foo, number]")
283283
assert resolved_column.proto_definition == AttributeKey(
284-
name="foo", type=AttributeKey.Type.TYPE_FLOAT
284+
name="foo", type=AttributeKey.Type.TYPE_DOUBLE
285285
)
286286
assert virtual_context is None
287287

288288
def test_sum_function(self):
289289
resolved_column, virtual_context = self.resolver.resolve_column("sum(span.self_time)")
290290
assert resolved_column.proto_definition == AttributeAggregation(
291291
aggregate=Function.FUNCTION_SUM,
292-
key=AttributeKey(name="sentry.exclusive_time_ms", type=AttributeKey.Type.TYPE_FLOAT),
292+
key=AttributeKey(name="sentry.exclusive_time_ms", type=AttributeKey.Type.TYPE_DOUBLE),
293293
label="sum(span.self_time)",
294294
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
295295
)
@@ -299,7 +299,7 @@ def test_sum_default_argument(self):
299299
resolved_column, virtual_context = self.resolver.resolve_column("sum()")
300300
assert resolved_column.proto_definition == AttributeAggregation(
301301
aggregate=Function.FUNCTION_SUM,
302-
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
302+
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
303303
label="sum()",
304304
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
305305
)
@@ -309,7 +309,7 @@ def test_function_alias(self):
309309
resolved_column, virtual_context = self.resolver.resolve_column("sum() as test")
310310
assert resolved_column.proto_definition == AttributeAggregation(
311311
aggregate=Function.FUNCTION_SUM,
312-
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
312+
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
313313
label="test",
314314
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
315315
)
@@ -319,15 +319,15 @@ def test_count(self):
319319
resolved_column, virtual_context = self.resolver.resolve_column("count()")
320320
assert resolved_column.proto_definition == AttributeAggregation(
321321
aggregate=Function.FUNCTION_COUNT,
322-
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
322+
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
323323
label="count()",
324324
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
325325
)
326326
assert virtual_context is None
327327
resolved_column, virtual_context = self.resolver.resolve_column("count(span.duration)")
328328
assert resolved_column.proto_definition == AttributeAggregation(
329329
aggregate=Function.FUNCTION_COUNT,
330-
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
330+
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
331331
label="count(span.duration)",
332332
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
333333
)
@@ -337,7 +337,7 @@ def test_p50(self):
337337
resolved_column, virtual_context = self.resolver.resolve_column("p50()")
338338
assert resolved_column.proto_definition == AttributeAggregation(
339339
aggregate=Function.FUNCTION_P50,
340-
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_FLOAT),
340+
key=AttributeKey(name="sentry.duration_ms", type=AttributeKey.Type.TYPE_DOUBLE),
341341
label="p50()",
342342
extrapolation_mode=ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED,
343343
)

0 commit comments

Comments
 (0)