
Commit 1694844

Revert to transaction based storage of attrs.
1 parent b38c89b commit 1694844

7 files changed: +35 -40 lines


newrelic/api/llm_custom_attributes.py

Lines changed: 4 additions & 9 deletions

@@ -12,13 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import contextvars
-import functools
 import logging
 from newrelic.api.transaction import current_transaction

 _logger = logging.getLogger(__name__)
-custom_attr_context_var = contextvars.ContextVar("custom_attr_context_var", default={})


 class WithLlmCustomAttributes(object):
@@ -38,12 +35,10 @@ def __enter__(self):
             _logger.warning("WithLlmCustomAttributes must be called within the scope of a transaction.")
             return self

-        token = custom_attr_context_var.set(self.attr_dict)
-        self.transaction._custom_attr_context_var = custom_attr_context_var
-        return token
+        self.transaction._llm_context_attrs = self.attr_dict
+        return self

     def __exit__(self, exc, value, tb):
+        # Clear out context attributes once we leave the current context
         if self.transaction:
-            custom_attr_context_var.set(None)
-            self.transaction._custom_attr_context_var = custom_attr_context_var
-
+            self.transaction._llm_context_attrs = None
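
A minimal usage sketch of the reverted API, mirroring the tests later in this commit (the attribute values here are illustrative): inside an active transaction, WithLlmCustomAttributes prefixes keys with "llm." where needed, stores the dict directly on the transaction for the duration of the with block, and clears it on exit.

from newrelic.api.background_task import background_task
from newrelic.api.llm_custom_attributes import WithLlmCustomAttributes
from newrelic.api.transaction import current_transaction


@background_task()
def task():
    transaction = current_transaction()

    # Attributes are prefixed with "llm." (if not already) and stored
    # directly on the transaction rather than in a ContextVar.
    with WithLlmCustomAttributes({"conversation_id": "my-awesome-id"}):
        assert transaction._llm_context_attrs == {"llm.conversation_id": "my-awesome-id"}
        # ... any instrumented LLM call made here has these attributes
        # merged into its LLM events by the hooks below ...

    # __exit__ clears the attributes once the block is left.
    assert transaction._llm_context_attrs is None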

newrelic/hooks/external_botocore.py

Lines changed: 2 additions & 3 deletions

@@ -787,10 +787,9 @@ def handle_chat_completion_event(transaction, bedrock_attrs):
     custom_attrs_dict = transaction._custom_params
     llm_metadata_dict = {key: value for key, value in custom_attrs_dict.items() if key.startswith("llm.")}

-    llm_context_attrs = getattr(transaction, "_custom_attr_context_var", None)
+    llm_context_attrs = getattr(transaction, "_llm_context_attrs", None)
     if llm_context_attrs:
-        context_attrs = llm_context_attrs.get()
-        llm_metadata_dict.update(context_attrs)
+        llm_metadata_dict.update(llm_context_attrs)

     span_id = bedrock_attrs.get("span_id", None)
     trace_id = bedrock_attrs.get("trace_id", None)

newrelic/hooks/mlmodel_langchain.py

Lines changed: 2 additions & 3 deletions

@@ -709,10 +709,9 @@ def _get_llm_metadata(transaction):
     # Grab LLM-related custom attributes off of the transaction to store as metadata on LLM events
     custom_attrs_dict = transaction._custom_params
     llm_metadata_dict = {key: value for key, value in custom_attrs_dict.items() if key.startswith("llm.")}
-    llm_context_attrs = getattr(transaction, "_custom_attr_context_var", None)
+    llm_context_attrs = getattr(transaction, "_llm_context_attrs", None)
     if llm_context_attrs:
-        context_attrs = llm_context_attrs.get()
-        llm_metadata_dict.update(context_attrs)
+        llm_metadata_dict.update(llm_context_attrs)

     return llm_metadata_dict

newrelic/hooks/mlmodel_openai.py

Lines changed: 2 additions & 3 deletions

@@ -943,10 +943,9 @@ def _get_llm_attributes(transaction):
     custom_attrs_dict = transaction._custom_params
     llm_metadata_dict = {key: value for key, value in custom_attrs_dict.items() if key.startswith("llm.")}

-    llm_context_attrs = getattr(transaction, "_custom_attr_context_var", None)
+    llm_context_attrs = getattr(transaction, "_llm_context_attrs", None)
     if llm_context_attrs:
-        context_attrs = llm_context_attrs.get()
-        llm_metadata_dict.update(context_attrs)
+        llm_metadata_dict.update(llm_context_attrs)

     return llm_metadata_dict
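
The three hook changes above share the same merge logic: collect the "llm."-prefixed custom parameters from the transaction, then overlay whatever WithLlmCustomAttributes stored in _llm_context_attrs. A standalone sketch of that logic with plain dicts standing in for the transaction (the helper name and sample values are illustrative, not agent API):

def merge_llm_metadata(custom_params, llm_context_attrs=None):
    # Keep only custom attributes already prefixed with "llm.".
    llm_metadata_dict = {key: value for key, value in custom_params.items() if key.startswith("llm.")}
    # Context-manager attributes win on key collisions.
    if llm_context_attrs:
        llm_metadata_dict.update(llm_context_attrs)
    return llm_metadata_dict


# "llm.conversation_id" comes from add_custom_attribute, "llm.context" from
# WithLlmCustomAttributes; non-"llm." custom params are dropped.
print(merge_llm_metadata(
    {"llm.conversation_id": "my-awesome-id", "other": "ignored"},
    {"llm.context": "attr"},
))
# {'llm.conversation_id': 'my-awesome-id', 'llm.context': 'attr'}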

tests/agent_features/test_llm_custom_attributes.py

Lines changed: 7 additions & 6 deletions

@@ -1,3 +1,4 @@
+
 # Copyright 2010 New Relic, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -23,9 +24,9 @@
 def test_llm_custom_attributes():
     transaction = current_transaction()
     with WithLlmCustomAttributes({"test": "attr", "test1": "attr1"}):
-        assert transaction._custom_attr_context_var.get() == {"llm.test": "attr", "llm.test1": "attr1"}
+        assert transaction._llm_context_attrs == {"llm.test": "attr", "llm.test1": "attr1"}

-    assert transaction._custom_attr_context_var.get() is None
+    assert transaction._llm_context_attrs is None


 @pytest.mark.parametrize("context_attrs", (None, "not-a-dict"))
@@ -35,14 +36,14 @@ def test_llm_custom_attributes_no_attrs(context_attrs):

     with pytest.raises(TypeError):
         with WithLlmCustomAttributes(context_attrs):
-            assert transaction._custom_attr_context_var.get() is None
+            assert transaction._llm_context_attrs is None


 @background_task()
 def test_llm_custom_attributes_prefixed_attrs():
     transaction = current_transaction()
-    with WithLlmCustomAttributes({"llm.test": "attr", "llm.test1": "attr1"}):
+    with WithLlmCustomAttributes({"llm.test": "attr", "test1": "attr1"}):
         # Validate API does not prefix attributes that already begin with "llm."
-        assert transaction._custom_attr_context_var.get() == {"llm.test": "attr", "llm.test1": "attr1"}
+        assert transaction._llm_context_attrs == {"llm.test": "attr", "llm.test1": "attr1"}

-    assert transaction._custom_attr_context_var.get() is None
+    assert transaction._llm_context_attrs is None

tests/mlmodel_openai/test_chat_completion.py

Lines changed: 1 addition & 2 deletions

@@ -35,10 +35,9 @@
     validate_transaction_metrics,
 )
 from newrelic.api.background_task import background_task
-from newrelic.api.transaction import add_custom_attribute, current_transaction
+from newrelic.api.transaction import add_custom_attribute
 from newrelic.api.llm_custom_attributes import WithLlmCustomAttributes

-from newrelic.api.application import application_instance as application

 _test_openai_chat_completion_messages = (
     {"role": "system", "content": "You are a scientist."},

tests/mlmodel_openai/test_chat_completion_stream_error_v1.py

Lines changed: 17 additions & 14 deletions

@@ -24,6 +24,7 @@
     add_token_count_to_events,
     disabled_ai_monitoring_record_content_settings,
     events_sans_content,
+    events_with_context_attrs,
     llm_token_count_callback,
     set_trace_info,
 )
@@ -118,18 +119,19 @@
     rollup_metrics=[("Llm/completion/OpenAI/create", 1)],
     background_task=True,
 )
-@validate_custom_events(expected_events_on_no_model_error)
+@validate_custom_events(events_with_context_attrs(expected_events_on_no_model_error))
 @validate_custom_event_count(count=3)
 @background_task()
 def test_chat_completion_invalid_request_error_no_model(set_trace_info, sync_openai_client):
     with pytest.raises(TypeError):
         set_trace_info()
         add_custom_attribute("llm.conversation_id", "my-awesome-id")
-        generator = sync_openai_client.chat.completions.create(
-            messages=_test_openai_chat_completion_messages, temperature=0.7, max_tokens=100, stream=True
-        )
-        for resp in generator:
-            assert resp
+        with WithLlmCustomAttributes({"context": "attr"}):
+            generator = sync_openai_client.chat.completions.create(
+                messages=_test_openai_chat_completion_messages, temperature=0.7, max_tokens=100, stream=True
+            )
+            for resp in generator:
+                assert resp


 @dt_enabled
@@ -189,22 +191,23 @@ def test_chat_completion_invalid_request_error_no_model_no_content(set_trace_inf
     rollup_metrics=[("Llm/completion/OpenAI/create", 1)],
     background_task=True,
 )
-@validate_custom_events(expected_events_on_no_model_error)
+@validate_custom_events(events_with_context_attrs(expected_events_on_no_model_error))
 @validate_custom_event_count(count=3)
 @background_task()
 def test_chat_completion_invalid_request_error_no_model_async(loop, set_trace_info, async_openai_client):
     with pytest.raises(TypeError):
         set_trace_info()
         add_custom_attribute("llm.conversation_id", "my-awesome-id")
+        with WithLlmCustomAttributes({"context": "attr"}):

-        async def consumer():
-            generator = await async_openai_client.chat.completions.create(
-                messages=_test_openai_chat_completion_messages, temperature=0.7, max_tokens=100, stream=True
-            )
-            async for resp in generator:
-                assert resp
+            async def consumer():
+                generator = await async_openai_client.chat.completions.create(
+                    messages=_test_openai_chat_completion_messages, temperature=0.7, max_tokens=100, stream=True
+                )
+                async for resp in generator:
+                    assert resp

-        loop.run_until_complete(consumer())
+            loop.run_until_complete(consumer())


 @dt_enabled
