Commit 3fce48a

advanced filtering on dataset collection
1 parent d3b27d3 commit 3fce48a

1 file changed: +4 additions, -8 deletions

parea/wrapper/openai/openai.py

Lines changed: 4 additions & 8 deletions

@@ -17,8 +17,6 @@
     from openai.util import convert_to_openai_object
 else:
     from openai.types.chat import ChatCompletion as OpenAIObject
-    from openai.types.chat import ParsedChatCompletion as OpenAIObjectParsed
-    from openai.types.chat import ParsedChatCompletionMessage
 
     def convert_to_openai_object(kwargs) -> OpenAIObject:
         if "id" not in kwargs:

@@ -57,7 +55,7 @@ def get_original_methods(self, module_client=openai):
             original_methods = {"ChatCompletion.create": module_client.ChatCompletion.create, "ChatCompletion.acreate": module_client.ChatCompletion.acreate}
         else:
             try:
-                original_methods = {"chat.completions.create": module_client.chat.completions.create, "beta.chat.completions.parse": module_client.beta.chat.completions.parse}
+                original_methods = {"chat.completions.create": module_client.chat.completions.create}
             except openai.OpenAIError:
                 original_methods = {}
         return list(original_methods.keys())
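
The keys of original_methods are dotted attribute paths on the openai module, which lets the patching side walk to the bound method and swap in a traced version. An illustrative helper (not the wrapper's own patch code) could look like this:

```python
from functools import reduce


def resolve_attr(obj, dotted_path: str):
    """Follow a path like "chat.completions.create" down to the attribute."""
    return reduce(getattr, dotted_path.split("."), obj)


def replace_attr(obj, dotted_path: str, new_value) -> None:
    """Set the final attribute on its parent, e.g. to install a traced wrapper."""
    *parent_path, name = dotted_path.split(".")
    parent = reduce(getattr, parent_path, obj)
    setattr(parent, name, new_value)
```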

@@ -105,7 +103,7 @@ def resolver(self, trace_id: str, _args: Sequence[Any], kwargs: Dict[str, Any],
         trace_data.get()[trace_id].output_tokens = output_tokens
         trace_data.get()[trace_id].total_tokens = total_tokens
         trace_data.get()[trace_id].cost = _compute_cost(input_tokens, output_tokens, model)
-        trace_data.get()[trace_id].output = json_dumps(output) if not isinstance(output, str) else output
+        trace_data.get()[trace_id].output = output
         return response
 
     def gen_resolver(self, trace_id: str, _args: Sequence[Any], kwargs: Dict[str, Any], response, final_log):
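
The removed line serialized non-string outputs before attaching them to the trace, while the new line stores the value as-is. For comparison, the dropped behavior amounted to something like the following (json_dumps is assumed to be a thin wrapper over json.dumps):

```python
import json


def json_dumps(obj) -> str:
    # assumed stand-in for the project's json_dumps helper
    return json.dumps(obj, default=str)


def serialize_output(output):
    # behavior of the removed line: only non-string outputs were dumped to JSON
    return json_dumps(output) if not isinstance(output, str) else output
```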

@@ -271,7 +269,7 @@ def _kwargs_to_llm_configuration(kwargs):
 
     @staticmethod
     def _get_output(result: Any, model: Optional[str] = None) -> str:
-        if not isinstance(result, (OpenAIObject, OpenAIObjectParsed)) and isinstance(result, dict):
+        if not isinstance(result, OpenAIObject) and isinstance(result, dict):
             result = convert_to_openai_object(
                 {
                     "choices": [

@@ -284,9 +282,7 @@ def _get_output(result: Any, model: Optional[str] = None) -> str:
                 }
             )
         response_message = result.choices[0].message
-        if isinstance(response_message, ParsedChatCompletionMessage):
-            completion = response_message.parsed.model_dump_json() if response_message.parsed else ""
-        elif not response_message.get("content", None) if is_old_openai else not response_message.content:
+        if not response_message.get("content", None) if is_old_openai else not response_message.content:
             completion = OpenAIWrapper._format_function_call(response_message)
         else:
             completion = response_message.content
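
After this change _get_output no longer special-cases parsed (structured-output) messages: a message without plain content is treated as a function call and rendered through _format_function_call, otherwise its text content is returned. A standalone sketch of that branch, with the formatter body assumed rather than taken from the file:

```python
import json


def format_function_call(message) -> str:
    # assumed stand-in for OpenAIWrapper._format_function_call
    call = message.get("function_call") if isinstance(message, dict) else getattr(message, "function_call", None)
    return json.dumps(call, default=str)


def extract_completion(response_message, is_old_openai: bool) -> str:
    # old-style responses hand back dicts; new-style ones are pydantic messages
    content = response_message.get("content") if is_old_openai else response_message.content
    if not content:
        return format_function_call(response_message)
    return content
```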
