@@ -7,9 +7,7 @@
 try:
     import openai
 except ImportError:
-    raise ModuleNotFoundError(
-        "Please install the OpenAI SDK to use this feature: 'pip install openai'"
-    )
+    raise ModuleNotFoundError("Please install the OpenAI SDK to use this feature: 'pip install openai'")
 
 from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params
 from posthog.client import Client as PostHogClient
@@ -85,9 +83,7 @@ async def _create_streaming(
         usage_stats: Dict[str, int] = {}
         accumulated_content = []
         stream_options = {"include_usage": True}
-        response = await self._client.chat.completions.create(
-            **kwargs, stream_options=stream_options
-        )
+        response = await self._client.chat.completions.create(**kwargs, stream_options=stream_options)
 
         async def async_generator():
             nonlocal usage_stats, accumulated_content
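
This hunk only reflows the create(...) call; the surrounding code builds the stream with include_usage enabled and then wraps it in async_generator. For context, here is a hedged, standalone sketch (consume_stream is a hypothetical name, not the SDK's code) of the consumption pattern the wrapper presumably relies on: with stream_options={"include_usage": True}, the OpenAI SDK sends a final chunk whose choices list is empty and whose usage field carries the token totals.

async def consume_stream(response):
    accumulated_content = []
    usage_stats = {}
    async for chunk in response:
        # Content deltas arrive on ordinary chunks; the final usage-only
        # chunk has an empty choices list, so guard before indexing.
        if chunk.choices and chunk.choices[0].delta.content:
            accumulated_content.append(chunk.choices[0].delta.content)
        # With stream_options={"include_usage": True}, the last chunk
        # carries token counts for the whole completion.
        if chunk.usage:
            usage_stats = {
                "prompt_tokens": chunk.usage.prompt_tokens,
                "completion_tokens": chunk.usage.completion_tokens,
            }
    return "".join(accumulated_content), usage_stats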
@@ -153,9 +149,7 @@ def _capture_streaming_event(
             "$ai_latency": latency,
             "$ai_trace_id": posthog_trace_id,
             "$ai_posthog_properties": posthog_properties,
-            "$ai_request_url": str(
-                self._client.base_url.join("chat/completions")
-            ),
+            "$ai_request_url": str(self._client.base_url.join("chat/completions")),
         }
 
         if hasattr(self._client._ph_client, "capture"):
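
The $ai_request_url property in this hunk is built by joining the endpoint path onto the client's base URL. A small illustration of that join, assuming the default OpenAI base URL is in use (the client's base_url is an httpx.URL):

import httpx

# base_url.join() resolves the relative path against the base, so the
# captured property ends up as the full endpoint URL.
base_url = httpx.URL("https://api.openai.com/v1/")
print(str(base_url.join("chat/completions")))
# https://api.openai.com/v1/chat/completions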