@@ -81,29 +81,17 @@ llm_observability_examples.py:0: note: def create(self, *, messages: Iterable[Ch
 llm_observability_examples.py:0: note: def create(self, *, messages: Iterable[ChatCompletionDeveloperMessageParam | ChatCompletionSystemMessageParam | ChatCompletionUserMessageParam | ChatCompletionAssistantMessageParam | ChatCompletionToolMessageParam | ChatCompletionFunctionMessageParam], model: str | Literal['o3-mini', 'o3-mini-2025-01-31', 'o1', 'o1-2024-12-17', 'o1-preview', 'o1-preview-2024-09-12', 'o1-mini', 'o1-mini-2024-09-12', 'gpt-4.5-preview', 'gpt-4.5-preview-2025-02-27', 'gpt-4o', 'gpt-4o-2024-11-20', 'gpt-4o-2024-08-06', 'gpt-4o-2024-05-13', 'gpt-4o-audio-preview', 'gpt-4o-audio-preview-2024-10-01', 'gpt-4o-audio-preview-2024-12-17', 'gpt-4o-mini-audio-preview', 'gpt-4o-mini-audio-preview-2024-12-17', 'chatgpt-4o-latest', 'gpt-4o-mini', 'gpt-4o-mini-2024-07-18', 'gpt-4-turbo', 'gpt-4-turbo-2024-04-09', 'gpt-4-0125-preview', 'gpt-4-turbo-preview', 'gpt-4-1106-preview', 'gpt-4-vision-preview', 'gpt-4', 'gpt-4-0314', 'gpt-4-0613', 'gpt-4-32k', 'gpt-4-32k-0314', 'gpt-4-32k-0613', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0301', 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-1106', 'gpt-3.5-turbo-0125', 'gpt-3.5-turbo-16k-0613'], stream: Literal[True], audio: ChatCompletionAudioParam | NotGiven | None = ..., frequency_penalty: float | NotGiven | None = ..., function_call: Literal['none', 'auto'] | ChatCompletionFunctionCallOptionParam | NotGiven = ..., functions: Iterable[Function] | NotGiven = ..., logit_bias: dict[str, int] | NotGiven | None = ..., logprobs: bool | NotGiven | None = ..., max_completion_tokens: int | NotGiven | None = ..., max_tokens: int | NotGiven | None = ..., metadata: dict[str, str] | NotGiven | None = ..., modalities: list[Literal['text', 'audio']] | NotGiven | None = ..., n: int | NotGiven | None = ..., parallel_tool_calls: bool | NotGiven = ..., prediction: ChatCompletionPredictionContentParam | NotGiven | None = ..., presence_penalty: float | NotGiven | None = ..., reasoning_effort: Literal['low', 'medium', 'high'] | None | NotGiven | None = ..., response_format: ResponseFormatText | ResponseFormatJSONObject | ResponseFormatJSONSchema | NotGiven = ..., seed: int | NotGiven | None = ..., service_tier: Literal['auto', 'default'] | NotGiven | None = ..., stop: str | list[str] | NotGiven | None = ..., store: bool | NotGiven | None = ..., stream_options: ChatCompletionStreamOptionsParam | NotGiven | None = ..., temperature: float | NotGiven | None = ..., tool_choice: Literal['none', 'auto', 'required'] | ChatCompletionNamedToolChoiceParam | NotGiven = ..., tools: Iterable[ChatCompletionToolParam] | NotGiven = ..., top_logprobs: int | NotGiven | None = ..., top_p: float | NotGiven | None = ..., user: str | NotGiven = ..., extra_headers: Mapping[str, str | Omit] | None = ..., extra_query: Mapping[str, object] | None = ..., extra_body: object | None = ..., timeout: float | Timeout | NotGiven | None = ...) -> Coroutine[Any, Any, AsyncStream[ChatCompletionChunk]]
 llm_observability_examples.py:0: note: def create(self, *, messages: Iterable[ChatCompletionDeveloperMessageParam | ChatCompletionSystemMessageParam | ChatCompletionUserMessageParam | ChatCompletionAssistantMessageParam | ChatCompletionToolMessageParam | ChatCompletionFunctionMessageParam], model: str | Literal['o3-mini', 'o3-mini-2025-01-31', 'o1', 'o1-2024-12-17', 'o1-preview', 'o1-preview-2024-09-12', 'o1-mini', 'o1-mini-2024-09-12', 'gpt-4.5-preview', 'gpt-4.5-preview-2025-02-27', 'gpt-4o', 'gpt-4o-2024-11-20', 'gpt-4o-2024-08-06', 'gpt-4o-2024-05-13', 'gpt-4o-audio-preview', 'gpt-4o-audio-preview-2024-10-01', 'gpt-4o-audio-preview-2024-12-17', 'gpt-4o-mini-audio-preview', 'gpt-4o-mini-audio-preview-2024-12-17', 'chatgpt-4o-latest', 'gpt-4o-mini', 'gpt-4o-mini-2024-07-18', 'gpt-4-turbo', 'gpt-4-turbo-2024-04-09', 'gpt-4-0125-preview', 'gpt-4-turbo-preview', 'gpt-4-1106-preview', 'gpt-4-vision-preview', 'gpt-4', 'gpt-4-0314', 'gpt-4-0613', 'gpt-4-32k', 'gpt-4-32k-0314', 'gpt-4-32k-0613', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0301', 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-1106', 'gpt-3.5-turbo-0125', 'gpt-3.5-turbo-16k-0613'], stream: bool, audio: ChatCompletionAudioParam | NotGiven | None = ..., frequency_penalty: float | NotGiven | None = ..., function_call: Literal['none', 'auto'] | ChatCompletionFunctionCallOptionParam | NotGiven = ..., functions: Iterable[Function] | NotGiven = ..., logit_bias: dict[str, int] | NotGiven | None = ..., logprobs: bool | NotGiven | None = ..., max_completion_tokens: int | NotGiven | None = ..., max_tokens: int | NotGiven | None = ..., metadata: dict[str, str] | NotGiven | None = ..., modalities: list[Literal['text', 'audio']] | NotGiven | None = ..., n: int | NotGiven | None = ..., parallel_tool_calls: bool | NotGiven = ..., prediction: ChatCompletionPredictionContentParam | NotGiven | None = ..., presence_penalty: float | NotGiven | None = ..., reasoning_effort: Literal['low', 'medium', 'high'] | None | NotGiven | None = ..., response_format: ResponseFormatText | ResponseFormatJSONObject | ResponseFormatJSONSchema | NotGiven = ..., seed: int | NotGiven | None = ..., service_tier: Literal['auto', 'default'] | NotGiven | None = ..., stop: str | list[str] | NotGiven | None = ..., store: bool | NotGiven | None = ..., stream_options: ChatCompletionStreamOptionsParam | NotGiven | None = ..., temperature: float | NotGiven | None = ..., tool_choice: Literal['none', 'auto', 'required'] | ChatCompletionNamedToolChoiceParam | NotGiven = ..., tools: Iterable[ChatCompletionToolParam] | NotGiven = ..., top_logprobs: int | NotGiven | None = ..., top_p: float | NotGiven | None = ..., user: str | NotGiven = ..., extra_headers: Mapping[str, str | Omit] | None = ..., extra_query: Mapping[str, object] | None = ..., extra_body: object | None = ..., timeout: float | Timeout | NotGiven | None = ...) -> Coroutine[Any, Any, ChatCompletion | AsyncStream[ChatCompletionChunk]]
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_distinct_id" for "create" of "Embeddings" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/embeddings.py:0: note: "create" of "Embeddings" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_trace_id" for "create" of "Embeddings" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/embeddings.py:0: note: "create" of "Embeddings" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_properties" for "create" of "Embeddings" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/embeddings.py:0: note: "create" of "Embeddings" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_groups" for "create" of "Embeddings" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/embeddings.py:0: note: "create" of "Embeddings" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_distinct_id" for "create" of "AsyncEmbeddings" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/embeddings.py:0: note: "create" of "AsyncEmbeddings" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_trace_id" for "create" of "AsyncEmbeddings" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/embeddings.py:0: note: "create" of "AsyncEmbeddings" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_properties" for "create" of "AsyncEmbeddings" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/embeddings.py:0: note: "create" of "AsyncEmbeddings" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_groups" for "create" of "AsyncEmbeddings" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/embeddings.py:0: note: "create" of "AsyncEmbeddings" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_distinct_id" for "parse" of "Completions" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/beta/chat/completions.py:0: note: "parse" of "Completions" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_trace_id" for "parse" of "Completions" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/beta/chat/completions.py:0: note: "parse" of "Completions" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_properties" for "parse" of "Completions" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/beta/chat/completions.py:0: note: "parse" of "Completions" defined here
 llm_observability_examples.py:0: error: Unexpected keyword argument "posthog_groups" for "parse" of "Completions" [call-arg]
-env/lib/python3.13/site-packages/openai/resources/beta/chat/completions.py:0: note: "parse" of "Completions" defined here
 llm_observability_examples.py:0: error: No overload variant of "create" of "Completions" matches argument types "str", "list[dict[str, str]]", "list[dict[str, Collection[str]]]", "Any", "Any", "Any", "Any" [call-overload]
 llm_observability_examples.py:0: note: Possible overload variants:
 llm_observability_examples.py:0: note: def create(self, *, messages: Iterable[ChatCompletionDeveloperMessageParam | ChatCompletionSystemMessageParam | ChatCompletionUserMessageParam | ChatCompletionAssistantMessageParam | ChatCompletionToolMessageParam | ChatCompletionFunctionMessageParam], model: str | Literal['o3-mini', 'o3-mini-2025-01-31', 'o1', 'o1-2024-12-17', 'o1-preview', 'o1-preview-2024-09-12', 'o1-mini', 'o1-mini-2024-09-12', 'gpt-4.5-preview', 'gpt-4.5-preview-2025-02-27', 'gpt-4o', 'gpt-4o-2024-11-20', 'gpt-4o-2024-08-06', 'gpt-4o-2024-05-13', 'gpt-4o-audio-preview', 'gpt-4o-audio-preview-2024-10-01', 'gpt-4o-audio-preview-2024-12-17', 'gpt-4o-mini-audio-preview', 'gpt-4o-mini-audio-preview-2024-12-17', 'chatgpt-4o-latest', 'gpt-4o-mini', 'gpt-4o-mini-2024-07-18', 'gpt-4-turbo', 'gpt-4-turbo-2024-04-09', 'gpt-4-0125-preview', 'gpt-4-turbo-preview', 'gpt-4-1106-preview', 'gpt-4-vision-preview', 'gpt-4', 'gpt-4-0314', 'gpt-4-0613', 'gpt-4-32k', 'gpt-4-32k-0314', 'gpt-4-32k-0613', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0301', 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-1106', 'gpt-3.5-turbo-0125', 'gpt-3.5-turbo-16k-0613'], audio: ChatCompletionAudioParam | NotGiven | None = ..., frequency_penalty: float | NotGiven | None = ..., function_call: Literal['none', 'auto'] | ChatCompletionFunctionCallOptionParam | NotGiven = ..., functions: Iterable[Function] | NotGiven = ..., logit_bias: dict[str, int] | NotGiven | None = ..., logprobs: bool | NotGiven | None = ..., max_completion_tokens: int | NotGiven | None = ..., max_tokens: int | NotGiven | None = ..., metadata: dict[str, str] | NotGiven | None = ..., modalities: list[Literal['text', 'audio']] | NotGiven | None = ..., n: int | NotGiven | None = ..., parallel_tool_calls: bool | NotGiven = ..., prediction: ChatCompletionPredictionContentParam | NotGiven | None = ..., presence_penalty: float | NotGiven | None = ..., reasoning_effort: Literal['low', 'medium', 'high'] | None | NotGiven | None = ..., response_format: ResponseFormatText | ResponseFormatJSONObject | ResponseFormatJSONSchema | NotGiven = ..., seed: int | NotGiven | None = ..., service_tier: Literal['auto', 'default'] | NotGiven | None = ..., stop: str | list[str] | NotGiven | None = ..., store: bool | NotGiven | None = ..., stream: Literal[False] | NotGiven | None = ..., stream_options: ChatCompletionStreamOptionsParam | NotGiven | None = ..., temperature: float | NotGiven | None = ..., tool_choice: Literal['none', 'auto', 'required'] | ChatCompletionNamedToolChoiceParam | NotGiven = ..., tools: Iterable[ChatCompletionToolParam] | NotGiven = ..., top_logprobs: int | NotGiven | None = ..., top_p: float | NotGiven | None = ..., user: str | NotGiven = ..., extra_headers: Mapping[str, str | Omit] | None = ..., extra_query: Mapping[str, object] | None = ..., extra_body: object | None = ..., timeout: float | Timeout | NotGiven | None = ...) -> ChatCompletion