Skip to content

Commit aa24576

Browse files
committed
chore: upgrade python
1 parent 1674bec commit aa24576

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

56 files changed

+228
-246
lines changed

aidial_adapter_openai/audio_api/transcribe/prompt.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from __future__ import annotations
22

33
import mimetypes
4-
from typing import Any, List
4+
from typing import Any
55

66
from aidial_sdk.exceptions import RequestValidationError
77
from pydantic import BaseModel
@@ -50,7 +50,7 @@ async def from_request(
5050

5151
system_message = _collect_system_messages(messages)
5252

53-
audios: List[FileResource] = []
53+
audios: list[FileResource] = []
5454

5555
for message in result:
5656
for file in message.files:

aidial_adapter_openai/chat_completions/gpt.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from typing import AsyncIterator, Callable, Coroutine, List, Mapping, Tuple
1+
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
22

33
from aidial_sdk.exceptions import InvalidRequestError
44
from openai import AsyncAzureOpenAI, AsyncOpenAI, AsyncStream
@@ -29,10 +29,10 @@
2929

3030
async def multi_modal_truncate_prompt(
3131
request: dict,
32-
messages: List[MultiModalMessage],
32+
messages: list[MultiModalMessage],
3333
max_prompt_tokens: int,
3434
tokenizer: Tokenizer,
35-
) -> Tuple[List[MultiModalMessage], DiscardedMessages, TruncatedTokens]:
35+
) -> tuple[list[MultiModalMessage], DiscardedMessages, TruncatedTokens]:
3636
return await truncate_prompt(
3737
messages=messages,
3838
message_tokens=tokenizer.tokenize_request_message,
@@ -62,9 +62,9 @@ def _extract_max_prompt_tokens(request: dict) -> int | None:
6262

6363

6464
async def _truncate_messages(
65-
request: dict, messages: List[MultiModalMessage], tokenizer: Tokenizer
66-
) -> Tuple[
67-
List[MultiModalMessage],
65+
request: dict, messages: list[MultiModalMessage], tokenizer: Tokenizer
66+
) -> tuple[
67+
list[MultiModalMessage],
6868
DiscardedMessages | None,
6969
Callable[[], Coroutine[None, None, TruncatedTokens]],
7070
]:
@@ -107,7 +107,7 @@ async def chat_completion(
107107
eliminate_empty_choices: bool,
108108
) -> ResponseWithHeaders[AsyncIterator[dict] | dict]:
109109
n: int = request.get("n") or 1
110-
messages: List[dict] = request["messages"]
110+
messages: list[dict] = request["messages"]
111111
model_name = request["model"]
112112

113113
multi_modal_messages = await ResourceProcessor(

aidial_adapter_openai/chat_completions/gpt_oss.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,15 +18,16 @@
1818
https://cookbook.openai.com/articles/gpt-oss/handle-raw-cot
1919
"""
2020

21-
from typing import AsyncIterator, Set, TypeVar
21+
from collections.abc import AsyncIterator
22+
from typing import TypeVar
2223

2324
from pydantic import BaseModel
2425

2526
from aidial_adapter_openai.utils.streaming import map_stream
2627

2728

2829
class _ResponseTransformer(BaseModel):
29-
opened_reasoning_stages: Set[int] = set()
30+
opened_reasoning_stages: set[int] = set()
3031
"""Indices of choices where a reasoning stage was open.
3132
"""
3233

aidial_adapter_openai/chat_completions/non_gpt.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from typing import AsyncIterator
1+
from collections.abc import AsyncIterator
22

33
from openai import AsyncAzureOpenAI, AsyncOpenAI, AsyncStream
44
from openai.types.chat.chat_completion import ChatCompletion

aidial_adapter_openai/chat_completions/transformation.py

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from dataclasses import dataclass
2-
from typing import List, Set, assert_never
2+
from typing import assert_never
33

44
from aidial_sdk.exceptions import InvalidRequestError
55
from openai.types.chat import (
@@ -46,15 +46,15 @@ class Error:
4646

4747
class MessageTransformer:
4848
file_storage: FileStorage | None
49-
errors: Set[Error]
50-
images: List[ImageResource]
51-
files: List[FileResource]
49+
errors: set[Error]
50+
images: list[ImageResource]
51+
files: list[FileResource]
5252

5353
def __init__(
5454
self,
5555
*,
5656
file_storage: FileStorage | None,
57-
errors: Set[Error] | None = None,
57+
errors: set[Error] | None = None,
5858
):
5959
self.file_storage = file_storage
6060
self.errors = set() if errors is None else errors
@@ -81,13 +81,13 @@ async def try_download_resource(
8181
return None
8282

8383
async def download_attachments(
84-
self, attachments: List[dict]
85-
) -> List[ChatCompletionContentPartImageParam | File]:
84+
self, attachments: list[dict]
85+
) -> list[ChatCompletionContentPartImageParam | File]:
8686

8787
if attachments:
8888
logger.debug(f"original attachments: {attachments}")
8989

90-
ret: List[ChatCompletionContentPartImageParam | File] = []
90+
ret: list[ChatCompletionContentPartImageParam | File] = []
9191
for attachment in attachments:
9292
if result := await self.download_attachment(attachment):
9393
ret.append(result)
@@ -146,15 +146,15 @@ async def download_content(
146146
self,
147147
content: (
148148
str
149-
| List[ChatCompletionContentPartParam | ContentArrayOfContentPart]
149+
| list[ChatCompletionContentPartParam | ContentArrayOfContentPart]
150150
),
151-
) -> List[ChatCompletionContentPartParam | ContentArrayOfContentPart]:
151+
) -> list[ChatCompletionContentPartParam | ContentArrayOfContentPart]:
152152
if isinstance(content, str):
153153
parts = [create_text_content_part(content)]
154154
else:
155155
parts = content
156156

157-
ret: List[
157+
ret: list[
158158
ChatCompletionContentPartParam | ContentArrayOfContentPart
159159
] = []
160160
for part in parts:
@@ -193,9 +193,9 @@ class ResourceProcessor(BaseModel):
193193
file_storage: FileStorage | None
194194

195195
async def transform_messages(
196-
self, messages: List[dict]
197-
) -> List[MultiModalMessage]:
198-
errors: Set[Error] = set()
196+
self, messages: list[dict]
197+
) -> list[MultiModalMessage]:
198+
errors: set[Error] = set()
199199
transformations = [
200200
await MessageTransformer(
201201
file_storage=self.file_storage, errors=errors

aidial_adapter_openai/completions.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
1-
from typing import Any, AsyncIterator, Dict
1+
from collections.abc import AsyncIterator
2+
from typing import Any
23

34
from aidial_sdk.exceptions import RequestValidationError
45
from openai import AsyncAzureOpenAI, AsyncOpenAI, AsyncStream
@@ -18,7 +19,7 @@ def sanitize_text(text: str) -> str:
1819

1920
def convert_to_chat_completions_response(
2021
chunk: Completion, is_stream: bool
21-
) -> Dict[str, Any]:
22+
) -> dict[str, Any]:
2223
converted_chunk = build_chunk(
2324
id=chunk.id,
2425
model=chunk.model,
@@ -37,7 +38,7 @@ def convert_to_chat_completions_response(
3738

3839
async def chat_completion(
3940
*,
40-
request: Dict[str, Any],
41+
request: dict[str, Any],
4142
client: AsyncAzureOpenAI | AsyncOpenAI,
4243
prompt_template: str | None,
4344
) -> AsyncIterator[dict] | dict:

aidial_adapter_openai/configuration/app_config.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from __future__ import annotations
22

33
import os
4-
from typing import Dict, List, assert_never
4+
from typing import assert_never
55

66
from aidial_sdk.exceptions import InternalServerError
77

@@ -39,23 +39,23 @@ class DeploymentAPIType(ExtraForbidModel):
3939

4040

4141
class ApplicationConfig(ExtraForbidModel):
42-
TIKTOKEN_MODEL_MAPPING: Dict[str, str] = {}
42+
TIKTOKEN_MODEL_MAPPING: dict[str, str] = {}
4343

44-
DALLE3_DEPLOYMENTS: List[str] = []
44+
DALLE3_DEPLOYMENTS: list[str] = []
4545
DALLE3_AZURE_API_VERSION: str = "2024-02-01"
4646

47-
GPT_IMAGE_1_DEPLOYMENTS: List[str] = []
47+
GPT_IMAGE_1_DEPLOYMENTS: list[str] = []
4848
GPT_IMAGE_1_AZURE_API_VERSION: str = "2025-04-01-preview"
4949

50-
MISTRAL_DEPLOYMENTS: List[str] = []
51-
DATABRICKS_DEPLOYMENTS: List[str] = []
52-
GPT4O_DEPLOYMENTS: List[str] = []
53-
GPT4O_MINI_DEPLOYMENTS: List[str] = []
54-
AZURE_AI_VISION_DEPLOYMENTS: List[str] = []
50+
MISTRAL_DEPLOYMENTS: list[str] = []
51+
DATABRICKS_DEPLOYMENTS: list[str] = []
52+
GPT4O_DEPLOYMENTS: list[str] = []
53+
GPT4O_MINI_DEPLOYMENTS: list[str] = []
54+
AZURE_AI_VISION_DEPLOYMENTS: list[str] = []
5555

56-
API_VERSIONS_MAPPING: Dict[str, str] = {}
57-
COMPLETION_DEPLOYMENTS_PROMPT_TEMPLATES: Dict[str, str] = {}
58-
NON_STREAMING_DEPLOYMENTS: List[str] = []
56+
API_VERSIONS_MAPPING: dict[str, str] = {}
57+
COMPLETION_DEPLOYMENTS_PROMPT_TEMPLATES: dict[str, str] = {}
58+
NON_STREAMING_DEPLOYMENTS: list[str] = []
5959
ELIMINATE_EMPTY_CHOICES: bool = False
6060

6161
AUDIO_AZURE_API_VERSION: str = "2025-03-01-preview"

aidial_adapter_openai/dial_api/embedding_inputs.py

Lines changed: 5 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,5 @@
1-
from typing import (
2-
AsyncIterator,
3-
Callable,
4-
Coroutine,
5-
List,
6-
TypeVar,
7-
assert_never,
8-
cast,
9-
)
1+
from collections.abc import AsyncIterator, Callable, Coroutine
2+
from typing import TypeVar, assert_never, cast
103

114
from aidial_sdk.chat_completion.request import Attachment
125
from aidial_sdk.embeddings.request import EmbeddingsRequest
@@ -15,7 +8,7 @@
158
_T = TypeVar("_T")
169

1710
_Coro = Coroutine[None, None, _T]
18-
_Tokens = List[int]
11+
_Tokens = list[int]
1912

2013

2114
async def reject_tokens(tokens: _Tokens):
@@ -25,7 +18,7 @@ async def reject_tokens(tokens: _Tokens):
2518
)
2619

2720

28-
async def reject_mixed(input: List[str | Attachment]):
21+
async def reject_mixed(input: list[str | Attachment]):
2922
raise RequestValidationError(
3023
"Embedding inputs composed of multiple texts and/or attachments aren't supported"
3124
)
@@ -37,7 +30,7 @@ async def collect_embedding_inputs(
3730
on_text: Callable[[str], _Coro[_T]],
3831
on_attachment: Callable[[Attachment], _Coro[_T]],
3932
on_tokens: Callable[[_Tokens], _Coro[_T]] = reject_tokens,
40-
on_mixed: Callable[[List[str | Attachment]], _Coro[_T]] = reject_mixed,
33+
on_mixed: Callable[[list[str | Attachment]], _Coro[_T]] = reject_mixed,
4134
) -> AsyncIterator[_T]:
4235

4336
async def _on_str_or_attachment(input: str | Attachment) -> _T:

aidial_adapter_openai/dial_api/request.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
1-
from typing import Any, Type, TypeVar
1+
from typing import Any, TypeVar
22

33
from aidial_sdk.exceptions import RequestValidationError
44
from pydantic import BaseModel, ValidationError
55

66
_T = TypeVar("_T", bound=BaseModel)
77

88

9-
def parse_configuration(cls: Type[_T], data: Any) -> _T | None:
9+
def parse_configuration(cls: type[_T], data: Any) -> _T | None:
1010
if (cf := data.get("custom_fields")) is None:
1111
return None
1212

aidial_adapter_openai/dial_api/resource.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import base64
22
import mimetypes
33
from abc import ABC, abstractmethod
4-
from typing import List
54

65
from aidial_sdk.chat_completion import Attachment
76
from pydantic import BaseModel, Field, root_validator
@@ -25,17 +24,17 @@ class MissingContentTypeError(ValidationError):
2524

2625
class UnsupportedContentTypeError(ValidationError):
2726
type: str
28-
supported_types: List[str]
27+
supported_types: list[str]
2928

30-
def __init__(self, *, message: str, type: str, supported_types: List[str]):
29+
def __init__(self, *, message: str, type: str, supported_types: list[str]):
3130
self.type = type
3231
self.supported_types = supported_types
3332
super().__init__(message)
3433

3534

3635
class DialResource(ABC, BaseModel):
3736
entity_name: str = Field(default=None)
38-
supported_types: List[str] | None = Field(default=None)
37+
supported_types: list[str] | None = Field(default=None)
3938

4039
@abstractmethod
4140
async def download(self, storage: FileStorage | None) -> Resource: ...

0 commit comments

Comments (0)