Skip to content

Commit e9789b6

Browse files
authored
Merge pull request #46 from Nayjest/infra_improvements
Infra improvements
2 parents 2eceb39 + bc58744 commit e9789b6

29 files changed: +285 additions, −232 deletions

lm_proxy/__main__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
"""Provides the CLI entry point when the package is executed as a Python module."""
2+
23
from .app import cli_app
34

45

lm_proxy/api_key_check/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
"""Collection of built-in API-key checkers for usage in the configuration."""
2+
23
from .in_config import check_api_key_in_config
34
from .with_request import CheckAPIKeyWithRequest
45
from .allow_all import AllowAll

lm_proxy/api_key_check/allow_all.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
This module provides a simple authentication strategy for development or testing
55
environments where all API keys should be accepted without validation.
66
"""
7+
78
from typing import Optional
89
from dataclasses import dataclass
910

@@ -25,10 +26,7 @@ class AllowAll:
2526
group: str = "default"
2627
capture_api_key: bool = True
2728

28-
def __call__(
29-
self,
30-
api_key: Optional[str]
31-
) -> tuple[str, dict[str, Optional[str]]]:
29+
def __call__(self, api_key: Optional[str]) -> tuple[str, dict[str, Optional[str]]]:
3230
"""
3331
Validate an API key (accepts all keys without verification).
3432

lm_proxy/api_key_check/in_config.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
For using this function,
66
set "api_key_check" configuration value to "lm_proxy.api_key_check.check_api_key_in_config".
77
"""
8+
89
from typing import Optional
910
from ..bootstrap import env
1011

lm_proxy/api_key_check/with_request.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
"""
22
API key check implementation using HTTP requests.
33
"""
4+
45
from typing import Optional
56
from dataclasses import dataclass, field
67
import requests
@@ -13,6 +14,7 @@ class CheckAPIKeyWithRequest: # pylint: disable=too-many-instance-attributes
1314
"""
1415
Validates a Client API key by making an HTTP request to a specified URL.
1516
"""
17+
1618
url: str = field()
1719
method: str = field(default="get")
1820
headers: dict = field(default_factory=dict)
@@ -45,10 +47,7 @@ def check_func(api_key: str) -> Optional[tuple[str, dict]]:
4547
for k, v in self.headers.items()
4648
}
4749
response = requests.request(
48-
method=self.method,
49-
url=url,
50-
headers=headers,
51-
timeout=self.timeout
50+
method=self.method, url=url, headers=headers, timeout=self.timeout
5251
)
5352
response.raise_for_status()
5453
group = self.default_group

lm_proxy/app.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
"""
22
LM-Proxy Application Entrypoint
33
"""
4+
45
import logging
56
from typing import Optional
67
from fastapi import FastAPI
@@ -19,9 +20,7 @@
1920
@cli_app.callback(invoke_without_command=True)
2021
def run_server(
2122
config: Optional[str] = typer.Option(None, help="Path to the configuration file"),
22-
debug: Optional[bool] = typer.Option(
23-
None, help="Enable debug mode (more verbose logging)"
24-
),
23+
debug: Optional[bool] = typer.Option(None, help="Enable debug mode (more verbose logging)"),
2524
env_file: Optional[str] = typer.Option(
2625
".env",
2726
"--env",
@@ -55,9 +54,7 @@ def web_app():
5554
"""
5655
Entrypoint for ASGI server
5756
"""
58-
app = FastAPI(
59-
title="LM-Proxy", description="OpenAI-compatible proxy server for LLM inference"
60-
)
57+
app = FastAPI(title="LM-Proxy", description="OpenAI-compatible proxy server for LLM inference")
6158
OpenAIHTTPException.register(app)
6259
app.add_api_route(
6360
path=f"{env.config.api_prefix}/chat/completions",

lm_proxy/base_types.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
"""Base types used in LM-Proxy."""
2+
23
import uuid
34
from dataclasses import dataclass, field
45
from datetime import datetime
@@ -17,6 +18,7 @@ class ChatCompletionRequest(BaseModel):
1718
"""
1819
Request model for chat/completions endpoint.
1920
"""
21+
2022
model: str
2123
messages: List[mc.Msg | dict]
2224
# | dict --> support of messages with lists of dicts
@@ -58,6 +60,7 @@ class RequestContext: # pylint: disable=too-many-instance-attributes
5860
"""
5961
Stores information about a single LLM request/response cycle for usage in middleware.
6062
"""
63+
6164
id: Optional[str] = field(default_factory=lambda: str(uuid.uuid4()))
6265
request: Optional[ChatCompletionRequest] = field(default=None)
6366
http_request: Optional[Request] = field(default=None)
@@ -83,7 +86,4 @@ def to_dict(self) -> dict:
8386
return data
8487

8588

86-
THandler = Callable[
87-
[RequestContext],
88-
Union[Awaitable[None], None]
89-
]
89+
THandler = Callable[[RequestContext], Union[Awaitable[None], None]]

lm_proxy/bootstrap.py

Lines changed: 5 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ def format(self, record):
4444

4545
class Env:
4646
"""Runtime environment singleton."""
47+
4748
config: Config
4849
connections: dict[str, mc.types.LLMAsyncFunctionType]
4950
debug: bool
@@ -66,9 +67,7 @@ def init(config: Config | str | PathLike, debug: bool = False):
6667
if isinstance(config, (str, PathLike)):
6768
config = Config.load(config)
6869
else:
69-
raise ValueError(
70-
"config must be a path (str or PathLike) or Config instance"
71-
)
70+
raise ValueError("config must be a path (str or PathLike) or Config instance")
7271
env.config = config
7372

7473
env._init_components()
@@ -84,18 +83,12 @@ def init(config: Config | str | PathLike, debug: bool = False):
8483
if inspect.iscoroutinefunction(conn_config):
8584
env.connections[conn_name] = conn_config
8685
elif isinstance(conn_config, str):
87-
env.connections[conn_name] = resolve_instance_or_callable(
88-
conn_config
89-
)
86+
env.connections[conn_name] = resolve_instance_or_callable(conn_config)
9087
else:
91-
mc.configure(
92-
**conn_config, EMBEDDING_DB_TYPE=mc.EmbeddingDbType.NONE
93-
)
88+
mc.configure(**conn_config, EMBEDDING_DB_TYPE=mc.EmbeddingDbType.NONE)
9489
env.connections[conn_name] = mc.env().llm_async_function
9590
except mc.LLMConfigError as e:
96-
raise ValueError(
97-
f"Error in configuration for connection '{conn_name}': {e}"
98-
) from e
91+
raise ValueError(f"Error in configuration for connection '{conn_name}': {e}") from e
9992

10093
logging.info("Done initializing %d connections.", len(env.connections))
10194

lm_proxy/config.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@ class ModelListingMode(StrEnum):
3030

3131
class Group(BaseModel):
3232
"""User group configuration."""
33+
3334
api_keys: list[str] = Field(default_factory=list)
3435
allowed_connections: str = Field(default="*") # Comma-separated list or "*"
3536

lm_proxy/config_loaders/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
"""Built-in configuration loaders for different file formats."""
2+
23
from .python import load_python_config
34
from .toml import load_toml_config
45
from .yaml import load_yaml_config

Commit comments (0)