Skip to content

Commit d5e9f31

Browse files
authored
Merge pull request #187 from veithly/fix/fix-env
Update Python version compatibility in imports and adjust max_tokens for Gemini model configuration. Refine formatting in __init__.py for improved readability.
2 parents 0bd1184 + c832091 commit d5e9f31

File tree

3 files changed, 8 insertions(+), 7 deletions(-)

spoon_ai/__init__.py

Lines changed: 5 additions & 5 deletions
@@ -2,7 +2,7 @@
 
 from pathlib import Path
 
-try: # Python 3.11+
+try: # Python 3.12+
     from importlib.metadata import PackageNotFoundError, version as _dist_version
 except Exception: # pragma: no cover - fallback for older environments
     try:
@@ -26,7 +26,7 @@ def _read_local_pyproject_version() -> str | None:
 
     try:
         try:
-            import tomllib # Python 3.11+
+            import tomllib # Python 3.12+
         except Exception: # pragma: no cover
             import tomli as tomllib # type: ignore
 

@@ -61,13 +61,13 @@ def _resolve_version() -> str:
 
 __version__: str = _resolve_version()
 
-from spoon_ai.chat import ChatBot
-from spoon_ai.schema import LLMResponse, LLMResponseChunk, Message
+from spoon_ai.chat import ChatBot
+from spoon_ai.schema import LLMResponse, LLMResponseChunk, Message
 
 __all__ = [
     "__version__",
     "ChatBot",
-    "Message",
+    "Message",
     "LLMResponse",
     "LLMResponseChunk",
 ]

(The removed and added lines in this hunk appear identical; the change is whitespace-only formatting, per the commit message "Refine formatting in __init__.py".)

spoon_ai/llm/config.py

Lines changed: 1 addition & 1 deletion
@@ -341,7 +341,7 @@ def _get_provider_defaults(self, provider_name: str) -> Dict[str, Any]:
         },
         'gemini': {
             'model': 'gemini-2.5-pro',
-            'max_tokens': 250000,
+            'max_tokens': 20000,
             'base_url': 'https://generativelanguage.googleapis.com/v1beta',
             'temperature': 0.1, # Lower temperature for Gemini
             **{k: v for k, v in common_defaults.items() if k != 'temperature'}

spoon_ai/llm/providers/gemini_provider.py

Lines changed: 2 additions & 1 deletion
@@ -569,6 +569,7 @@ def get_metadata(self) -> ProviderMetadata:
                 ProviderCapability.IMAGE_GENERATION,
                 ProviderCapability.VISION
             ],
+            max_tokens=self.max_tokens,
             supports_system_messages=True,
             rate_limits={
                 "requests_per_minute": 60,
@@ -615,4 +616,4 @@ async def _handle_error(self, error: Exception) -> None:
         elif "timeout" in error_str or "connection" in error_str:
             raise NetworkError("gemini", "Network error", original_error=error)
         else:
-            raise ProviderError("gemini", f"Request failed: {str(error)}", original_error=error)
+            raise ProviderError("gemini", f"Request failed: {str(error)}", original_error=error)

(The removed and added lines are identical in content; this change adds a trailing newline at end of file.)

0 commit comments

Comments (0)