Skip to content

Commit 8823c8c

Browse files
Luca Candela
authored and committed
tighten optional providers and expand ci triggers
1 parent ff41c1d commit 8823c8c

File tree

11 files changed

+123
-48
lines changed

11 files changed

+123
-48
lines changed

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ name: CI
33
on:
44
push:
55
branches:
6-
- main
6+
- '**'
77
pull_request:
88

99
jobs:
Lines changed: 23 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,33 @@
1+
from __future__ import annotations
2+
13
from .generic import GenericLiteLLMReranker
24
from .openai import OpenAIReranker
35

46
__all__ = ['GenericLiteLLMReranker', 'OpenAIReranker']
57

8+
9+
def _missing_provider(name: str, dependency: str) -> type:
10+
class _MissingProvider: # pragma: no cover - simple error shim
11+
def __init__(self, *args, **kwargs):
12+
raise ImportError(
13+
f'{name} requires the optional dependency "{dependency}". '
14+
f'Install it via `uv sync --extra {dependency}` or the matching extras group.'
15+
)
16+
17+
_MissingProvider.__name__ = f'Missing{name}'
18+
return _MissingProvider
19+
20+
621
try: # pragma: no cover - optional dependency
7-
from .gemini import GeminiReranker
8-
except ImportError: # pragma: no cover - optional dependency guard
9-
GeminiReranker = None # type: ignore[assignment]
10-
else: # pragma: no cover - optional dependency available
22+
from .gemini import GeminiReranker # type: ignore[unused-import]
23+
except ImportError: # pragma: no cover
24+
GeminiReranker = _missing_provider('GeminiReranker', 'google-genai') # type: ignore[assignment]
25+
else: # pragma: no cover
1126
__all__.append('GeminiReranker')
1227

1328
try: # pragma: no cover - optional dependency
14-
from .bge import BGEReranker
15-
except ImportError: # pragma: no cover - optional dependency guard
16-
BGEReranker = None # type: ignore[assignment]
17-
else: # pragma: no cover - optional dependency available
29+
from .bge import BGEReranker # type: ignore[unused-import]
30+
except ImportError: # pragma: no cover
31+
BGEReranker = _missing_provider('BGEReranker', 'sentence-transformers') # type: ignore[assignment]
32+
else: # pragma: no cover
1833
__all__.append('BGEReranker')

graphium_core/cross_encoder/rerankers/gemini.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,15 @@
1111
from ..reranker_client import RerankerClient
1212

1313
if TYPE_CHECKING: # pragma: no cover - typing only
14-
genai = typing.Any # type: ignore[assignment]
15-
types = typing.Any # type: ignore[assignment]
14+
try:
15+
from google import genai as genai # type: ignore[import-not-found]
16+
from google.genai import types as types # type: ignore[import-not-found]
17+
except ImportError: # pragma: no cover - optional dependency missing at type time
18+
genai = typing.cast(typing.Any, None)
19+
types = typing.cast(typing.Any, None)
1620
else: # pragma: no cover - runtime placeholders
17-
genai = typing.Any
18-
types = typing.Any
21+
genai = typing.cast(typing.Any, None)
22+
types = typing.cast(typing.Any, None)
1923

2024
logger = logging.getLogger(__name__)
2125

graphium_core/embedder/providers/__init__.py

Lines changed: 26 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -4,26 +4,39 @@
44

55
__all__ = ['OpenAIEmbedder', 'OpenAIEmbedderConfig']
66

7+
8+
def _missing_provider(name: str, dependency: str) -> type:
9+
class _MissingProvider: # pragma: no cover - shim for optional extras
10+
def __init__(self, *args, **kwargs):
11+
raise ImportError(
12+
f'{name} requires the optional dependency "{dependency}". '
13+
f'Install it via `uv sync --extra {dependency}` or the corresponding extras group.'
14+
)
15+
16+
_MissingProvider.__name__ = f'Missing{name}'
17+
return _MissingProvider
18+
19+
720
try: # pragma: no cover - optional dependency
8-
from .gemini import GeminiEmbedder, GeminiEmbedderConfig
9-
except ImportError: # pragma: no cover - optional dependency guard
10-
GeminiEmbedder = None # type: ignore[assignment]
11-
GeminiEmbedderConfig = None # type: ignore[assignment]
12-
else: # pragma: no cover - optional dependency present
21+
from .gemini import GeminiEmbedder, GeminiEmbedderConfig # type: ignore[unused-import]
22+
except ImportError: # pragma: no cover
23+
GeminiEmbedder = _missing_provider('GeminiEmbedder', 'google-genai') # type: ignore[assignment]
24+
GeminiEmbedderConfig = _missing_provider('GeminiEmbedderConfig', 'google-genai') # type: ignore[assignment]
25+
else: # pragma: no cover
1326
__all__.extend(['GeminiEmbedder', 'GeminiEmbedderConfig'])
1427

1528
try: # pragma: no cover - optional dependency
16-
from .voyage import VoyageAIEmbedder, VoyageAIEmbedderConfig
17-
except ImportError: # pragma: no cover - optional dependency guard
18-
VoyageAIEmbedder = None # type: ignore[assignment]
19-
VoyageAIEmbedderConfig = None # type: ignore[assignment]
29+
from .voyage import VoyageAIEmbedder, VoyageAIEmbedderConfig # type: ignore[unused-import]
30+
except ImportError: # pragma: no cover
31+
VoyageAIEmbedder = _missing_provider('VoyageAIEmbedder', 'voyageai') # type: ignore[assignment]
32+
VoyageAIEmbedderConfig = _missing_provider('VoyageAIEmbedderConfig', 'voyageai') # type: ignore[assignment]
2033
else: # pragma: no cover
2134
__all__.extend(['VoyageAIEmbedder', 'VoyageAIEmbedderConfig'])
2235

2336
try: # pragma: no cover - optional dependency
24-
from .embeddinggemma import EmbeddingGemmaConfig, EmbeddingGemmaEmbedder
25-
except ImportError: # pragma: no cover - optional dependency guard
26-
EmbeddingGemmaEmbedder = None # type: ignore[assignment]
27-
EmbeddingGemmaConfig = None # type: ignore[assignment]
37+
from .embeddinggemma import EmbeddingGemmaConfig, EmbeddingGemmaEmbedder # type: ignore[unused-import]
38+
except ImportError: # pragma: no cover
39+
EmbeddingGemmaEmbedder = _missing_provider('EmbeddingGemmaEmbedder', 'sentence-transformers') # type: ignore[assignment]
40+
EmbeddingGemmaConfig = _missing_provider('EmbeddingGemmaConfig', 'sentence-transformers') # type: ignore[assignment]
2841
else: # pragma: no cover
2942
__all__.extend(['EmbeddingGemmaEmbedder', 'EmbeddingGemmaConfig'])

graphium_core/llm_client/pydantic_ai_adapter.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -117,6 +117,14 @@ async def maybe_run_with_pydantic_ai(
117117
if conversation is None:
118118
return None
119119

120+
api_key = settings.resolved_api_key()
121+
if not api_key:
122+
logger.debug(
123+
'Skipping Pydantic AI path for provider %s: API key unavailable.',
124+
getattr(settings.provider, 'value', settings.provider),
125+
)
126+
return None
127+
120128
try:
121129
model = create_pydantic_ai_model(settings, model_name)
122130
except PydanticAIUserError:

graphium_core/providers/factory.py

Lines changed: 28 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,13 @@ def create_embedder(
140140
return OpenAIEmbedder(config=config)
141141

142142
if provider == EmbedderProvider.gemini:
143-
from graphium_core.embedder import GeminiEmbedder, GeminiEmbedderConfig
143+
try:
144+
from graphium_core.embedder import GeminiEmbedder, GeminiEmbedderConfig
145+
except ImportError as exc: # pragma: no cover - optional dependency
146+
raise ImportError(
147+
'Gemini embedder requires the optional google-genai dependency. '
148+
'Install it via `uv sync --extra google-genai` to enable Gemini embeddings.'
149+
) from exc
144150

145151
_ensure_embedder_api_key(provider, resolved_settings, resolved_llm_settings)
146152
config_kwargs = {
@@ -153,7 +159,13 @@ def create_embedder(
153159
return GeminiEmbedder(config=config)
154160

155161
if provider == EmbedderProvider.embeddinggemma:
156-
from graphium_core.embedder import EmbeddingGemmaConfig, EmbeddingGemmaEmbedder
162+
try:
163+
from graphium_core.embedder import EmbeddingGemmaConfig, EmbeddingGemmaEmbedder
164+
except ImportError as exc: # pragma: no cover - optional dependency
165+
raise ImportError(
166+
'EmbeddingGemma embedder requires the optional sentence-transformers dependency. '
167+
'Install it via `uv sync --extra sentence-transformers` to enable EmbeddingGemma.'
168+
) from exc
157169

158170
config_kwargs = {
159171
'embedding_dim': embedding_dim,
@@ -196,14 +208,26 @@ def create_reranker(
196208
return OpenAIReranker(config=config)
197209

198210
if provider == LLMProvider.gemini:
199-
from graphium_core.cross_encoder import GeminiReranker
211+
try:
212+
from graphium_core.cross_encoder import GeminiReranker
213+
except ImportError as exc: # pragma: no cover - optional dependency
214+
raise ImportError(
215+
'Gemini reranker requires the optional google-genai dependency. '
216+
'Install it via `uv sync --extra google-genai` to enable Gemini reranking.'
217+
) from exc
200218

201219
_ensure_llm_api_key(settings)
202220
config = LLMConfig(api_key=settings.resolved_api_key(), model=settings.model)
203221
return GeminiReranker(config=config)
204222

205223
if provider.value == 'bge':
206-
from graphium_core.cross_encoder import BGEReranker
224+
try:
225+
from graphium_core.cross_encoder import BGEReranker
226+
except ImportError as exc: # pragma: no cover - optional dependency
227+
raise ImportError(
228+
'BGE reranker requires the optional sentence-transformers dependency. '
229+
'Install it via `uv sync --extra sentence-transformers` to enable BGE reranking.'
230+
) from exc
207231

208232
return BGEReranker()
209233

graphium_core/settings.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,11 @@
3535

3636

3737
class GraphiumBaseSettings(BaseSettings):
38-
model_config = SettingsConfigDict(env_file='.env', extra='ignore')
38+
model_config = SettingsConfigDict(
39+
env_file='.env',
40+
extra='ignore',
41+
populate_by_name=True,
42+
)
3943

4044

4145
class LLMProvider(str, Enum):

mcp_server/graphium_mcp/state.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,15 +3,21 @@
33
from __future__ import annotations
44

55
import asyncio
6-
from collections.abc import Awaitable, Callable
7-
from typing import TYPE_CHECKING
6+
from collections.abc import Awaitable
7+
from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable
88

99
from .config import GraphiumConfig
1010

1111
if TYPE_CHECKING:
1212
from graphium_core import Graphium
1313

14-
EpisodeProcessor = Callable[[], Awaitable[None]]
14+
15+
@runtime_checkable
16+
class EpisodeProcessor(Protocol):
17+
queue_metadata: dict[str, Any] | None # attribute populated by queue management
18+
19+
def __call__(self) -> Awaitable[None]:
20+
...
1521

1622
graphium_config: GraphiumConfig = GraphiumConfig()
1723
graphium_client: Graphium | None = None

mcp_server/graphium_mcp/tools.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -106,13 +106,14 @@ async def process_episode() -> None:
106106
group_id_str,
107107
)
108108

109-
process_episode.queue_metadata = {
109+
queue_task = cast(state.EpisodeProcessor, process_episode)
110+
queue_task.queue_metadata = {
110111
'name': name,
111112
'group_id': group_id_str,
112113
'source': source_type.value,
113114
}
114115

115-
position = await enqueue_episode(group_id_str, process_episode)
116+
position = await enqueue_episode(group_id_str, queue_task)
116117

117118
pending_failures = state.queue_failures.get(group_id_str, [])
118119
message = f"Episode '{name}' queued for processing (position: {position})"

pyproject.toml

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -35,16 +35,16 @@ groq = ["groq>=0.2.0"]
3535
google-genai = ["google-genai>=1.8.0"]
3636
kuzu = ["kuzu>=0.11.2"]
3737
falkordb = ["falkordb>=1.1.2,<2.0.0"]
38-
voyageai = ["voyageai>=0.2.3"]
38+
voyageai = ["voyageai>=0.3.3"]
3939
neo4j-opensearch = ["boto3>=1.39.16", "opensearch-py>=3.0.0"]
40-
sentence-transformers = ["sentence-transformers>=3.2.1"]
40+
sentence-transformers = ["sentence-transformers>=5.0.0"]
4141
neptune = ["langchain-aws>=0.2.29", "opensearch-py>=3.0.0", "boto3>=1.39.16"]
4242
tracing = ["opentelemetry-api>=1.20.0", "opentelemetry-sdk>=1.20.0"]
4343
dev = [
4444
"pyright>=1.1.404",
4545
"fastapi>=0.115.0",
4646
"uvicorn>=0.30.0",
47-
"fastmcp>=2.12.0",
47+
"fastmcp>=2.12.4",
4848
"mcp>=1.5.0",
4949
"groq>=0.2.0",
5050
"anthropic>=0.49.0",
@@ -61,12 +61,12 @@ dev = [
6161
"langchain-anthropic>=0.2.4",
6262
"langsmith>=0.1.108",
6363
"langchain-openai>=0.2.6",
64-
"sentence-transformers>=3.2.1",
64+
"sentence-transformers>=5.0.0",
6565
"transformers>=4.45.2",
66-
"voyageai>=0.2.3",
66+
"voyageai>=0.3.3",
6767
"pytest>=8.3.3",
6868
"pytest-cov>=7.0.0",
69-
"pytest-asyncio>=0.24.0",
69+
"pytest-asyncio>=1.0.0",
7070
"pytest-xdist>=3.6.1",
7171
"ruff>=0.7.1",
7272
"opentelemetry-sdk>=1.20.0",

0 commit comments

Comments (0)