
Commit 3160bd1

Author: dori

feat: fix build

1 parent 3b54c38, commit 3160bd1

File tree: 3 files changed (+7 additions, -1 deletion)

src/mcp_as_a_judge/db/dynamic_token_limits.py

Lines changed: 2 additions & 0 deletions
@@ -69,6 +69,8 @@ def get_model_limits(model_name: str | None = None) -> ModelLimits:
         _model_limits_cache[model_name] = limits
 
     except Exception:
+        # LiteLLM not available or model info retrieval failed
+        # Continue with hardcoded defaults
         pass
 
     return limits

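For context, the hunk above documents a lookup that prefers LiteLLM's model metadata and silently falls back to hardcoded defaults when LiteLLM is missing or the model is unknown. A minimal sketch of that pattern, not the module's actual implementation: the ModelLimits fields, their default values, and the litellm.get_model_info keys used here are assumptions.

from dataclasses import dataclass


# Hypothetical shape of the limits record; the real ModelLimits lives in this module.
@dataclass
class ModelLimits:
    max_input_tokens: int = 8192   # assumed hardcoded default
    max_output_tokens: int = 4096  # assumed hardcoded default


_model_limits_cache: dict[str, ModelLimits] = {}


def get_model_limits(model_name: str | None = None) -> ModelLimits:
    limits = ModelLimits()  # start from hardcoded defaults
    if not model_name:
        return limits
    if model_name in _model_limits_cache:
        return _model_limits_cache[model_name]
    try:
        import litellm

        info = litellm.get_model_info(model_name)  # raises for unknown models
        limits = ModelLimits(
            max_input_tokens=info.get("max_input_tokens") or limits.max_input_tokens,
            max_output_tokens=info.get("max_output_tokens") or limits.max_output_tokens,
        )
        _model_limits_cache[model_name] = limits
    except Exception:
        # LiteLLM not available or model info retrieval failed
        # Continue with hardcoded defaults
        pass
    return limits
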
src/mcp_as_a_judge/db/token_utils.py

Lines changed: 2 additions & 0 deletions
@@ -35,6 +35,7 @@ async def detect_model_name(ctx=None) -> str | None:
         if client and hasattr(client, "config") and client.config.model_name:
             return client.config.model_name
     except Exception:
+        # LLM client not available or configuration error
        pass
 
     # Try MCP sampling if context available
@@ -57,6 +58,7 @@ async def detect_model_name(ctx=None) -> str | None:
             return result.model
 
     except Exception:
+        # MCP sampling failed or not available
        pass
 
     return None

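The two hunks above annotate both fallback branches of detect_model_name: first a configured LLM client, then MCP sampling. A rough sketch of that detection order, with assumed helper names (get_llm_client, sample_via_mcp) standing in for whatever the module actually calls:

async def detect_model_name(ctx=None) -> str | None:
    # 1. Prefer the model name from an explicitly configured LLM client.
    try:
        client = get_llm_client()  # assumed helper; not the module's real API
        if client and hasattr(client, "config") and client.config.model_name:
            return client.config.model_name
    except Exception:
        # LLM client not available or configuration error
        pass

    # 2. Try MCP sampling if context available.
    try:
        if ctx is not None:
            result = await sample_via_mcp(ctx)  # assumed helper wrapping MCP sampling
            if result is not None and getattr(result, "model", None):
                return result.model
    except Exception:
        # MCP sampling failed or not available
        pass

    return None
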
tests/test_improved_token_counting.py

Lines changed: 3 additions & 1 deletion
@@ -152,7 +152,9 @@ async def test_enhanced_calls_with_optional_params(self):
         """Test enhanced calls with optional model parameters."""
         # New-style calls with optional parameters should work
         tokens1 = await calculate_tokens("Hello world", model_name=None, ctx=None)
-        tokens2 = await calculate_record_tokens("Hello", "world", model_name=None, ctx=None)
+        tokens2 = await calculate_record_tokens(
+            "Hello", "world", model_name=None, ctx=None
+        )
 
         assert tokens1 > 0
         assert tokens2 > 0
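
The updated test keeps exercising the optional model_name/ctx parameters; the only functional change is wrapping the long calculate_record_tokens call for line length. A hedged usage sketch of those helpers, with the import path assumed from the repository layout rather than taken from the test:

import asyncio

# Assumed import path; the test may import these helpers from a different module.
from mcp_as_a_judge.db.token_utils import calculate_record_tokens, calculate_tokens


async def main() -> None:
    # With model_name=None and ctx=None the helpers fall back to default
    # token counting, per the earlier hunks.
    tokens1 = await calculate_tokens("Hello world", model_name=None, ctx=None)
    tokens2 = await calculate_record_tokens(
        "Hello", "world", model_name=None, ctx=None
    )
    assert tokens1 > 0 and tokens2 > 0


asyncio.run(main())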
