Skip to content

Commit 32da200

Browse files
author
StickyCoolDev
committed
Initial Commit
0 parents  commit 32da200

File tree

6 files changed

+335
-0
lines changed

6 files changed

+335
-0
lines changed

.gitignore

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
# Python-generated files
2+
__pycache__/
3+
*.py[oc]
4+
build/
5+
dist/
6+
wheels/
7+
*.egg-info
8+
9+
# Virtual environments
10+
.venv

.python-version

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
3.12

README.md

Whitespace-only changes.

pyproject.toml

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
[project]
2+
name = "yaka-llm"
3+
version = "0.1.0"
4+
description = "Add your description here"
5+
readme = "README.md"
6+
authors = [
7+
{ name = "StickyCoolDev", email = "hujifa.info@gmaill.com" }
8+
]
9+
requires-python = ">=3.12"
10+
dependencies = []
11+
12+
[build-system]
13+
requires = ["uv_build>=0.9.15,<0.10.0"]
14+
build-backend = "uv_build"

src/yaka_llm/__init__.py

Lines changed: 310 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,310 @@
1+
from __future__ import annotations
2+
import json
3+
import time
4+
import inspect
5+
import urllib.request
6+
import urllib.error
7+
from typing import Any, Callable, Dict, List, Optional, Tuple
8+
9+
JSONSchema = Dict[str, Any]
10+
11+
def _pytype_to_json_schema(py_type: Any) -> str:
12+
"""Map simple python annotation types to JSON schema types.
13+
Falls back to "string" when unknown.
14+
"""
15+
if py_type in (int, float):
16+
return "number"
17+
if py_type is bool:
18+
return "boolean"
19+
if py_type is str:
20+
return "string"
21+
origin = getattr(py_type, "__origin__", None)
22+
if origin is not None:
23+
args = getattr(py_type, "__args__", ()) or ()
24+
if args:
25+
return _pytype_to_json_schema(args[0])
26+
return "string"
27+
28+
class GeminiModel:
    """Minimal Gemini chat loop with local tool (function) calling.

    Register plain Python functions as tools with the ``tool`` decorator;
    ``call`` then loops up to ``max_iterations`` rounds, executing any
    function calls the model requests, feeding the JSON-encoded results
    back, and returning the model's final text answer.

    How to use::

        gm = GeminiModel("gemini-2.5-flash", api_key="...")

        @gm.tool
        def add_numbers(a: float, b: float):
            '''Add two numbers a and b.'''  # docstring becomes the tool description
            return {"result": a + b}

        text = gm.call([], prompt="Add 5 and 7 using your tools", role="user")
    """
40+
41+
def __init__(self, model: str, api_key: str, max_iterations: int = 6, sleep_between: float = 0.2):
42+
self.model = model
43+
self.api_key = api_key
44+
self.max_iterations = max_iterations
45+
self.sleep_between = sleep_between
46+
47+
self._functions: Dict[str, Callable[..., Any]] = {}
48+
self._tools_declarations: List[Dict[str, Any]] = []
49+
50+
def tool(self, fn: Optional[Callable] = None, *, name: Optional[str] = None) -> Callable:
    """Decorator registering a function as a tool the model can call.

    Usage::

        @gm.tool
        def foo(...):
            '''Docstring used as the tool description'''
            ...

    The function is registered under its ``__name__`` by default, or under
    the explicit ``name`` keyword.  Both the bare ``@gm.tool`` form and the
    parameterized ``@gm.tool(name="...")`` form are supported.

    Returns:
        The original function unchanged (bare form), or the registering
        decorator (parameterized form).
    """

    def _register(f: Callable) -> Callable:
        tool_name = name or f.__name__
        self._functions[tool_name] = f
        # The Gemini declaration payload is derived from the whole registry
        # in one place; rebuilding here keeps it consistent.  (A previous
        # version also built a per-function `decl` dict that was never used.)
        self._rebuild_tools_declarations()
        return f

    if fn is None:
        # Parameterized form: @gm.tool(name="...") returns the decorator.
        return _register
    return _register(fn)
94+
95+
def _rebuild_tools_declarations(self) -> None:
    """Recompute the Gemini tool-declaration payload from the registry.

    Each registered function contributes one declaration whose parameter
    schema is derived from its signature: annotations become JSON-schema
    types (unannotated parameters default to string), and parameters
    without defaults are marked required.
    """
    declarations: List[Dict[str, Any]] = []
    for tool_name, func in self._functions.items():
        signature = inspect.signature(func)
        schema_props: Dict[str, Any] = {}
        mandatory: List[str] = []
        for arg_name, parameter in signature.parameters.items():
            annotation = str if parameter.annotation is inspect._empty else parameter.annotation
            schema_props[arg_name] = {"type": _pytype_to_json_schema(annotation)}
            if parameter.default is inspect._empty:
                mandatory.append(arg_name)
        declarations.append(
            {
                "name": tool_name,
                "description": func.__doc__ or "",
                "parameters": {
                    "type": "object",
                    "properties": schema_props,
                    "required": mandatory,
                },
            }
        )
    self._tools_declarations = [{"function_declarations": declarations}]
121+
122+
def _gemini_call_text(self, user_text: str) -> Dict[str, Any]:
    """POST *user_text* to the ``generateContent`` endpoint; return parsed JSON.

    The current tool declarations are attached to every request so the
    model may answer with a function call.

    Raises:
        RuntimeError: on an HTTP error status (including the response body
            in the message) or on any other network/parse failure.  The
            original exception is chained as ``__cause__``.
    """
    payload = {
        "contents": [{"parts": [{"text": user_text}]}],
        "tools": self._tools_declarations,
    }
    url = f"https://generativelanguage.googleapis.com/v1beta/models/{self.model}:generateContent?key={self.api_key}"
    data = json.dumps(payload).encode("utf-8")
    req = urllib.request.Request(url, data=data, headers={"Content-Type": "application/json"}, method="POST")
    try:
        with urllib.request.urlopen(req, timeout=30) as resp:
            raw = resp.read().decode("utf-8")
            return json.loads(raw)
    except urllib.error.HTTPError as e:
        body = e.read().decode("utf-8", errors="ignore")
        # Chain the cause so the original traceback is not lost.
        raise RuntimeError(f"HTTPError {e.code}: {e.reason}\n{body}") from e
    except Exception as e:
        raise RuntimeError(f"Network error: {e}") from e
140+
141+
@staticmethod
142+
def _extract_function_call_from_part(part: Any) -> Tuple[Optional[str], Optional[Dict[str, Any]]]:
143+
"""Return (name, args_dict) if this part includes a function call."""
144+
candidates = []
145+
if isinstance(part, dict):
146+
if "functionCall" in part:
147+
candidates.append(part["functionCall"])
148+
if "function_call" in part:
149+
candidates.append(part["function_call"])
150+
for v in part.values():
151+
if isinstance(v, dict) and ("name" in v and "args" in v):
152+
candidates.append(v)
153+
for c in candidates:
154+
if not isinstance(c, dict):
155+
continue
156+
name = c.get("name")
157+
args_raw = c.get("args")
158+
args = {}
159+
if isinstance(args_raw, dict):
160+
args = args_raw
161+
elif isinstance(args_raw, str):
162+
try:
163+
args = json.loads(args_raw)
164+
except Exception:
165+
try:
166+
args = eval(args_raw, {"__builtins__": {}})
167+
except Exception:
168+
args = {}
169+
else:
170+
args = {}
171+
return name, args
172+
return None, None
173+
174+
@staticmethod
175+
def _extract_text_from_response(resp: Dict[str, Any]) -> Optional[str]:
176+
candidates = resp.get("candidates", []) or []
177+
texts: List[str] = []
178+
for cand in candidates:
179+
content = cand.get("content", {}) or {}
180+
parts = content.get("parts", []) or []
181+
for part in parts:
182+
if isinstance(part, dict):
183+
if "text" in part:
184+
texts.append(part["text"])
185+
elif "content" in part and isinstance(part["content"], dict) and "text" in part["content"]:
186+
texts.append(part["content"]["text"])
187+
else:
188+
if "functionCall" not in part and "function_call" not in part:
189+
texts.append(json.dumps(part))
190+
elif isinstance(part, str):
191+
texts.append(part)
192+
return "\n".join(texts) if texts else None
193+
194+
@staticmethod
195+
def _convert_simple(v: Any) -> Any:
196+
if isinstance(v, str):
197+
try:
198+
if v.isdigit() or (v.startswith("-") and v[1:].isdigit()):
199+
return int(v)
200+
if any(ch.isdigit() for ch in v) and "." in v:
201+
return float(v)
202+
except Exception:
203+
pass
204+
return v
205+
206+
@staticmethod
def _normalize_args(fn: Callable, args: Any) -> Dict[str, Any]:
    """Coerce whatever argument structure the model produced into kwargs for *fn*.

    Handles ``None``, positional lists/tuples, dicts keyed by parameter
    name, and dicts keyed by positional index (``{"0": ..., "1": ...}``),
    applying ``_convert_simple`` to individual dict/scalar values.
    """
    param_names = list(inspect.signature(fn).parameters.keys())
    conv = GeminiModel._convert_simple

    if args is None:
        return {}

    # Positional sequence: pair with parameter names (values left as-is).
    if isinstance(args, (list, tuple)):
        return dict(zip(param_names, args))

    if isinstance(args, dict):
        # Purely index-keyed dict: treat it as a positional argument list.
        if all(str(k).isdigit() for k in args.keys()):
            ordered = sorted(((int(k), v) for k, v in args.items()), key=lambda kv: kv[0])
            return {
                param_names[i]: conv(v)
                for i, (_, v) in enumerate(ordered)
                if i < len(param_names)
            }
        # Mixed dict: map in-range digit keys onto positions, keep the rest.
        out: Dict[str, Any] = {}
        for key, value in args.items():
            if isinstance(key, str) and key.isdigit() and 0 <= int(key) < len(param_names):
                out[param_names[int(key)]] = conv(value)
            else:
                out[key] = conv(value)
        # Second pass: fill any still-missing parameter from its index key.
        for i, pname in enumerate(param_names):
            if pname not in out and str(i) in args:
                out[pname] = conv(args[str(i)])
        return out

    # Bare scalar: assign it to the first parameter, if there is one.
    return {param_names[0]: conv(args)} if param_names else {}
234+
235+
def call(self, history: List[str], prompt: str, role: str = "user") -> Optional[str]:
    """Drive the prompt/tool-call loop and return the model's final text.

    Args:
        history: previous messages; each is rendered best-effort as a
            ``User:`` line in the prompt.
        prompt: the new message to send.
        role: role label recorded for *prompt* (e.g. ``"user"``).

    Returns:
        The final assistant text, or ``None`` if the model produced no
        text within ``max_iterations`` rounds.
    """
    self._rebuild_tools_declarations()

    conversation: List[Dict[str, Any]] = [{"role": "user", "text": h} for h in history]
    conversation.append({"role": role, "text": prompt})

    for _round in range(self.max_iterations):
        # Render the structured conversation into a single text prompt.
        lines: List[str] = []
        for msg in conversation:
            if msg["role"] == "user":
                lines.append(f"User: {msg['text']}\n")
            elif msg["role"] == "assistant":
                fc = msg.get("function_call")
                if fc:
                    lines.append(f"Assistant (function_call): {fc.get('name')} args={json.dumps(fc.get('args'))}\n")
                else:
                    lines.append(f"Assistant: {msg['text']}\n")
            elif msg["role"] == "tool":
                tool_name = msg.get("name", "tool")
                lines.append(f"Tool {tool_name} returned: {msg['text']}\n")
        lines.append("\nInstruction: Continue the conversation above. Use the available tools if needed and, if you call a tool, respond with a function call. Otherwise provide the final answer.\n")

        resp = self._gemini_call_text("".join(lines))

        # Execute at most one requested tool this round, then re-prompt.
        function_called = False
        for cand in resp.get("candidates", []) or []:
            parts = (cand.get("content", {}) or {}).get("parts", []) or []
            for part in parts:
                fn_name, fn_args = self._extract_function_call_from_part(part)
                if not fn_name:
                    continue
                function_called = True
                conversation.append({"role": "assistant", "function_call": {"name": fn_name, "args": fn_args}})
                impl = self._functions.get(fn_name)
                if not impl:
                    tool_result: Any = {"error": f"function '{fn_name}' not implemented locally."}
                else:
                    try:
                        tool_result = impl(**self._normalize_args(impl, fn_args))
                    except TypeError as e:
                        tool_result = {"error": f"argument mismatch: {e}"}
                conversation.append({"role": "tool", "name": fn_name, "text": json.dumps(tool_result)})
                time.sleep(self.sleep_between)
                break
            if function_called:
                break

        if not function_called:
            final_text = self._extract_text_from_response(resp)
            if final_text:
                conversation.append({"role": "assistant", "text": final_text})
                return final_text
            return None

    # Iteration budget exhausted without a final text answer.
    return None
306+
307+
308+
# Public API of the package: only the model wrapper is exported.
__all__ = ["GeminiModel"]
309+
310+

src/yaka_llm/py.typed

Whitespace-only changes.

0 commit comments

Comments
 (0)