Skip to content

Commit e2df27f

Browse files
author
StickyCoolDev
committed
Add chatgpt
1 parent 32da200 commit e2df27f

File tree

5 files changed

+283
-7
lines changed

5 files changed

+283
-7
lines changed

README.md

Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
1+
# YakaLLM
2+
A smaller, faster and cooler version of LangChain. The repo without `.git` is only 74 kB. It's like Preact for React, but nothing is backward compatible.
3+
4+
5+
## how to install
6+
pip:
7+
```
8+
pip install yaka-llm
9+
# or
10+
pip3 install yaka-llm
11+
```
12+
uv:
13+
```
14+
uv add yaka-llm
15+
# or
16+
uv pip install yaka-llm
17+
```
18+
19+
## Usage
20+
```
21+
from typing import List
22+
from yaka_llm.open_ai import ChatGPTModel
23+
# from yaka_llm import GeminiModel
24+
# (warning: this example code was generated with AI!)
25+
26+
gm = ChatGPTModel(
27+
model="openai/gpt-oss-20b:free",
28+
url=".../v1/chat/completions",
29+
api_key="",
30+
)
31+
32+
33+
# gm = GeminiModel("gemini-2.5-flash", "")
34+
def run_chat_system():
35+
36+
# 2. Register tools
37+
@gm.tool
38+
def add_numbers(a: float, b: float):
39+
"""Add two numbers a and b. Use this for all arithmetic addition requests."""
40+
return {"result": a + b}
41+
42+
print("🤖 LLM System Initialized. Type 'quit' or 'exit' to stop.")
43+
print("-" * 30)
44+
45+
# 3. Initialize History
46+
history: List[str] = []
47+
48+
# 4. Enter the Repeating Loop
49+
while True:
50+
try:
51+
# Read user input
52+
user_prompt = input("You: ")
53+
54+
# Check for exit commands
55+
if user_prompt.lower() in ("quit", "exit"):
56+
print("👋 Goodbye!")
57+
break
58+
59+
if not user_prompt.strip():
60+
continue
61+
62+
# Call the model
63+
assistant_response = gm.call(
64+
history=history, prompt=user_prompt, role="user"
65+
)
66+
67+
# Print the response and update history
68+
print(
69+
f"Assistant: {assistant_response or '... (No response or an error occurred)'}"
70+
)
71+
72+
# Update history for the next turn
73+
# The current `call` method implementation uses the history for context,
74+
# so we update it with the conversation turn.
75+
if assistant_response:
76+
# Add user prompt and assistant response to history
77+
# Note: The `call` method already formats the history internally,
78+
# so we just append the raw text of the turn.
79+
history.append(user_prompt)
80+
history.append(assistant_response)
81+
82+
print("-" * 30)
83+
84+
except KeyboardInterrupt:
85+
print("\n👋 Goodbye!")
86+
break
87+
except Exception as e:
88+
print(f"An error occurred: {e}")
89+
break
90+
91+
92+
if __name__ == "__main__":
93+
run_chat_system()
94+
```

pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[project]
22
name = "yaka-llm"
3-
version = "0.1.0"
4-
description = "Add your description here"
3+
version = "0.1.1"
4+
description = "Faster and smaller alternative to LangChain"
55
readme = "README.md"
66
authors = [
77
{ name = "StickyCoolDev", email = "hujifa.info@gmaill.com" }

src/yaka_llm/__init__.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,8 @@
55
import urllib.request
66
import urllib.error
77
from typing import Any, Callable, Dict, List, Optional, Tuple
8-
8+
from .open_ai import ChatGPTModel
9+
from . import open_ai
910
JSONSchema = Dict[str, Any]
1011

1112
def _pytype_to_json_schema(py_type: Any) -> str:
@@ -35,7 +36,7 @@ def add_numbers(a: float, b: float):
3536
'''Add two numbers a and b.''' #This Docstring is used as the description for the tool
3637
return {"result": a + b}
3738
38-
text = gm.call([], prompt="Add 5 and 7 using your tools", role="user")
39+
text = gm.call([], prompt="Add 58027934 and 7902783 using your tools", role="user")
3940
"""
4041

4142
def __init__(self, model: str, api_key: str, max_iterations: int = 6, sleep_between: float = 0.2):
@@ -305,6 +306,4 @@ def call(self, history: List[str], prompt: str, role: str = "user") -> Optional[
305306
return None
306307

307308

308-
__all__ = ["GeminiModel"]
309-
310-
309+
__all__ = ["GeminiModel", "ChatGPTModel"]

src/yaka_llm/open_ai.py

Lines changed: 175 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,175 @@
1+
import json
2+
import time
3+
import inspect
4+
from typing import Any, Callable, Dict, List, Optional, Tuple
5+
import urllib.request
6+
import urllib.error
7+
8+
JSONSchema = Dict[str, Any]
9+
10+
11+
def _pytype_to_json_schema(py_type: Any) -> str:
12+
if py_type in (int, float):
13+
return "number"
14+
if py_type is bool:
15+
return "boolean"
16+
if py_type is str:
17+
return "string"
18+
return "string"
19+
20+
21+
class ChatGPTModel:
    """OpenAI / OpenRouter-compatible chat client with a tool-calling loop.

    Tools are plain Python callables registered via the :meth:`tool`
    decorator; their signatures are reflected into JSON-schema function
    declarations and offered to the model on every :meth:`call`.
    """

    # Python annotation -> JSON-schema type name for tool parameters.
    # Unknown annotations fall back to "string".
    _JSON_TYPES: Dict[Any, str] = {int: "number", float: "number", bool: "boolean", str: "string"}

    def __init__(
        self,
        model: str,
        api_key: str,
        url: str,
        max_iterations: int = 6,
        sleep_between: float = 0.2,
    ):
        """
        Args:
            model: provider model id, e.g. ``"openai/gpt-oss-20b:free"``.
            api_key: bearer token sent in the Authorization header.
            url: full chat-completions endpoint (``.../v1/chat/completions``).
            max_iterations: cap on request/tool-execution rounds per call().
            sleep_between: pause (seconds) after each tool round.
        """
        self.model = model
        self.api_key = api_key
        self.url = url
        self.max_iterations = max_iterations
        self.sleep_between = sleep_between

        self._functions: Dict[str, Callable[..., Any]] = {}
        self._tools: List[Dict[str, Any]] = []

    # ------------------------------------------------------------------ TOOLS

    def tool(self, fn: Optional[Callable] = None, *, name: Optional[str] = None):
        """Register a callable as a tool.

        Usable bare (``@gm.tool``) or with a custom name
        (``@gm.tool(name="add")``). The function's docstring becomes the
        tool description.
        """
        def register(f: Callable):
            tool_name = name or f.__name__
            self._functions[tool_name] = f
            self._rebuild_tools()
            return f

        return register(fn) if fn else register

    def _rebuild_tools(self):
        """Regenerate the OpenAI ``tools`` payload from registered functions."""
        tools: List[Dict[str, Any]] = []

        for tool_name, fn in self._functions.items():
            sig = inspect.signature(fn)
            properties: Dict[str, Any] = {}
            required: List[str] = []

            for pname, param in sig.parameters.items():
                # Unannotated parameters are advertised as strings.
                # (Uses the public inspect.Parameter.empty, not inspect._empty.)
                ann = param.annotation if param.annotation is not inspect.Parameter.empty else str
                properties[pname] = {"type": self._JSON_TYPES.get(ann, "string")}
                # Parameters without defaults are required.
                if param.default is inspect.Parameter.empty:
                    required.append(pname)

            tools.append(
                {
                    "type": "function",
                    "function": {
                        "name": tool_name,
                        "description": fn.__doc__ or "",
                        "parameters": {
                            "type": "object",
                            "properties": properties,
                            "required": required,
                        },
                    },
                }
            )

        self._tools = tools

    # ------------------------------------------------------------- HTTP CALL

    def _post(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """POST *payload* as JSON to ``self.url`` and return the decoded reply.

        Raises:
            RuntimeError: on HTTP error status or any network failure
                (original exception chained for debuggability).
        """
        data = json.dumps(payload).encode()
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
        }

        req = urllib.request.Request(self.url, data=data, headers=headers, method="POST")

        try:
            with urllib.request.urlopen(req, timeout=30) as resp:
                return json.loads(resp.read().decode())
        except urllib.error.HTTPError as e:
            body = e.read().decode(errors="ignore")
            raise RuntimeError(f"HTTP {e.code}: {body}") from e
        except Exception as e:
            raise RuntimeError(f"Network error: {e}") from e

    # --------------------------------------------------------- TOOL EXECUTION

    def _execute_tool(self, name: str, args: Dict[str, Any]) -> Dict[str, Any]:
        """Run a registered tool; never raises — errors come back as a dict."""
        fn = self._functions.get(name)
        if not fn:
            return {"error": f"Tool '{name}' not registered"}

        try:
            return fn(**args)
        except Exception as e:
            return {"error": str(e)}

    # ------------------------------------------------------------------ CALL

    def call(self, history: List[str], prompt: str, role: str = "user") -> Optional[str]:
        """Send *prompt* (with *history* as context) and return the reply text.

        Runs up to ``max_iterations`` request rounds: whenever the model
        responds with tool calls, they are executed locally and their results
        appended to the conversation before re-querying. Returns the first
        final text content, or None if the model never produced any.
        """
        self._rebuild_tools()

        # NOTE(review): every history entry is sent with role "user", even
        # turns that callers appended as assistant replies — confirm intended.
        messages: List[Dict[str, Any]] = [{"role": "user", "content": h} for h in history]
        messages.append({"role": role, "content": prompt})

        for _ in range(self.max_iterations):
            payload: Dict[str, Any] = {"model": self.model, "messages": messages}
            if self._tools:
                payload["tools"] = self._tools
                payload["tool_choice"] = "auto"

            resp = self._post(payload)
            choices = resp.get("choices", [])
            if not choices:
                # Fix: an empty "choices" list previously left `msg` unbound
                # (NameError). Treat it as "no answer".
                return None

            # Only the first choice is consulted (requests use the default n=1).
            msg = choices[0].get("message", {})
            tool_calls = msg.get("tool_calls")

            if tool_calls:
                # Fix: append the assistant's tool-call message ONCE (the
                # original appended it once per tool call, duplicating it),
                # then one "tool" result message per call.
                messages.append(msg)
                for tc in tool_calls:
                    fn_name = tc["function"]["name"]
                    raw_args = tc["function"].get("arguments", "{}")
                    try:
                        args = json.loads(raw_args)
                    except json.JSONDecodeError as e:
                        # Fix: malformed argument JSON is reported back to the
                        # model as a tool error instead of crashing the call.
                        result: Dict[str, Any] = {"error": f"Invalid arguments JSON: {e}"}
                    else:
                        result = self._execute_tool(fn_name, args)
                    messages.append(
                        {
                            "role": "tool",
                            "tool_call_id": tc["id"],
                            "content": json.dumps(result),
                        }
                    )
                time.sleep(self.sleep_between)
                continue  # re-query with the tool results in context

            # ---------------- FINAL TEXT
            content = msg.get("content")
            if content:
                messages.append(msg)
                return content
            return None

        # Iteration budget exhausted while the model kept calling tools.
        return None
173+
174+
175+
# Public API of this module.
__all__ = ["ChatGPTModel"]

uv.lock

Lines changed: 8 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)