Skip to content

Commit bfe0793

Browse files
committed
[0.5.6] protobuf removed as a dependency
1 parent 60ffc87 commit bfe0793

File tree

11 files changed

+66
-13
lines changed

11 files changed

+66
-13
lines changed

docs/changelog.rst

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,17 @@ minor versions.
77

88
All relevant steps to be taken will be mentioned here.
99

10+
0.5.6
11+
-----
12+
13+
- Remove protobuf as a dependency because a bunch of other packages break. The functions are still present
14+
15+
0.5.5
16+
-----
17+
18+
- In all implementations of ``tuneapi.types.chats.ModelInterface`` add new input to the API endpoints called ``extra_headers``
19+
which is a dictionary to update the outgoing headers.
20+
1021
0.5.4
1122
-----
1223

docs/conf.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
project = "tuneapi"
1414
copyright = "2024, Frello Technologies"
1515
author = "Frello Technologies"
16-
release = "0.5.4"
16+
release = "0.5.6"
1717

1818
# -- General configuration ---------------------------------------------------
1919
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

pyproject.toml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "tuneapi"
3-
version = "0.5.4"
3+
version = "0.5.6"
44
description = "Tune AI APIs."
55
authors = ["Frello Technology Private Limited <[email protected]>"]
66
license = "MIT"
@@ -12,16 +12,18 @@ python = "^3.10"
1212
fire = "0.5.0"
1313
requests = "^2.31.0"
1414
cloudpickle = "3.0.0"
15-
protobuf = "^5.27.3"
1615
cryptography = ">=42.0.5"
1716
tqdm = "^4.66.1"
1817
snowflake_id = "1.0.2"
1918
nutree = "0.8.0"
2019
pillow = "^10.2.0"
20+
protobuf = { version = "^5.27.3", optional = true }
2121
boto3 = { version = "1.29.6", optional = true }
2222

2323
[tool.poetry.extras]
2424
boto3 = ["boto3"]
25+
protobuf = ["protobuf"]
26+
all = ["boto3", "protobuf"]
2527

2628
# [tool.poetry.scripts]
2729
# tuneapi = "tuneapi.__main__:main"

tuneapi/apis/model_anthropic.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,7 @@ def chat(
114114
temperature: Optional[float] = None,
115115
token: Optional[str] = None,
116116
return_message: bool = False,
117+
extra_headers: Optional[Dict[str, str]] = None,
117118
**kwargs,
118119
):
119120
output = ""
@@ -124,6 +125,7 @@ def chat(
124125
max_tokens=max_tokens,
125126
temperature=temperature,
126127
token=token,
128+
extra_headers=extra_headers,
127129
raw=False,
128130
**kwargs,
129131
):
@@ -147,6 +149,7 @@ def stream_chat(
147149
timeout=(5, 30),
148150
raw: bool = False,
149151
debug: bool = False,
152+
extra_headers: Optional[Dict[str, str]] = None,
150153
**kwargs,
151154
) -> Any:
152155

@@ -156,6 +159,8 @@ def stream_chat(
156159
for t in tools:
157160
t["input_schema"] = t.pop("parameters")
158161
headers, system, claude_messages = self._process_input(chats=chats, token=token)
162+
if extra_headers:
163+
headers.update(extra_headers)
159164

160165
data = {
161166
"model": model or self.model_id,

tuneapi/apis/model_gemini.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
import json
99
import requests
10-
from typing import Optional, Any
10+
from typing import Optional, Any, Dict
1111

1212
import tuneapi.utils as tu
1313
import tuneapi.types as tt
@@ -114,7 +114,7 @@ def chat(
114114
temperature: float = 1,
115115
token: Optional[str] = None,
116116
timeout=None,
117-
raw: bool = False,
117+
extra_headers: Optional[Dict[str, str]] = None,
118118
**kwargs,
119119
) -> Any:
120120
output = ""
@@ -125,7 +125,8 @@ def chat(
125125
temperature=temperature,
126126
token=token,
127127
timeout=timeout,
128-
raw=raw,
128+
extra_headers=extra_headers,
129+
raw=False,
129130
**kwargs,
130131
):
131132
if isinstance(x, dict):
@@ -144,12 +145,15 @@ def stream_chat(
144145
timeout=(5, 60),
145146
raw: bool = False,
146147
debug: bool = False,
148+
extra_headers: Optional[Dict[str, str]] = None,
147149
**kwargs,
148150
):
149151
tools = []
150152
if isinstance(chats, tt.Thread):
151153
tools = [x.to_dict() for x in chats.tools]
152154
headers, system, messages, params = self._process_input(chats, token)
155+
if extra_headers:
156+
headers.update(extra_headers)
153157
data = {
154158
"systemInstruction": {
155159
"parts": [{"text": system}],

tuneapi/apis/model_groq.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,7 @@ def chat(
9696
temperature: float = 1,
9797
token: Optional[str] = None,
9898
timeout=(5, 30),
99+
extra_headers: Optional[Dict[str, str]] = None,
99100
**kwargs,
100101
) -> str | Dict[str, Any]:
101102
output = ""
@@ -106,6 +107,8 @@ def chat(
106107
temperature=temperature,
107108
token=token,
108109
timeout=timeout,
110+
extra_headers=extra_headers,
111+
raw=False,
109112
**kwargs,
110113
):
111114
if isinstance(x, dict):
@@ -123,12 +126,15 @@ def stream_chat(
123126
token: Optional[str] = None,
124127
timeout=(5, 60),
125128
debug: bool = False,
129+
extra_headers: Optional[Dict[str, str]] = None,
126130
raw: bool = False,
127131
):
128132
tools = []
129133
if isinstance(chats, tt.Thread):
130134
tools = [{"type": "function", "function": x.to_dict()} for x in chats.tools]
131135
headers, messages = self._process_input(chats, token)
136+
if extra_headers:
137+
headers.update(extra_headers)
132138
data = {
133139
"temperature": temperature,
134140
"messages": messages,

tuneapi/apis/model_mistral.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -99,6 +99,7 @@ def chat(
9999
temperature: float = 1,
100100
token: Optional[str] = None,
101101
timeout=(5, 30),
102+
extra_headers: Optional[Dict[str, str]] = None,
102103
**kwargs,
103104
) -> str | Dict[str, Any]:
104105
output = ""
@@ -109,6 +110,8 @@ def chat(
109110
temperature=temperature,
110111
token=token,
111112
timeout=timeout,
113+
extra_headers=extra_headers,
114+
raw=False,
112115
**kwargs,
113116
):
114117
if isinstance(x, dict):
@@ -127,11 +130,14 @@ def stream_chat(
127130
timeout=(5, 60),
128131
raw: bool = False,
129132
debug: bool = False,
133+
extra_headers: Optional[Dict[str, str]] = None,
130134
):
131135
tools = []
132136
if isinstance(chats, Thread):
133137
tools = [{"type": "function", "function": x.to_dict()} for x in chats.tools]
134138
headers, messages = self._process_input(chats, token)
139+
if extra_headers:
140+
headers.update(extra_headers)
135141
data = {
136142
"messages": messages,
137143
"model": model or self.model_id,

tuneapi/apis/model_openai.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,8 @@
66

77
import json
88
import requests
9-
from typing import Optional, Any, List
109

10+
from typing import Optional, Any, List, Dict
1111

1212
import tuneapi.utils as tu
1313
import tuneapi.types as tt
@@ -98,6 +98,7 @@ def chat(
9898
max_tokens: int = 1024,
9999
temperature: float = 1,
100100
token: Optional[str] = None,
101+
extra_headers: Optional[Dict[str, str]] = None,
101102
**kwargs,
102103
) -> Any:
103104
output = ""
@@ -107,6 +108,8 @@ def chat(
107108
max_tokens=max_tokens,
108109
temperature=temperature,
109110
token=token,
111+
extra_headers=extra_headers,
112+
raw=False,
110113
**kwargs,
111114
):
112115
if isinstance(x, dict):
@@ -123,10 +126,13 @@ def stream_chat(
123126
temperature: float = 1,
124127
token: Optional[str] = None,
125128
timeout=(5, 60),
126-
raw: bool = False,
129+
extra_headers: Optional[Dict[str, str]] = None,
127130
debug: bool = False,
131+
raw: bool = False,
128132
):
129133
headers, messages = self._process_input(chats, token)
134+
if extra_headers:
135+
headers.update(extra_headers)
130136
data = {
131137
"temperature": temperature,
132138
"messages": messages,
@@ -191,11 +197,14 @@ def embedding(
191197
token: Optional[str] = None,
192198
timeout=(5, 60),
193199
raw: bool = False,
200+
extra_headers: Optional[Dict[str, str]] = None,
194201
):
195202
"""If you pass a list then returned items are in the insertion order"""
196203
text = []
197204

198205
headers = self._process_header(token)
206+
if extra_headers:
207+
headers.update(extra_headers)
199208
if isinstance(chats, tt.Thread):
200209
_, messages = self._process_input(chats, token)
201210
for i, m in enumerate(messages):

tuneapi/apis/model_tune.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,7 @@ def chat(
115115
token: Optional[str] = None,
116116
timeout=(5, 60),
117117
stop: Optional[List[str]] = None,
118+
extra_headers: Optional[Dict[str, str]] = None,
118119
**kwargs,
119120
) -> str | Dict[str, Any]:
120121
output = ""
@@ -126,6 +127,8 @@ def chat(
126127
token=token,
127128
timeout=timeout,
128129
stop=stop,
130+
extra_headers=extra_headers,
131+
raw=False,
129132
**kwargs,
130133
):
131134
if isinstance(x, dict):
@@ -145,13 +148,16 @@ def stream_chat(
145148
stop: Optional[List[str]] = None,
146149
raw: bool = False,
147150
debug: bool = False,
151+
extra_headers: Optional[Dict[str, str]] = None,
148152
):
149153
model = model or self.model_id
150154
if not model:
151155
raise Exception(
152156
"Tune Model ID not found. Please set TUNEAPI_MODEL environment variable or pass through function"
153157
)
154158
headers, messages = self._process_input(chats, token)
159+
if extra_headers:
160+
headers.update(extra_headers)
155161
data = {
156162
"temperature": temperature,
157163
"messages": messages,

tuneapi/utils/misc.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
1010

1111
from datetime import datetime, timezone, timedelta
12-
from google.protobuf.timestamp_pb2 import Timestamp as Timestamp_pb
1312

1413
from tuneapi.utils.logger import logger
1514
from tuneapi.utils.serdeser import to_json
@@ -54,7 +53,9 @@ def get_now_human(date: bool = True, tz=None) -> str:
5453
dt = dt.astimezone(tz)
5554
return dt.strftime(fmt_str)
5655

57-
def get_now_pb() -> Timestamp_pb:
56+
def get_now_pb():
57+
from google.protobuf.timestamp_pb2 import Timestamp as Timestamp_pb
58+
5859
ts = Timestamp_pb()
5960
ts.GetCurrentTime()
6061
return ts

0 commit comments

Comments
 (0)