
Commit fd63987

fix: Remove yepchat and opengpt providers
fix: Remove provider promo in Blackboxai's responses
refactor: Format code with black
fix: Other minor bugs
1 parent 5c361e4 commit fd63987

File tree

15 files changed (+94, -1150 lines)


docs/README.md

Lines changed: 1 addition & 20 deletions
@@ -75,7 +75,6 @@ The name *python-tgpt* draws inspiration from its parent project [tgpt](https://
 These are simply the hosts of the LLMs, they include:
 
 - [Koboldai](https://koboldai-koboldcpp-tiefighter.hf.space)
-- [OpenGPTs](https://opengpts-example-vz4y4ooboq-uc.a.run.app/)
 - [OpenAI](https://chat.openai.com) *(API key required)*
 - [Phind](https://www.phind.com)
 - [Blackboxai](https://www.blackbox.ai)
@@ -299,24 +298,6 @@ print(bot.chat("<Your-prompt>"))
 
 </details>
 
-
-<details>
-
-<summary>
-Opengpt
-
-</summary>
-
-```python
-import pytgpt.opengpt as opengpt
-bot = opengpt.OPENGPT()
-print(bot.chat("<Your-prompt>"))
-```
-
-</details>
-
-<details>
-
 <summary>
 phind
 
@@ -349,7 +330,7 @@ print(bot.chat("<Your-prompt>"))
 
 **Version 0.7.0** introduces asynchronous implementation to almost all providers except a few such as *perplexity*, which relies on other libraries which lacks such implementation.
 
-To make it easier, you just have to prefix `Async` to the common synchronous class name. For instance `OPENGPT` will be accessed as `AsyncOPENGPT`:
+To make it easier, you just have to prefix `Async` to the common synchronous class name. For instance `PHIND` will be accessed as `AsyncPHIND`:
 
 #### Streaming Whole ai response.
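
The `Async` prefix convention described in the README hunk above can be illustrated with a minimal sketch. It assumes `AsyncPHIND` mirrors the synchronous `PHIND` constructor and that its `chat` method is awaitable; the snippet is not taken from the repository itself:

```python
import asyncio

import pytgpt.phind as phind


async def main() -> None:
    # Same constructor as the synchronous PHIND; only the class name
    # carries the "Async" prefix.
    bot = phind.AsyncPHIND()
    # Assumption: chat() is awaitable on the async variant.
    print(await bot.chat("<Your-prompt>"))


asyncio.run(main())
```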

src/pytgpt/__init__.py

Lines changed: 0 additions & 2 deletions
@@ -13,7 +13,6 @@
 tgpt_providers = [
     "auto",
     "openai",
-    "opengpt",
     "koboldai",
     "phind",
     "blackboxai",
@@ -22,7 +21,6 @@
     "poe",
     "groq",
     "perplexity",
-    "yepchat",
     "novita",
 ]

src/pytgpt/async_providers.py

Lines changed: 0 additions & 6 deletions
@@ -1,6 +1,4 @@
 from pytgpt.phind import AsyncPHIND
-from pytgpt.yepchat import AsyncYEPCHAT
-from pytgpt.opengpt import AsyncOPENGPT
 from pytgpt.openai import AsyncOPENAI
 from pytgpt.koboldai import AsyncKOBOLDAI
 from pytgpt.groq import AsyncGROQ
@@ -10,20 +8,16 @@
 
 mapper: dict[str, object] = {
     "phind": AsyncPHIND,
-    "opengpt": AsyncOPENGPT,
     "koboldai": AsyncKOBOLDAI,
     "blackboxai": AsyncBLACKBOXAI,
     "gpt4free": AsyncGPT4FREE,
-    "yepchat": AsyncYEPCHAT,
     "groq": AsyncGROQ,
     "openai": AsyncOPENAI,
     "novita": AsyncNOVITA,
 }
 
 tgpt_mapper: dict[str, object] = {
     "phind": AsyncPHIND,
-    "opengpt": AsyncOPENGPT,
     "koboldai": AsyncKOBOLDAI,
     "blackboxai": AsyncBLACKBOXAI,
-    "yepchat": AsyncYEPCHAT,
 }

src/pytgpt/auto/main.py

Lines changed: 2 additions & 9 deletions
@@ -1,5 +1,4 @@
 from pytgpt.base import Provider, AsyncProvider
-from pytgpt.opengpt import OPENGPT, AsyncOPENGPT
 from pytgpt.koboldai import KOBOLDAI, AsyncKOBOLDAI
 from pytgpt.phind import PHIND, AsyncPHIND
 from pytgpt.blackboxai import BLACKBOXAI, AsyncBLACKBOXAI
@@ -16,12 +15,9 @@
 import logging
 
 
-provider_map: dict[
-    str, Union[OPENGPT, KOBOLDAI, PHIND, BLACKBOXAI, PERPLEXITY, GPT4FREE]
-] = {
+provider_map: dict[str, Union[KOBOLDAI, PHIND, BLACKBOXAI, PERPLEXITY, GPT4FREE]] = {
     "phind": PHIND,
     "perplexity": PERPLEXITY,
-    "opengpt": OPENGPT,
     "koboldai": KOBOLDAI,
     "blackboxai": BLACKBOXAI,
     "gpt4free": GPT4FREE,
@@ -56,9 +52,7 @@ def __init__(
             act (str|int, optional): Awesome prompt key or index. (Used as intro). Defaults to None.
             exclude(list[str], optional): List of providers to be excluded. Defaults to [].
         """
-        self.provider: Union[
-            OPENGPT, KOBOLDAI, PHIND, BLACKBOXAI, PERPLEXITY, GPT4FREE
-        ] = None
+        self.provider: Union[KOBOLDAI, PHIND, BLACKBOXAI, PERPLEXITY, GPT4FREE] = None
         self.provider_name: str = None
         self.is_conversation = is_conversation
         self.max_tokens = max_tokens
@@ -263,7 +257,6 @@ def __init__(
             exclude(list[str], optional): List of providers to be excluded. Defaults to [].
         """
         self.provider: Union[
-            AsyncOPENGPT,
             AsyncKOBOLDAI,
             AsyncPHIND,
             AsyncBLACKBOXAI,
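
For orientation, the trimmed `provider_map` and the documented `exclude` parameter can be read together as a simple fallback walk. The sketch below is illustrative only: the helper name `pick_first_working` and the loop are assumptions, not the package's actual AUTO logic; it only assumes the mapped classes share the `Provider` interface with a `chat(prompt)` method.

```python
# Illustrative only: walk the provider map in order, honouring an exclude
# list, and return the first response that succeeds.
def pick_first_working(provider_map: dict, prompt: str, exclude: tuple[str, ...] = ()) -> str:
    for name, provider_cls in provider_map.items():
        if name in exclude:
            continue  # provider explicitly excluded by the caller
        try:
            # Assumption: every mapped class exposes chat(prompt) via the
            # shared Provider interface.
            return provider_cls().chat(prompt)
        except Exception:
            continue  # this host failed; fall through to the next one
    raise RuntimeError("All providers failed or were excluded")
```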

src/pytgpt/blackboxai/main.py

Lines changed: 20 additions & 4 deletions
@@ -1,3 +1,4 @@
+import re
 import json
 import httpx
 import requests
@@ -12,6 +13,10 @@
 
 default_model = None
 
+provider_promo_text = (
+    r"Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai"
+)
+
 
 class BLACKBOXAI(Provider):
     def __init__(
@@ -153,8 +158,13 @@ def for_stream():
                 try:
                     if bool(value):
                         streaming_text += value + ("\n" if stream else "")
-
-                        resp = dict(text=streaming_text)
+                        resp = dict(
+                            text=(
+                                re.sub(provider_promo_text, "", streaming_text)
+                                if provider_promo_text in streaming_text
+                                else streaming_text
+                            ).strip()
+                        )
                         self.last_response.update(resp)
                         yield value if raw else resp
                 except json.decoder.JSONDecodeError:
@@ -352,7 +362,13 @@ async def for_stream():
                 try:
                     if bool(value):
                         streaming_text += value + ("\n" if stream else "")
-                        resp = dict(text=streaming_text)
+                        resp = dict(
+                            text=(
+                                re.sub(provider_promo_text, "", streaming_text)
+                                if provider_promo_text in streaming_text
+                                else streaming_text
+                            ).strip()
+                        )
                         self.last_response.update(resp)
                         yield value if raw else resp
                 except json.decoder.JSONDecodeError:
@@ -421,7 +437,7 @@ async def get_message(self, response: dict) -> str:
     bot = BLACKBOXAI()
 
     def main():
-        resp = bot.ask("hello")
+        resp = bot.ask("hello", True)
         for value in resp:
             print(value)
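
The promo-stripping logic added above can also be read in isolation. In this sketch the pattern is copied from the diff, while the `strip_promo` helper name and the sample input are illustrative only:

```python
import re

# Pattern copied from the diff; it doubles as a plain substring check and
# as the regular-expression pattern handed to re.sub.
provider_promo_text = (
    r"Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai"
)


def strip_promo(streaming_text: str) -> str:
    """Drop the provider promo from accumulated response text, then trim."""
    return (
        re.sub(provider_promo_text, "", streaming_text)
        if provider_promo_text in streaming_text
        else streaming_text
    ).strip()


# The promo line is removed, the real answer is kept.
print(strip_promo("2 + 2 = 4\nGenerated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai"))
# -> 2 + 2 = 4
```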

src/pytgpt/console.py

Lines changed: 40 additions & 58 deletions
@@ -28,7 +28,7 @@
 
 from typing import Iterable
 
-#pytgpt
+# pytgpt
 
 from pytgpt.utils import Optimizers
 from pytgpt.utils import default_path
@@ -344,6 +344,7 @@ def main(*args, **kwargs):
 
         return decorator
 
+
 class CustomCompleter(Completer):
     """Suggests query based on user prompts"""
 
@@ -371,14 +372,15 @@ def get_completions(self, document: Document, complete_event):
                     )
                 return completions
             for count, suggestion in enumerate(
-                suggest_query(word, timeout=2, die_silently=True),
-                start=1):
+                suggest_query(word, timeout=2, die_silently=True), start=1
+            ):
                 completions.append(Completion(suggestion, start_position=-len(word)))
                 if count >= self.suggestions_limit:
                     break
             return completions
         return []
 
+
 class Main(cmd.Cmd):
     intro = (
         "Welcome to AI Chat in terminal. "
@@ -506,21 +508,6 @@ def __init__(
                 act=awesome_prompt,
             )
 
-        elif provider == "opengpt":
-            from pytgpt.opengpt import OPENGPT
-
-            self.bot = OPENGPT(
-                is_conversation=disable_conversation,
-                max_tokens=max_tokens,
-                timeout=timeout,
-                intro=intro,
-                filepath=filepath,
-                update_file=update_file,
-                proxies=proxies,
-                history_offset=history_offset,
-                act=awesome_prompt,
-            )
-
         elif provider == "koboldai":
             from pytgpt.koboldai import KOBOLDAI
 
@@ -571,26 +558,6 @@ def __init__(
                 act=awesome_prompt,
             )
 
-        elif provider == "yepchat":
-            from pytgpt.yepchat import main as yepchat
-
-            self.bot = yepchat.YEPCHAT(
-                is_conversation=disable_conversation,
-                max_tokens=max_tokens,
-                temperature=temperature,
-                presence_penalty=top_p,
-                frequency_penalty=top_k,
-                top_p=top_p,
-                model=getOr(model, yepchat.model),
-                timeout=timeout,
-                intro=intro,
-                filepath=filepath,
-                update_file=update_file,
-                proxies=proxies,
-                history_offset=history_offset,
-                act=awesome_prompt,
-            )
-
         elif provider == "gpt4all":
             assert auth, (
                 "Path to LLM (.gguf or .bin) file is required. "
@@ -743,20 +710,35 @@ def __init__(
         self.path_to_last_response_audio = None
         if not non_interactive:
            self.completer_session = PromptSession(
-                "",
-                completer=ThreadedCompleter(
-                    CustomCompleter(
-                        self,
-                        suggestions_limit,
-                        [
-                            "cd", "copy_this", "h", "last_response", "rawdog",
-                            "settings", "with_copied",
-                            "clear", "exec", "help", "load", "reread", "shell",
-                            "code", "exit", "history", "new_intro", "reset", "sys",
-                        ],
-                    )
-                ),
-            )
+                "",
+                completer=ThreadedCompleter(
+                    CustomCompleter(
+                        self,
+                        suggestions_limit,
+                        [
+                            "cd",
+                            "copy_this",
+                            "h",
+                            "last_response",
+                            "rawdog",
+                            "settings",
+                            "with_copied",
+                            "clear",
+                            "exec",
+                            "help",
+                            "load",
+                            "reread",
+                            "shell",
+                            "code",
+                            "exit",
+                            "history",
+                            "new_intro",
+                            "reset",
+                            "sys",
+                        ],
+                    )
+                ),
+            )
         self.__init_time = time.time()
         self.__start_time = time.time()
         self.__end_time = time.time()
@@ -787,7 +769,7 @@ def find_range(start, end, hms: bool = False):
             f"~[`{Fore.LIGHTWHITE_EX}🕒{Fore.BLUE}{current_time}-`"
             f"{Fore.LIGHTWHITE_EX}💻{Fore.RED}{find_range(self.__init_time, time.time(), True)}-`"
             f"{Fore.LIGHTWHITE_EX}{Fore.YELLOW}{find_range(self.__start_time, self.__end_time)}s]`"
-            # f"\n╰─>"
+            # f"\n╰─>"
         )
         whitelist = ["[", "]", "~", "-", "(", ")"]
         for character in whitelist:
@@ -800,8 +782,9 @@ def find_range(start, end, hms: bool = False):
             f"~[🕒{current_time}"
             f"-💻{find_range(self.__init_time, time.time(), True)}"
             f"-⚡{find_range(self.__start_time, self.__end_time)}s]"
-            #"\n╰─>"
+            # "\n╰─>"
         )
+
     def cmdloop(self, intro=None):
         """Repeatedly issue a prompt, accept input, parse an initial prefix
         off the received input, and dispatch to action methods, passing them
@@ -864,7 +847,6 @@ def cmdloop(self, intro=None):
         except ImportError:
             pass
 
-
     def output_bond(
         self,
         title: str,
@@ -1470,7 +1452,7 @@ class ChatInteractive:
     ),
 )
 @click.option(
-    '-sl',
+    "-sl",
     "--suggestions-limit",
    type=click.INT,
    help="Prompt suggestions limit - 0 to disable suggestion",
@@ -1625,7 +1607,7 @@ def interactive(
         internal_exec=internal_exec,
         confirm_script=confirm_script,
         interpreter=interpreter,
-        suggestions_limit=suggestions_limit
+        suggestions_limit=suggestions_limit,
     )
     busy_bar.spin_index = busy_bar_index
     bot.code_theme = code_theme
@@ -1925,7 +1907,7 @@ def generate(
         internal_exec=internal_exec,
         confirm_script=confirm_script,
         interpreter=interpreter,
-        non_interactive=True
+        non_interactive=True,
     )
     prompt = prompt if prompt else ""
     copied_placeholder = "{{copied}}"

src/pytgpt/opengpt/__init__.py

Lines changed: 0 additions & 4 deletions
This file was deleted.

0 commit comments
