Skip to content

Commit a839b9f

Browse files
Authored — Merge pull request ricklamers#17 from dinosaurtirex/patch-2
Fixed imports
2 parents 9c62248 + 01b4b00 commit a839b9f

File tree

1 file changed

+7
-34
lines changed

1 file changed

+7
-34
lines changed

server/backend.py

Lines changed: 7 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,11 @@
1-
from datetime import datetime
2-
from requests import get, post
31
from flask import request
42

5-
from server.config import *
3+
from datetime import datetime
4+
from requests import get
5+
from requests import post
6+
7+
from server.config import models
8+
from server.config import special_instructions
69

710

811
class Backend_Api:
@@ -74,34 +77,6 @@ def _conversation(self):
7477
gpt_resp = post('https://www.sqlchat.ai/api/chat',
7578
headers=headers, json=data, stream=True)
7679

77-
# headers = {
78-
# 'authority': 'www.t3nsor.tech',
79-
# 'accept': '*/*',
80-
# 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
81-
# 'cache-control': 'no-cache',
82-
# 'content-type': 'application/json',
83-
# 'origin': 'https://www.t3nsor.tech',
84-
# 'pragma': 'no-cache',
85-
# 'referer': 'https://www.t3nsor.tech/',
86-
# 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
87-
# 'sec-ch-ua-mobile': '?0',
88-
# 'sec-ch-ua-platform': '"macOS"',
89-
# 'sec-fetch-dest': 'empty',
90-
# 'sec-fetch-mode': 'cors',
91-
# 'sec-fetch-site': 'same-origin',
92-
# 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
93-
# }
94-
95-
# gpt_resp = post('https://www.t3nsor.tech/api/chat', headers = headers, stream = True, json = {
96-
# 'model': {
97-
# 'id' : 'gpt-3.5-turbo',
98-
# 'name' : 'Default (GPT-3.5)'
99-
# },
100-
# 'messages' : conversation,
101-
# 'key' : '',
102-
# 'prompt' : system_message
103-
# })
104-
10580
def stream():
10681
answer = ''
10782
for chunk in gpt_resp.iter_content(chunk_size=1024):
@@ -116,9 +91,7 @@ def stream():
11691
print(e)
11792
print(e.__traceback__.tb_next)
11893
continue
119-
120-
# Thread(target=log, args = [ip_address, model, prompt['content'], answer]).start()
121-
94+
12295
return self.app.response_class(stream(), mimetype='text/event-stream')
12396

12497
except Exception as e:

0 commit comments

Comments (0)