Skip to content

Commit d3a2174

Browse files
committed
Remove rarely used options
1 parent 6e20d04 commit d3a2174

File tree

6 files changed

+5
-26
lines changed

6 files changed

+5
-26
lines changed

chat.py

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -34,7 +34,6 @@
3434
response = client.chat.completions.create(
3535
model=MODEL_NAME,
3636
temperature=0.7,
37-
n=1,
3837
messages=[
3938
{"role": "system", "content": "You are a helpful assistant that makes lots of cat references and uses emojis."},
4039
{"role": "user", "content": "Write a haiku about a hungry cat who wants tuna"},

chat_async.py

Lines changed: 1 addition & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -42,12 +42,7 @@ async def generate_response(location):
4242
"content": f"Name a single place I should visit on my trip to {location} and describe in one sentence",
4343
},
4444
],
45-
temperature=1,
46-
max_tokens=400,
47-
top_p=0.95,
48-
frequency_penalty=0,
49-
presence_penalty=0,
50-
stop=None,
45+
temperature=0.7,
5146
)
5247
print("Got response for ", location)
5348
return response.choices[0].message.content

chat_history.py

Lines changed: 1 addition & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -40,12 +40,7 @@
4040
response = client.chat.completions.create(
4141
model=MODEL_NAME,
4242
messages=messages,
43-
temperature=1,
44-
max_tokens=400,
45-
top_p=0.95,
46-
frequency_penalty=0,
47-
presence_penalty=0,
48-
stop=None,
43+
temperature=0.5,
4944
)
5045
bot_response = response.choices[0].message.content
5146
messages.append({"role": "assistant", "content": bot_response})

chat_history_stream.py

Lines changed: 1 addition & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -40,13 +40,7 @@
4040
response = client.chat.completions.create(
4141
model=MODEL_NAME,
4242
messages=messages,
43-
temperature=1,
44-
max_tokens=400,
45-
top_p=0.95,
46-
frequency_penalty=0,
47-
presence_penalty=0,
48-
stop=None,
49-
stream=True,
43+
temperature=0.7,
5044
)
5145

5246
print("\nAnswer: ")

chat_safety.py

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -32,8 +32,6 @@
3232
response = client.chat.completions.create(
3333
model=MODEL_NAME,
3434
temperature=0.7,
35-
max_tokens=100,
36-
n=1,
3735
messages=[
3836
{
3937
"role": "system",

chat_stream.py

Lines changed: 2 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -28,11 +28,9 @@
2828
MODEL_NAME = os.environ["OPENAI_MODEL"]
2929

3030

31-
completion = client.chat.completions.create(
31+
completion_stream = client.chat.completions.create(
3232
model=MODEL_NAME,
3333
temperature=0.7,
34-
max_tokens=500,
35-
n=1,
3634
messages=[
3735
{"role": "system", "content": "You are a helpful assistant that makes lots of cat references and uses emojis."},
3836
{"role": "user", "content": "please write a haiku about a hungry cat that wants tuna"},
@@ -41,7 +39,7 @@
4139
)
4240

4341
print(f"Response from {API_HOST}: \n")
44-
for event in completion:
42+
for event in completion_stream:
4543
if event.choices:
4644
content = event.choices[0].delta.content
4745
if content:

0 commit comments

Comments (0)