
Commit 9887168

removed any reference to gpt-3.5-turbo and replaced it with gpt-4o-mini
1 parent 5953a78 commit 9887168

File tree

5 files changed: +4 −9 lines changed

src/config.ts

Lines changed: 0 additions & 5 deletions
@@ -23,11 +23,6 @@ export const DEFAULT_RESET_CRON =
   process.env.DEFAULT_RESET_CRON || '0 0 1 */3 *'

 export const validModels = [
-  {
-    name: 'gpt-3.5-turbo',
-    deployment: process.env.GPT_35_TURBO || 'curredev35',
-    context: 16_384,
-  },
   {
     name: 'gpt-4',
     deployment: process.env.GPT_4 || 'curredev4',
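
For context, each validModels entry ties a public model name to a deployment name (overridable through an environment variable) and a token context window. The removed entry is reproduced below as a standalone sketch of that shape; the ValidModel interface name is hypothetical and added only for illustration.

// Hypothetical type name; the field values match the entry deleted above.
interface ValidModel {
  name: string
  deployment: string
  context: number
}

// The entry this commit removes, shown on its own for reference.
const removedEntry: ValidModel = {
  name: 'gpt-3.5-turbo',
  deployment: process.env.GPT_35_TURBO || 'curredev35',
  context: 16_384,
}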

src/server/chatInstances/usage.ts

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ export const checkUsage = async (
   { id, isPowerUser, isAdmin }: UserType,
   model: string
 ): Promise<boolean> => {
-  if (model === 'gpt-3.5-turbo') return true
+  if (model === 'gpt-4o-mini') return true

   const usage = await getUsage(id)

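With this change, gpt-4o-mini takes over the role gpt-3.5-turbo had in checkUsage: the one model that bypasses the per-user usage check. Below is a self-contained sketch of that control flow, assuming the UserType fields and getUsage call visible in the hunk; the getUsage stub and the usage limit are placeholders, not code from this repository.

// Sketch only — mirrors the shape of checkUsage in the hunk above.
type UserType = { id: string; isPowerUser: boolean; isAdmin: boolean }

// Hypothetical stand-in for the repo's real getUsage helper.
const getUsage = async (_id: string): Promise<number> => 0

export const checkUsage = async (
  { id }: UserType,
  model: string
): Promise<boolean> => {
  // gpt-4o-mini is always allowed, regardless of accumulated usage.
  if (model === 'gpt-4o-mini') return true

  const usage = await getUsage(id)
  // Placeholder limit check; the real comparison is outside this hunk.
  return usage < 100_000
}
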
src/server/db/models/chatInstance.ts

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@ ChatInstance.init(
     model: {
       type: DataTypes.STRING,
       allowNull: false,
-      defaultValue: 'gpt-3.5-turbo',
+      defaultValue: 'gpt-4o-mini',
     },
     usageLimit: {
       type: DataTypes.INTEGER,

src/server/util/tiktoken.ts

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@ import { encoding_for_model, TiktokenModel } from '@dqbd/tiktoken'

 const getEncoding = (model: string) => {
   if (model === 'mock') {
-    model = 'gpt-3.5-turbo'
+    model = 'gpt-4o-mini'
   }

   return encoding_for_model(model as TiktokenModel)

src/server/util/util.ts

Lines changed: 1 addition & 1 deletion
@@ -34,7 +34,7 @@ export const getAllowedModels = (model: string): string[] => {

   if (model === 'gpt-4') return allModels

-  return ['gpt-3.5-turbo']
+  return ['gpt-4o-mini']
 }

 export const getModelContextLimit = (modelName: string) => {
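
Read together with the gpt-4 branch above it, the new fallback means gpt-4o-mini is the only model offered unless an instance is configured for gpt-4. A minimal sketch of the behaviour this hunk implies; the contents of allModels are an assumption, since only the gpt-4 branch and the fallback return are visible in the diff.

// Assumed model list — only gpt-4 and gpt-4o-mini are confirmed by this commit.
const allModels = ['gpt-4', 'gpt-4o-mini']

export const getAllowedModels = (model: string): string[] => {
  // An instance configured for gpt-4 may pick from every model...
  if (model === 'gpt-4') return allModels

  // ...anything else falls back to gpt-4o-mini only.
  return ['gpt-4o-mini']
}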
