@@ -2,7 +2,7 @@ import { NextRequest, NextResponse } from "next/server";
 import { getServerSideConfig } from "../config/server";
 import { OPENAI_BASE_URL, ServiceProvider } from "../constant";
 import { cloudflareAIGatewayUrl } from "../utils/cloudflare";
-import { getModelProvider, isModelAvailableInServer } from "../utils/model";
+import { getModelProvider, isModelNotavailableInServer } from "../utils/model";
 
 const serverConfig = getServerSideConfig();
 
@@ -118,15 +118,14 @@ export async function requestOpenai(req: NextRequest) {
 
   // not undefined and is false
   if (
-    isModelAvailableInServer(
+    isModelNotavailableInServer(
       serverConfig.customModels,
       jsonBody?.model as string,
-      ServiceProvider.OpenAI as string,
-    ) ||
-    isModelAvailableInServer(
-      serverConfig.customModels,
-      jsonBody?.model as string,
-      ServiceProvider.Azure as string,
+      [
+        ServiceProvider.OpenAI,
+        ServiceProvider.Azure,
+        jsonBody?.model as string, // support provider-unspecified model
+      ],
     )
   ) {
     return NextResponse.json(
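
For readers skimming the diff: the two per-provider `isModelAvailableInServer` checks are collapsed into a single `isModelNotavailableInServer` call that takes a list of candidate providers. Below is a minimal sketch of the semantics the call site implies, not the real `../utils/model` implementation; the `modelTable` parameter and the example values are hypothetical stand-ins for however availability is actually derived from `customModels`.

```ts
// Sketch only: assumed semantics of isModelNotavailableInServer, inferred from the call site.
type ModelAvailability = Record<string, boolean>; // key: "model@provider" (lowercased)

function isModelNotavailableInServer(
  customModels: string, // unused in this sketch; parsing into modelTable is elided
  modelName: string,
  providerNames: string | string[],
  modelTable: ModelAvailability = {}, // hypothetical parameter for this sketch
): boolean {
  const providers = Array.isArray(providerNames) ? providerNames : [providerNames];
  // The model counts as "not available" only if no listed provider marks it available,
  // replacing the old pair of per-provider isModelAvailableInServer checks.
  return !providers.some(
    (provider) => modelTable[`${modelName}@${provider.toLowerCase()}`] === true,
  );
}

// Example: the model is available via OpenAI, so the request is not blocked.
const blocked = isModelNotavailableInServer(
  "",
  "gpt-4o",
  ["OpenAI", "Azure", "gpt-4o"],
  { "gpt-4o@openai": true }, // hypothetical availability table
);
console.log(blocked); // false
```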