@@ -2669,6 +2669,7 @@ describe("ClineProvider - Router Models", () => {
 			expect(getModels).toHaveBeenCalledWith({ provider: "requesty", apiKey: "requesty-key" })
 			expect(getModels).toHaveBeenCalledWith({ provider: "glama" })
 			expect(getModels).toHaveBeenCalledWith({ provider: "unbound", apiKey: "unbound-key" })
+			expect(getModels).toHaveBeenCalledWith({ provider: "vercel-ai-gateway" })
 			expect(getModels).toHaveBeenCalledWith({
 				provider: "litellm",
 				apiKey: "litellm-key",
@@ -2686,6 +2687,7 @@ describe("ClineProvider - Router Models", () => {
 					litellm: mockModels,
 					ollama: {},
 					lmstudio: {},
+					"vercel-ai-gateway": mockModels,
 				},
 			})
 		})
@@ -2716,6 +2718,7 @@ describe("ClineProvider - Router Models", () => {
 				.mockRejectedValueOnce(new Error("Requesty API error")) // requesty fail
 				.mockResolvedValueOnce(mockModels) // glama success
 				.mockRejectedValueOnce(new Error("Unbound API error")) // unbound fail
+				.mockResolvedValueOnce(mockModels) // vercel-ai-gateway success
 				.mockRejectedValueOnce(new Error("LiteLLM connection failed")) // litellm fail

 			await messageHandler({ type: "requestRouterModels" })
@@ -2731,6 +2734,7 @@ describe("ClineProvider - Router Models", () => {
 					ollama: {},
 					lmstudio: {},
 					litellm: {},
+					"vercel-ai-gateway": mockModels,
 				},
 			})

@@ -2841,6 +2845,7 @@ describe("ClineProvider - Router Models", () => {
 					litellm: {},
 					ollama: {},
 					lmstudio: {},
+					"vercel-ai-gateway": mockModels,
 				},
 			})
 		})