@@ -383,7 +383,7 @@ func PerformGeneralRequest(input string, history []sharedtypes.HistoricMessage,
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil, nil)
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
 		// Create a stream channel
@@ -440,7 +440,7 @@ func PerformGeneralRequestWithImages(input string, history []sharedtypes.Histori
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, images)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil, images)
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
 		// Create a stream channel
@@ -499,7 +499,7 @@ func PerformGeneralModelSpecificationRequest(input string, history []sharedtypes
 
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -557,7 +557,7 @@ func PerformGeneralRequestSpecificModel(input string, history []sharedtypes.Hist
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -616,7 +616,7 @@ func PerformGeneralRequestSpecificModelAndModelOptions(input string, history []s
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, &modelOptions, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, &modelOptions, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -671,12 +671,12 @@ func PerformGeneralRequestSpecificModelAndModelOptions(input string, history []s
 // Returns:
 // - message: the response message
 // - stream: the stream channel
-func PerformGeneralRequestSpecificModelModelOptionsAndImages(input string, history []sharedtypes.HistoricMessage, isStream bool, systemPrompt string, modelIds []string, modelOptions sharedtypes.ModelOptions, images []string) (message string, stream *chan string) {
+func PerformGeneralRequestSpecificModelModelOptionsAndImages(input string, history []sharedtypes.HistoricMessage, isStream bool, systemPrompt string, modelIds []string, modelOptions sharedtypes.ModelOptions, images []string, modelCategory []string) (message string, stream *chan string) {
 	// get the LLM handler endpoint
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, &modelOptions, images)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, modelCategory, &modelOptions, images)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -735,7 +735,7 @@ func PerformGeneralRequestSpecificModelNoStreamWithOpenAiTokenOutput(input strin
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil, nil)
 	defer close(responseChannel)
 
 	// else Process all responses
@@ -812,7 +812,7 @@ func PerformGeneralRequestSpecificModelAndModelOptionsNoStreamWithOpenAiTokenOut
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, &modelOptions, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, &modelOptions, nil)
 	defer close(responseChannel)
 
 	// else Process all responses
@@ -890,7 +890,7 @@ func PerformGeneralRequestSpecificModelAndModelOptionsNoStreamWithOpenAiInputOut
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, &modelOptions, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, &modelOptions, nil)
 	defer close(responseChannel)
 
 	// else Process all responses
@@ -961,7 +961,7 @@ func PerformCodeLLMRequest(input string, history []sharedtypes.HistoricMessage,
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "code", history, 0, "", llmHandlerEndpoint, nil, nil, nil)
+	responseChannel := sendChatRequest(input, "code", history, 0, "", llmHandlerEndpoint, nil, nil, nil, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -1043,7 +1043,7 @@ func PerformGeneralRequestNoStreaming(input string, history []sharedtypes.Histor
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseString := sendChatRequestNoStreaming(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil)
+	responseString := sendChatRequestNoStreaming(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil, nil)
 
 	// Return the response
 	return responseString
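
Note: every hunk above threads one new argument through the chat-request helpers, inserted between the modelIds and modelOptions arguments. Call sites with no category to pass supply nil, while PerformGeneralRequestSpecificModelModelOptionsAndImages additionally gains a modelCategory []string parameter and forwards it. The definitions of sendChatRequest and sendChatRequestNoStreaming are not part of this diff, so the following is only a sketch of the parameter order these call sites imply; every parameter name other than modelCategory, the meaning of the literal 0, and the channel element type are assumptions, not the actual declaration:

	// Sketch inferred from the call sites in this diff, not the real definition.
	func sendChatRequest(
		input string,
		requestType string, // "general" or "code" at the call sites above
		history []sharedtypes.HistoricMessage,
		maxHistory int, // the literal 0 at every call site; its actual meaning is not shown here
		systemPrompt string,
		llmHandlerEndpoint string,
		modelIds []string,
		modelCategory []string, // new in this change; nil means no category filter
		modelOptions *sharedtypes.ModelOptions,
		images []string,
	) chan string // element type assumed; the diff shows only assignment and defer close(responseChannel)

Under the same assumptions, sendChatRequestNoStreaming presumably takes the identical parameter list but returns the final response string directly instead of a channel.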