
Commit adb91b9

add model category (#222)
1 parent: 88cba00

File tree: 6 files changed, 31 additions & 26 deletions

go.mod

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@ module github.com/ansys/aali-flowkit
 go 1.24.2
 
 require (
-	github.com/ansys/aali-sharedtypes v1.0.4-0.20250812152015-1a2272e3b910
+	github.com/ansys/aali-sharedtypes v1.0.4-0.20250819105103-58cf9e0fa68b
 	github.com/google/go-github/v56 v56.0.0
 	github.com/google/uuid v1.6.0
 	github.com/pandodao/tokenizer-go v0.2.0

go.sum

Lines changed: 2 additions & 2 deletions
@@ -30,8 +30,8 @@ github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4
 github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
 github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
 github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
-github.com/ansys/aali-sharedtypes v1.0.4-0.20250812152015-1a2272e3b910 h1:+m3OFXnCZlqSo2jptPpREL3xktmzlqOenu50j/J9f7k=
-github.com/ansys/aali-sharedtypes v1.0.4-0.20250812152015-1a2272e3b910/go.mod h1:cWfGDKNuQQdQzVoRGaz5nLUopFGUME1vHUlgAf8KCxQ=
+github.com/ansys/aali-sharedtypes v1.0.4-0.20250819105103-58cf9e0fa68b h1:fH+44NQh9CwCWUX7f13vk0QEJ/4PKmQd2GyyE8NjcAw=
+github.com/ansys/aali-sharedtypes v1.0.4-0.20250819105103-58cf9e0fa68b/go.mod h1:cWfGDKNuQQdQzVoRGaz5nLUopFGUME1vHUlgAf8KCxQ=
 github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
 github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
 github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4=

pkg/externalfunctions/ansysgpt.go

Lines changed: 2 additions & 2 deletions
@@ -324,7 +324,7 @@ func AnsysGPTPerformLLMRequest(finalQuery string, history []sharedtypes.Historic
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(finalQuery, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil)
+	responseChannel := sendChatRequest(finalQuery, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -1006,7 +1006,7 @@ func AecPerformLLMFinalRequest(systemTemplate string,
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request.
-	responseChannel := sendChatRequest(userPrompt, "general", nil, 0, systemPrompt, llmHandlerEndpoint, nil, options, nil)
+	responseChannel := sendChatRequest(userPrompt, "general", nil, 0, systemPrompt, llmHandlerEndpoint, nil, nil, options, nil)
 
 	// Create a stream channel
 	streamChannel := make(chan string, 400)

pkg/externalfunctions/ansysmaterials.go

Lines changed: 1 addition & 1 deletion
@@ -322,7 +322,7 @@ func PerformMultipleGeneralRequestsAndExtractAttributesWithOpenAiTokenOutput(inp
 
 	// Helper function to send a request and get the response as string
 	sendRequest := func() string {
-		responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil)
+		responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil, nil)
 		defer close(responseChannel)
 
 		var responseStr string

pkg/externalfunctions/llmhandler.go

Lines changed: 12 additions & 12 deletions
@@ -383,7 +383,7 @@ func PerformGeneralRequest(input string, history []sharedtypes.HistoricMessage,
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil, nil)
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
 		// Create a stream channel
@@ -440,7 +440,7 @@ func PerformGeneralRequestWithImages(input string, history []sharedtypes.Histori
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, images)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil, images)
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
 		// Create a stream channel
@@ -499,7 +499,7 @@ func PerformGeneralModelSpecificationRequest(input string, history []sharedtypes
 
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -557,7 +557,7 @@ func PerformGeneralRequestSpecificModel(input string, history []sharedtypes.Hist
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -616,7 +616,7 @@ func PerformGeneralRequestSpecificModelAndModelOptions(input string, history []s
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, &modelOptions, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, &modelOptions, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -671,12 +671,12 @@ func PerformGeneralRequestSpecificModelAndModelOptions(input string, history []s
 // Returns:
 // - message: the response message
 // - stream: the stream channel
-func PerformGeneralRequestSpecificModelModelOptionsAndImages(input string, history []sharedtypes.HistoricMessage, isStream bool, systemPrompt string, modelIds []string, modelOptions sharedtypes.ModelOptions, images []string) (message string, stream *chan string) {
+func PerformGeneralRequestSpecificModelModelOptionsAndImages(input string, history []sharedtypes.HistoricMessage, isStream bool, systemPrompt string, modelIds []string, modelOptions sharedtypes.ModelOptions, images []string, modelCategory []string) (message string, stream *chan string) {
 	// get the LLM handler endpoint
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, &modelOptions, images)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, modelCategory, &modelOptions, images)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -735,7 +735,7 @@ func PerformGeneralRequestSpecificModelNoStreamWithOpenAiTokenOutput(input strin
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, nil, nil)
 	defer close(responseChannel)
 
 	// else Process all responses
@@ -812,7 +812,7 @@ func PerformGeneralRequestSpecificModelAndModelOptionsNoStreamWithOpenAiTokenOut
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, &modelOptions, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, &modelOptions, nil)
 	defer close(responseChannel)
 
 	// else Process all responses
@@ -890,7 +890,7 @@ func PerformGeneralRequestSpecificModelAndModelOptionsNoStreamWithOpenAiInputOut
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, &modelOptions, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, modelIds, nil, &modelOptions, nil)
 	defer close(responseChannel)
 
 	// else Process all responses
@@ -961,7 +961,7 @@ func PerformCodeLLMRequest(input string, history []sharedtypes.HistoricMessage,
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseChannel := sendChatRequest(input, "code", history, 0, "", llmHandlerEndpoint, nil, nil, nil)
+	responseChannel := sendChatRequest(input, "code", history, 0, "", llmHandlerEndpoint, nil, nil, nil, nil)
 
 	// If isStream is true, create a stream channel and return asap
 	if isStream {
@@ -1043,7 +1043,7 @@ func PerformGeneralRequestNoStreaming(input string, history []sharedtypes.Histor
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request
-	responseString := sendChatRequestNoStreaming(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil)
+	responseString := sendChatRequestNoStreaming(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, nil, nil)
 
 	// Return the response
 	return responseString
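
For downstream users of the exported helper, the new trailing parameter changes call sites as in the minimal sketch below. It assumes the import paths github.com/ansys/aali-flowkit/pkg/externalfunctions and github.com/ansys/aali-sharedtypes/pkg/sharedtypes, and uses a placeholder category value; which category names are valid depends on the models registered with the LLM handler, which this commit does not show.

package main

import (
	"fmt"

	"github.com/ansys/aali-flowkit/pkg/externalfunctions"
	"github.com/ansys/aali-sharedtypes/pkg/sharedtypes"
)

func main() {
	// modelCategory is the parameter introduced by this commit. The value
	// "general" is a placeholder; valid categories depend on the LLM
	// handler configuration.
	message, _ := externalfunctions.PerformGeneralRequestSpecificModelModelOptionsAndImages(
		"What is a modal analysis?",    // input
		nil,                            // history: none
		false,                          // isStream: wait for the full message
		"You are a helpful assistant.", // systemPrompt
		nil,                            // modelIds: left empty, assuming the category can drive model selection
		sharedtypes.ModelOptions{},     // modelOptions: defaults
		nil,                            // images: none
		[]string{"general"},            // modelCategory: new trailing parameter
	)
	fmt.Println(message)
}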

pkg/externalfunctions/privatefunctions.go

Lines changed: 13 additions & 8 deletions
@@ -267,7 +267,7 @@ func sendTokenCountToEndpoint(jwtToken string, tokenCountEndpoint string, inputT
 // Returns:
 // - chan sharedtypes.HandlerResponse: the response channel
 func sendChatRequestNoHistory(data string, chatRequestType string, maxKeywordsSearch uint32, llmHandlerEndpoint string, modelIds []string, options *sharedtypes.ModelOptions) chan sharedtypes.HandlerResponse {
-	return sendChatRequest(data, chatRequestType, nil, maxKeywordsSearch, "", llmHandlerEndpoint, modelIds, options, nil)
+	return sendChatRequest(data, chatRequestType, nil, maxKeywordsSearch, "", llmHandlerEndpoint, modelIds, nil, options, nil)
 }
 
 // sendChatRequest sends a chat request to LLM
@@ -284,7 +284,7 @@ func sendChatRequestNoHistory(data string, chatRequestType string, maxKeywordsSe
 //
 // Returns:
 // - chan sharedtypes.HandlerResponse: the response channel
-func sendChatRequest(data string, chatRequestType string, history []sharedtypes.HistoricMessage, maxKeywordsSearch uint32, systemPrompt interface{}, llmHandlerEndpoint string, modelIds []string, options *sharedtypes.ModelOptions, images []string) chan sharedtypes.HandlerResponse {
+func sendChatRequest(data string, chatRequestType string, history []sharedtypes.HistoricMessage, maxKeywordsSearch uint32, systemPrompt interface{}, llmHandlerEndpoint string, modelIds []string, modelCategory []string, options *sharedtypes.ModelOptions, images []string) chan sharedtypes.HandlerResponse {
 	// Initiate the channels
 	requestChannelChat := make(chan []byte, 400)
 	responseChannel := make(chan sharedtypes.HandlerResponse) // Create a channel for responses
@@ -293,7 +293,7 @@ func sendChatRequest(data string, chatRequestType string, history []sharedtypes.
 	go shutdownHandler(c)
 	go listener(c, responseChannel, false)
 	go writer(c, requestChannelChat, responseChannel)
-	go sendRequest("chat", data, requestChannelChat, chatRequestType, "true", false, history, maxKeywordsSearch, systemPrompt, responseChannel, modelIds, options, images)
+	go sendRequest("chat", data, requestChannelChat, chatRequestType, "true", false, history, maxKeywordsSearch, systemPrompt, responseChannel, modelIds, modelCategory, options, images)
 
 	return responseChannel // Return the response channel
 }
@@ -312,7 +312,7 @@ func sendChatRequest(data string, chatRequestType string, history []sharedtypes.
 //
 // Returns:
 // - string: the response
-func sendChatRequestNoStreaming(data string, chatRequestType string, history []sharedtypes.HistoricMessage, maxKeywordsSearch uint32, systemPrompt string, llmHandlerEndpoint string, modelIds []string, options *sharedtypes.ModelOptions, images []string) string {
+func sendChatRequestNoStreaming(data string, chatRequestType string, history []sharedtypes.HistoricMessage, maxKeywordsSearch uint32, systemPrompt string, llmHandlerEndpoint string, modelIds []string, modelCategory []string, options *sharedtypes.ModelOptions, images []string) string {
 	// Initiate the channels
 	requestChannelChat := make(chan []byte, 400)
 	responseChannel := make(chan sharedtypes.HandlerResponse) // Create a channel for responses
@@ -322,7 +322,7 @@ func sendChatRequestNoStreaming(data string, chatRequestType string, history []s
 	go shutdownHandler(c)
 	go listener(c, responseChannel, true)
 	go writer(c, requestChannelChat, responseChannel)
-	go sendRequest("chat", data, requestChannelChat, chatRequestType, "false", false, history, maxKeywordsSearch, systemPrompt, responseChannel, modelIds, options, images)
+	go sendRequest("chat", data, requestChannelChat, chatRequestType, "false", false, history, maxKeywordsSearch, systemPrompt, responseChannel, modelIds, modelCategory, options, images)
 
 	// receive single answer from the response channel
 	response := <-responseChannel
@@ -356,7 +356,7 @@ func sendEmbeddingsRequest(data interface{}, llmHandlerEndpoint string, getSpars
 	go listener(c, responseChannel, false)
 	go writer(c, requestChannelEmbeddings, responseChannel)
 
-	go sendRequest("embeddings", data, requestChannelEmbeddings, "", "", getSparseEmbeddings, nil, 0, "", responseChannel, modelIds, nil, nil)
+	go sendRequest("embeddings", data, requestChannelEmbeddings, "", "", getSparseEmbeddings, nil, 0, "", responseChannel, modelIds, nil, nil, nil)
 	return responseChannel // Return the response channel
 }
 
@@ -538,7 +538,7 @@ func writer(c *websocket.Conn, RequestChannel chan []byte, responseChannel chan
 // - dataStream: the data stream flag
 // - history: the conversation history
 // - sc: the session context
-func sendRequest(adapter string, data interface{}, RequestChannel chan []byte, chatRequestType string, dataStream string, getSparseEmbeddings bool, history []sharedtypes.HistoricMessage, maxKeywordsSearch uint32, systemPrompt interface{}, responseChannel chan sharedtypes.HandlerResponse, modelIds []string, options *sharedtypes.ModelOptions, images []string) {
+func sendRequest(adapter string, data interface{}, RequestChannel chan []byte, chatRequestType string, dataStream string, getSparseEmbeddings bool, history []sharedtypes.HistoricMessage, maxKeywordsSearch uint32, systemPrompt interface{}, responseChannel chan sharedtypes.HandlerResponse, modelIds []string, modelCategory []string, options *sharedtypes.ModelOptions, images []string) {
 	request := sharedtypes.HandlerRequest{
 		Adapter:         adapter,
 		InstructionGuid: strings.Replace(uuid.New().String(), "-", "", -1),
@@ -554,6 +554,11 @@ func sendRequest(adapter string, data interface{}, RequestChannel chan []byte, c
 		request.ModelIds = modelIds
 	}
 
+	// check for model category
+	if len(modelCategory) > 0 {
+		request.ModelCategory = modelCategory
+	}
+
 	// If history is not empty, set the IsConversation flag to true
 	// and set the conversation history
 	if len(history) > 0 {
@@ -1736,7 +1741,7 @@ func performGeneralRequest(input string, history []sharedtypes.HistoricMessage,
 	llmHandlerEndpoint := config.GlobalConfig.LLM_HANDLER_ENDPOINT
 
 	// Set up WebSocket connection with LLM and send chat request.
-	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, options, nil)
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint, nil, nil, options, nil)
 
 	// If isStream is true, create a stream channel and return asap.
 	if isStream {
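
The sendRequest hunk follows the pattern already used for modelIds: an optional slice is attached to the outgoing sharedtypes.HandlerRequest only when it is non-empty, so an unset category is simply absent from the serialized request. Below is a minimal self-contained sketch of that pattern; handlerRequest is a reduced stand-in for the real sharedtypes.HandlerRequest (which lives in aali-sharedtypes and has many more fields), not its actual definition.

package main

import "fmt"

// handlerRequest is a hypothetical, reduced stand-in for
// sharedtypes.HandlerRequest, kept just large enough to show the pattern.
type handlerRequest struct {
	Adapter       string
	ModelIds      []string `json:",omitempty"`
	ModelCategory []string `json:",omitempty"`
}

// buildRequest mirrors the selection logic in sendRequest: each optional
// slice is copied onto the request only when non-empty, so omitted values
// never appear in the marshaled payload.
func buildRequest(adapter string, modelIds, modelCategory []string) handlerRequest {
	request := handlerRequest{Adapter: adapter}
	if len(modelIds) > 0 {
		request.ModelIds = modelIds
	}
	if len(modelCategory) > 0 {
		request.ModelCategory = modelCategory
	}
	return request
}

func main() {
	// A category-only request: ModelIds stays nil and is omitted on the wire.
	fmt.Printf("%+v\n", buildRequest("chat", nil, []string{"general"}))
}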
