@@ -12,7 +12,7 @@ import (
1212 "strings"
1313
1414 "github.com/gin-gonic/gin"
15- "github.com/go-playground/validator/v10 "
15+ "github.com/openai/openai-go/v3 "
1616 "opencsg.com/csghub-server/aigateway/component"
1717 "opencsg.com/csghub-server/aigateway/token"
1818 "opencsg.com/csghub-server/aigateway/types"
@@ -166,6 +166,9 @@ func (h *OpenAIHandlerImpl) GetModel(c *gin.Context) {
 	c.PureJSON(http.StatusOK, model)
 }
 
+var _ openai.ChatCompletion
+var _ openai.ChatCompletionChunk
+
 // Chat godoc
 // @Security ApiKey
 // @Summary Chat with backend model
@@ -174,7 +177,8 @@ func (h *OpenAIHandlerImpl) GetModel(c *gin.Context) {
 // @Accept json
 // @Produce json
 // @Param request body ChatCompletionRequest true "Chat completion request"
-// @Success 200 {object} ChatCompletionResponse "OK"
+// @Success 200 {object} openai.ChatCompletion "OK"
+// @Success 200 {object} openai.ChatCompletionChunk "OK"
 // @Failure 400 {object} error "Bad request"
 // @Failure 404 {object} error "Model not found"
 // @Failure 500 {object} error "Internal server error"
@@ -189,24 +193,7 @@ func (h *OpenAIHandlerImpl) Chat(c *gin.Context) {
 	username := httpbase.GetCurrentUser(c)
 	userUUID := httpbase.GetCurrentUserUUID(c)
 	chatReq := &ChatCompletionRequest{}
-	bodyBytes, err := io.ReadAll(c.Request.Body)
-	if err != nil {
-		slog.Error("failed to read request body", "error", err.Error())
-		c.String(http.StatusBadRequest, fmt.Errorf("invalid chat compoletion request body:%w", err).Error())
-		return
-	}
-
-	c.Request.Body = io.NopCloser(bytes.NewReader(bodyBytes))
-	c.Request.ContentLength = int64(len(bodyBytes))
-
-	if err = json.Unmarshal(bodyBytes, chatReq); err != nil {
-		slog.Error("failed to parse request body", "error", err.Error())
-		c.String(http.StatusBadRequest, fmt.Errorf("invalid chat compoletion request body:%w", err).Error())
-		return
-	}
-
-	validate := validator.New()
-	if err = validate.Struct(chatReq); err != nil {
+	if err := c.BindJSON(chatReq); err != nil {
 		slog.Error("invalid chat compoletion request body", "error", err.Error())
 		c.String(http.StatusBadRequest, fmt.Errorf("invalid chat compoletion request body:%w", err).Error())
 		return
@@ -263,16 +250,7 @@ func (h *OpenAIHandlerImpl) Chat(c *gin.Context) {
 		c.String(http.StatusBadRequest, err.Error())
 		return
 	}
-
-	var reqMap map[string]interface{}
-	if err = json.Unmarshal(bodyBytes, &reqMap); err != nil {
-		slog.Error("failed to unmarshal request body to map", "error", err)
-		c.String(http.StatusBadRequest, fmt.Errorf("invalid chat completion request body: %w", err).Error())
-		return
-	}
-	// directly update model field in request map
-	reqMap["model"] = modelName
-
+	chatReq.Model = modelName
 	if chatReq.Stream {
 		c.Writer.Header().Set("Content-Type", "text/event-stream")
 		if !strings.Contains(model.ImageID, "vllm-cpu") {
@@ -283,13 +261,7 @@ func (h *OpenAIHandlerImpl) Chat(c *gin.Context) {
 	}
 
 	// marshal updated request map back to JSON bytes
-	updatedBodyBytes, err := json.Marshal(reqMap)
-	if err != nil {
-		slog.Error("failed to marshal updated request map", "error", err)
-		c.String(http.StatusInternalServerError, fmt.Errorf("failed to process chat request: %w", err).Error())
-		return
-	}
-
+	updatedBodyBytes, _ := json.Marshal(chatReq)
 	c.Request.Body = io.NopCloser(bytes.NewReader(updatedBodyBytes))
 	c.Request.ContentLength = int64(len(updatedBodyBytes))
 	rp, _ := proxy.NewReverseProxy(target)
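
Replacing the hand-rolled `io.ReadAll` + `json.Unmarshal` + `validator.New()` sequence with a single `c.BindJSON(chatReq)` call works because gin's JSON binding both decodes the body and runs go-playground/validator against the struct's `binding` tags. A minimal, self-contained sketch of that behavior, using a hypothetical stand-in for `ChatCompletionRequest` (field names and tags are assumptions, not the project's real type):

```go
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

// chatMessage / chatCompletionRequest are illustrative stand-ins; the real
// ChatCompletionRequest lives in the aigateway package and may differ.
type chatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type chatCompletionRequest struct {
	Model    string        `json:"model"`
	Stream   bool          `json:"stream"`
	Messages []chatMessage `json:"messages" binding:"required,min=1"`
}

func main() {
	r := gin.New()
	r.POST("/v1/chat/completions", func(c *gin.Context) {
		req := &chatCompletionRequest{}
		// BindJSON decodes the JSON body and enforces the `binding` tags in one
		// step; on failure it aborts the context with HTTP 400, so the handler
		// only needs to log and return, mirroring the new code path in the diff.
		if err := c.BindJSON(req); err != nil {
			c.String(http.StatusBadRequest, "invalid chat completion request body: %v", err)
			return
		}
		c.JSON(http.StatusOK, gin.H{"model": req.Model, "stream": req.Stream})
	})
	_ = r.Run(":8080")
}
```

One caveat: `validator.New()` checks `validate` tags by default, while gin's binding checks `binding` tags, so the switch only preserves validation if `ChatCompletionRequest` actually declares `binding` tags; otherwise `BindJSON` only verifies that the JSON is well formed.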
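The two new `var _ openai.ChatCompletion` / `var _ openai.ChatCompletionChunk` declarations appear to exist only so the `github.com/openai/openai-go/v3` import compiles: the types are otherwise referenced just inside the swag `@Success` annotations, which the Go compiler does not see. A short sketch of the idiom (the package name here is illustrative):

```go
// Package handler is an illustrative package name for this sketch.
package handler

import "github.com/openai/openai-go/v3"

// Referencing the imported types once via the blank identifier keeps the
// otherwise "unused" import alive for the compiler and for goimports, while
// swag picks the same types up from the // @Success annotations when
// generating the OpenAPI spec.
var (
	_ openai.ChatCompletion
	_ openai.ChatCompletionChunk
)
```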