@@ -103,27 +103,24 @@ def is_streaming_response(response):
103103
def get_llm_model(instance):
    """Return the bare model identifier for an instrumented LLM client.

    Vertex AI exposes the model name in two resource-name forms:
    the short ``models/<model-id>`` and the full
    ``publishers/google/models/<model-id>``.  Strip the longer prefix
    first (it contains the shorter one), then the shorter, so both
    forms normalize to just ``<model-id>``.

    Falls back to ``_model_id`` and finally to ``"unknown"`` when the
    instance carries no recognizable model attribute.
    """
    if hasattr(instance, "_model_name"):
        model_name = instance._model_name
        model_name = model_name.replace("publishers/google/models/", "")
        model_name = model_name.replace("models/", "")
        return model_name
    return getattr(instance, "_model_id", "unknown")
108108
109109
def serialize_prompts(args, kwargs):
    """Serialize LLM call inputs into a list of chat-style messages.

    Positional ``args`` take precedence: string args are collected
    verbatim, list args are flattened one level, with SDK ``Part``
    objects rendered as JSON and anything else stringified.  All parts
    are joined with newlines into a single user message.

    When there are no positional args, the prompt is read from the
    ``prompt`` or ``message`` kwarg; an empty list is returned when
    neither is present.

    Returns:
        list[dict]: ``[{"role": "user", "content": <str>}]`` or ``[]``.
    """
    if args:  # truthiness already implies len(args) > 0
        prompt_parts = []
        for arg in args:
            if isinstance(arg, str):
                prompt_parts.append(arg)
            elif isinstance(arg, list):
                for subarg in arg:
                    # Duck-check by class name to avoid importing the
                    # vendor SDK's Part type at module load time.
                    if type(subarg).__name__ == "Part":
                        prompt_parts.append(json.dumps(subarg.to_dict()))
                    else:
                        prompt_parts.append(str(subarg))
            # NOTE(review): args of any other type are silently skipped,
            # matching the original behavior.

        return [{"role": "user", "content": "\n".join(prompt_parts)}]
    else:
        content = kwargs.get("prompt") or kwargs.get("message")
        return [{"role": "user", "content": content}] if content else []
0 commit comments