Skip to content

Commit e0fa9e3

Browse files
committed
bugfix: #70 acquire llm output by CustomLLMModel class
1 parent 089b68f commit e0fa9e3

File tree

2 files changed

+8
-6
lines changed

2 files changed

+8
-6
lines changed

muagent/codechat/code_analyzer/code_intepreter.py

Lines changed: 4 additions & 3 deletions
```diff
@@ -32,9 +32,10 @@ def get_intepretation(self, code_list):
         res = {}
         for code in code_list:
             message = CODE_INTERPERT_TEMPLATE.format(code=code)
-            message = [HumanMessage(content=message)]
-            chat_res = chat_model.predict_messages(message)
-            content = chat_res.content
+            # message = [HumanMessage(content=message)]
+            # chat_res = chat_model.predict_messages(message)
+            # content = chat_res.content
+            content = chat_model.predict(message)
             res[code] = content
         return res
```

muagent/codechat/code_search/cypher_generator.py

Lines changed: 4 additions & 3 deletions
```diff
@@ -54,9 +54,10 @@ def get_cypher(self, query: str):
         content = self.NGQL_GENERATION_PROMPT.format(schema=schema, question=query)
         # logger.info(content)
         ans = ''
-        message = [HumanMessage(content=content)]
-        chat_res = self.model.predict_messages(message)
-        ans = chat_res.content
+        # message = [HumanMessage(content=content)]
+        # chat_res = self.model.predict_messages(message)
+        # ans = chat_res.content
+        self.model.predict(content)

         ans = replace_lt_gt(ans)
```

0 commit comments

Comments (0)