We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 3a9a0c2 · commit b520feb (Copy full SHA for b520feb)
genai/rag/llm/cloud_chat_model.py
@@ -8,7 +8,8 @@
8
9
10
class CloudLLM:
11
- """A concrete implementation of a cloud-based LLM. Uses openai as the default LLM provider."""
+ """A concrete implementation of a cloud-based LLM.
12
+ Uses openai as the default LLM provider."""
13
14
def __init__(
15
self,
@@ -70,7 +71,7 @@ def get_system_prompt(self) -> str:
70
71
meal timing), answer them precisely based on the
72
context and query.
73
"""
74
+
75
def invoke(self, prompt: PromptValue) -> BaseMessage:
76
"""Invoke the LLM with the given prompt"""
77
return self.model.invoke(prompt)
-
0 commit comments