86 changes: 40 additions & 46 deletions ovos_solver_openai_persona/engines.py
@@ -101,11 +101,11 @@ def __init__(self, config=None,
enable_cache: bool = False,
internal_lang: Optional[str] = None):
"""
Initializes the OpenAIChatCompletionsSolver with API configuration, memory settings, and system prompt.

Raises:
ValueError: If the API key is not provided in the configuration.
"""
Initialize an OpenAIChatCompletionsSolver instance with API configuration, conversation memory settings, and system prompt.
Raises:
ValueError: If the API key is missing from the configuration.
"""
super().__init__(config=config, translator=translator,
detector=detector, priority=priority,
enable_tx=enable_tx, enable_cache=enable_cache,
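Reviewer note: a minimal construction sketch for the behaviour documented above. It is not part of this diff, and the `"key"` config field name is an assumption that should be checked against the plugin's README.

```python
# Sketch only, not part of this PR. The "key" field name is an assumption;
# per the docstring, a missing API key raises ValueError at construction time.
from ovos_solver_openai_persona.engines import OpenAIChatCompletionsSolver

config = {"key": "sk-..."}  # assumed config field carrying the OpenAI API key
solver = OpenAIChatCompletionsSolver(config=config)
```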
@@ -131,16 +131,16 @@ def __init__(self, config=None,
# OpenAI API integration
def _do_api_request(self, messages):
"""
Sends a chat completion request to the OpenAI API and returns the assistant's reply.
Send a chat completion request to the OpenAI API using the provided conversation history and return the assistant's reply.

Args:
messages: A list of message dictionaries representing the conversation history.
Parameters:
messages (list): Conversation history as a list of message dictionaries.

Returns:
The content of the assistant's reply as a string.
str: The assistant's reply content.

Raises:
RequestException: If the OpenAI API returns an error in the response.
RequestException: If the OpenAI API response contains an error.
"""
s = requests.Session()
headers = {
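Between the two hunks, for context: the `messages` argument documented above is a plain OpenAI-style list of role/content dicts. An illustrative shape, assuming the `solver` instance from the constructor sketch:

```python
# Illustrative shape of the `messages` list passed to _do_api_request();
# the role/content keys match the dicts built elsewhere in this file.
messages = [
    {"role": "system", "content": "You are a helpful voice assistant."},
    {"role": "user", "content": "What is the capital of France?"},
]
# reply = solver._do_api_request(messages)  # would return the assistant's reply as a str
```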
@@ -243,14 +243,14 @@ def get_chat_history(self, system_prompt=None):

def get_messages(self, utt, system_prompt=None) -> MessageList:
"""
Builds a list of chat messages including the system prompt, recent conversation history, and the current user utterance.
Constructs a list of chat messages for the API, including the system prompt, recent conversation history, and the current user utterance.

Args:
utt: The current user input to be appended as the latest message.
Parameters:
utt: The current user input to be added as the latest message.
system_prompt: Optional system prompt to use as the initial message.

Returns:
A list of message dictionaries representing the chat context for the API.
A list of message dictionaries representing the chat context.
"""
messages = self.get_chat_history(system_prompt)
messages.append({"role": "user", "content": utt})
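A hedged sketch of what `get_messages` produces per the updated docstring, reusing the `solver` instance from the earlier sketch (utterance text is illustrative):

```python
# Sketch: get_messages() returns the system prompt plus recent history, with the
# current utterance appended as the latest "user" message (see the lines above).
msgs = solver.get_messages("what's the weather like today?")
# e.g. [{"role": "system", "content": "..."}, ..., {"role": "user", "content": "what's the weather like today?"}]
```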
@@ -261,18 +261,18 @@ def continue_chat(self, messages: MessageList,
lang: Optional[str],
units: Optional[str] = None) -> Optional[str]:
"""
Generates a chat response using the provided message history and updates memory if enabled.

If the first message is not a system prompt, prepends the system prompt. Processes the API response and returns a cleaned answer, or None if the answer is empty or only punctuation/underscores. Updates internal memory with the latest question and answer if memory is enabled.

Args:
messages: List of chat messages with 'role' and 'content' keys.
lang: Optional language code for the response.
units: Optional unit system for numerical values.

Returns:
The generated response as a string, or None if no valid response is produced.
"""
Generate a chat response based on the provided message history and update conversation memory if enabled.
If the first message is not a system prompt, prepends the system prompt. Returns a cleaned response string, or None if the response is empty or contains only punctuation or underscores. Updates internal memory with the latest user message and answer when memory is enabled.
Parameters:
messages (MessageList): List of chat messages, each with 'role' and 'content' keys.
lang (Optional[str]): Language code for the response.
units (Optional[str]): Unit system for numerical values.
Returns:
Optional[str]: The generated response string, or None if no valid response is produced.
"""
if messages[0]["role"] != "system":
messages = [{"role": "system", "content": self.system_prompt }] + messages
response = self._do_api_request(messages)
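A short usage sketch of `continue_chat` as documented above (utterance text is illustrative; `lang` may also be a language code):

```python
# Sketch: continue_chat() prepends the system prompt itself when it is missing and
# returns a cleaned answer string, or None when no valid answer is produced.
history = solver.get_messages("and how many people live there?")
answer = solver.continue_chat(history, lang=None)
if answer is not None:
    print(answer)
```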
@@ -288,16 +288,10 @@ def stream_chat_utterances(self, messages: MessageList,
lang: Optional[str] = None,
units: Optional[str] = None) -> Iterable[str]:
"""
Stream utterances for the given chat history as they become available.

Args:
messages: The chat messages.
lang (Optional[str]): Optional language code. Defaults to None.
units (Optional[str]): Optional units for the query. Defaults to None.

Returns:
Iterable[str]: An iterable of utterances.
"""
Streams partial assistant responses for a chat conversation as they are generated.

Yields post-processed segments of the assistant's reply, emitting each chunk when a sentence or phrase boundary is detected. If conversation memory is enabled, updates the internal memory with the accumulating answer.
"""
if messages[0]["role"] != "system":
messages = [{"role": "system", "content": self.system_prompt }] + messages
answer = ""
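For context, a sketch of consuming the streamed chunks described in the new docstring; printing each chunk is illustrative, a voice assistant would typically speak it instead:

```python
# Sketch: chunks are yielded at sentence/phrase boundaries as the reply streams in.
history = solver.get_messages("tell me a fun fact about space")
for chunk in solver.stream_chat_utterances(history, lang=None):
    print(chunk)  # each chunk is a post-processed sentence/phrase segment
```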
@@ -322,16 +322,16 @@ def stream_utterances(self, query: str,
lang: Optional[str] = None,
units: Optional[str] = None) -> Iterable[str]:
"""
Stream utterances for the given query as they become available.

Args:
query (str): The query text.
lang (Optional[str]): Optional language code. Defaults to None.
units (Optional[str]): Optional units for the query. Defaults to None.

Returns:
Iterable[str]: An iterable of utterances.
"""
Yields partial responses for a query as they are generated by the chat completions API.
Parameters:
query (str): The user query to send to the chat model.
lang (Optional[str]): Language code for the response, if applicable.
units (Optional[str]): Units relevant to the query, if applicable.
Returns:
Iterable[str]: An iterator yielding segments of the model's response as they become available.
"""
messages = self.get_messages(query)
yield from self.stream_chat_utterances(messages, lang, units)
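Finally, a hedged end-to-end sketch of `stream_utterances`, which per the diff builds the message list via `get_messages()` and delegates to `stream_chat_utterances()` (query text is illustrative):

```python
# Sketch only: stream the reply to a free-form query, segment by segment.
for sentence in solver.stream_utterances("tell me about the Eiffel Tower"):
    print(sentence)
```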
