@@ -15,10 +15,10 @@
 from pyqt_openai.chat_widget.center.chatHome import ChatHome
 from pyqt_openai.chat_widget.center.menuWidget import MenuWidget
 from pyqt_openai.chat_widget.center.prompt import Prompt
-from pyqt_openai.chat_widget.llamaOpenAIThread import LlamaOpenAIThread
+from pyqt_openai.chat_widget.llamaIndexThread import LlamaIndexThread
 from pyqt_openai.config_loader import CONFIG_MANAGER
 from pyqt_openai.globals import LLAMAINDEX_WRAPPER, DB
-from pyqt_openai.util.script import get_argument, ChatThread
+from pyqt_openai.util.common import get_argument, ChatThread
 from pyqt_openai.lang.translations import LangClass
 from pyqt_openai.models import ChatMessageContainer
 from pyqt_openai.widgets.notifier import NotifierWidget
@@ -261,10 +261,9 @@ def __chat(self):
         query_text = self.__prompt.getContent()
         self.__browser.showLabel(query_text, False, container)

-        # Get parameters for OpenAI
+        # Run a different thread based on whether the llama-index is enabled or not.
         if is_llama_available:
-            # Run a different thread based on whether the llama-index is enabled or not.
-            self.__t = LlamaOpenAIThread(
+            self.__t = LlamaIndexThread(
                 param, container, LLAMAINDEX_WRAPPER, query_text
             )
         else:
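For readers skimming the diff: aside from the renames (LlamaOpenAIThread to LlamaIndexThread, util.script to util.common), the change boils down to picking a worker QThread class based on whether llama-index is enabled. Below is a minimal, self-contained sketch of that dispatch pattern. The worker classes and the make_worker helper are hypothetical stand-ins for LlamaIndexThread / ChatThread, not code from pyqt_openai; only QThread, its start() method, and the finished signal are standard Qt API.

```python
# Standalone illustration of the "pick a worker thread by feature flag"
# pattern used in __chat() above. The two worker classes are stand-ins
# for LlamaIndexThread / ChatThread, not the real pyqt_openai classes.
import sys
from PySide6.QtCore import QCoreApplication, QThread

class IndexedQueryThread(QThread):   # stand-in for LlamaIndexThread
    def run(self):
        print("answering via the llama-index document wrapper")

class PlainChatThread(QThread):      # stand-in for ChatThread
    def run(self):
        print("answering via a plain chat completion")

def make_worker(is_llama_available: bool) -> QThread:
    # Mirrors the if/else dispatch in the hunk: one worker type per mode.
    return IndexedQueryThread() if is_llama_available else PlainChatThread()

if __name__ == "__main__":
    app = QCoreApplication(sys.argv)
    t = make_worker(is_llama_available=False)
    t.finished.connect(app.quit)     # stop the event loop when the worker ends
    t.start()
    app.exec()
```

Keeping the branch to a single constructor call, as the new code does, means the signal wiring and start() call that follow can stay identical for both modes.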