File tree (Expand / Collapse): 3 files changed, +31 −17 lines changed
lines changed Original file line number Diff line number Diff line change @@ -44,6 +44,8 @@ If you have a resource-constrained PC, try increasing `HEALTHCHECK_START_PERIOD`
4444enough before healthcheck begins.
4545For more information, please refer to this [ link] ( https://docs.docker.com/reference/compose-file/services/#healthcheck )
4646
47+
48+ Set the model by updating your `.env` file:
4749``` bash
4850cd backend
4951cp .env.example .env
@@ -60,9 +62,9 @@ make docker-down
6062
6163### Prerequisites
6264
63- - [`uv`](https://docs.astral.sh/uv/) (for managing Python, virtual environments, and dependencies)
64- - `wget`
65- - `pandoc`
65+ - [`uv`](https://docs.astral.sh/uv/) (for managing Python, virtual environments, and dependencies)
66+ - `wget`
67+ - `pandoc`
6668- ` git `
6769
6870** Step 1** : Install the required dependencies.
@@ -141,12 +143,12 @@ flowchart LR
141143 id1([Vectorstore]) --- id3([MMR Retriever])
142144 id1([Vectorstore]) --- id4([BM25 Retriever])
143145
144- id2([Semantic Retriever]) -- Retrieved Docs ---> id5([Reranking])
146+ id2([Semantic Retriever]) -- Retrieved Docs ---> id5([Reranking])
145147 id3([MMR Retriever]) -- Retrieved Docs ---> id5([Reranking])
146148 id4([BM25 Retriever]) -- Retrieved Docs ---> id5([Reranking])
147149
148150 id5([Reranking]) ---> id6(top-n docs)
149-
151+
150152```
151153
152154Depending on the input query, each query can be forwarded to any one of the following retrievers,
Original file line number Diff line number Diff line change 5454 llm = ChatOllama (model = model_name , temperature = llm_temp )
5555
5656elif os .getenv ("LLM_MODEL" ) == "gemini" :
57- if os .getenv ("GOOGLE_GEMINI" ) == "1_pro" :
58- llm = ChatGoogleGenerativeAI (model = "gemini-pro" , temperature = llm_temp )
59- elif os .getenv ("GOOGLE_GEMINI" ) == "1.5_flash" :
60- llm = ChatVertexAI (model_name = "gemini-1.5-flash" , temperature = llm_temp )
61- elif os .getenv ("GOOGLE_GEMINI" ) == "1.5_pro" :
62- llm = ChatVertexAI (model_name = "gemini-1.5-pro" , temperature = llm_temp )
57+ gemini_model = os .getenv ("GOOGLE_GEMINI" )
58+ if gemini_model in {"1_pro" , "1.5_flash" , "1.5_pro" }:
59+ raise ValueError (
60+ f"The selected Gemini model '{ gemini_model } ' (version 1.0–1.5) is disabled. "
61+ "Please upgrade to version 2.0 or higher (e.g., 2.0_flash, 2.5_pro)."
62+ )
63+ elif gemini_model == "2.0_flash" :
64+ llm = ChatVertexAI (model_name = "gemini-2.0-flash" , temperature = llm_temp )
65+ elif gemini_model == "2.5_flash" :
66+ llm = ChatVertexAI (model_name = "gemini-2.5-flash" , temperature = llm_temp )
67+ elif gemini_model == "2.5_pro" :
68+ llm = ChatVertexAI (model_name = "gemini-2.5-pro" , temperature = llm_temp )
6369 else :
6470 raise ValueError ("GOOGLE_GEMINI environment variable not set to a valid value." )
6571
Original file line number Diff line number Diff line change 7575 llm = ChatOllama (model = model_name , temperature = llm_temp )
7676
7777elif os .getenv ("LLM_MODEL" ) == "gemini" :
78- if os .getenv ("GOOGLE_GEMINI" ) == "1_pro" :
79- llm = ChatGoogleGenerativeAI (model = "gemini-pro" , temperature = llm_temp )
80- elif os .getenv ("GOOGLE_GEMINI" ) == "1.5_flash" :
81- llm = ChatVertexAI (model_name = "gemini-1.5-flash" , temperature = llm_temp )
82- elif os .getenv ("GOOGLE_GEMINI" ) == "1.5_pro" :
83- llm = ChatVertexAI (model_name = "gemini-1.5-pro" , temperature = llm_temp )
78+ gemini_model = os .getenv ("GOOGLE_GEMINI" )
79+ if gemini_model in {"1_pro" , "1.5_flash" , "1.5_pro" }:
80+ raise ValueError (
81+ f"The selected Gemini model '{ gemini_model } ' (version 1.0–1.5) is disabled. "
82+ "Please upgrade to version 2.0 or higher (e.g., 2.0_flash, 2.5_pro)."
83+ )
84+ elif gemini_model == "2.0_flash" :
85+ llm = ChatVertexAI (model_name = "gemini-2.0-flash" , temperature = llm_temp )
86+ elif gemini_model == "2.5_flash" :
87+ llm = ChatVertexAI (model_name = "gemini-2.5-flash" , temperature = llm_temp )
88+ elif gemini_model == "2.5_pro" :
89+ llm = ChatVertexAI (model_name = "gemini-2.5-pro" , temperature = llm_temp )
8490 else :
8591 raise ValueError ("GOOGLE_GEMINI environment variable not set to a valid value." )
8692
You can’t perform that action at this time.
0 commit comments