Skip to content

Commit b922b49

Browse files
removed hardcoded addresses and modified prompts
1 parent a876a4b commit b922b49

File tree

3 files changed

+7
-7
lines changed

3 files changed

+7
-7
lines changed
Binary file not shown.

FASTAPI-DEPLOYMENT/rhl_fastapi_v2_modify.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -487,7 +487,7 @@ async def _background_update_and_save(user_id: str, user_message: str, bot_reply
487487
- Always include a follow up question
488488
- if <context_followup> is non-empty in the format(without bullet points) "Would you like to know about <a follow up question STRICTLY from context_followup not overlapping with the answer generated>?"
489489
- The generated follow up should be about a medical topic and not any general topic(eg.Clinical Reference Manual for Advanced Neonatal Care in Ethiopia?" ❌) STRICTLY from context_followup
490-
490+
- THE FRAMING OF ANSWER SHOULD BE AT PROFESSIONAL EXPERT OF ENGLISH AND THE PERFECT FLOW OF ALL INFORMATION SHOULD MAKE SENSE AND NOT BE RANDOM SPITTING OF INFORMATION.
491491
- STRICTLY ADHERE TO THE WORD LIMIT AND BULLET POINT RULES and not fetching any information from the web or general knowledge or prior knowledge or any other sources.
492492
493493
Example : Query : what is cure for dispesion?
@@ -991,7 +991,7 @@ def handle_chitchat(user_message: str, chat_history: str) -> str:
991991

992992
# -------------------- VIDEO MATCHING SYSTEM (SIMPLIFIED BERT APPROACH) --------------------
993993
class VideoMatchingSystem:
994-
def __init__(self, video_file_path: str = "D:\\RHL-WH\\RHL-FASTAPI\\FILES\\video_link_topic.xlsx"):
994+
def __init__(self, video_file_path: str = "./FILES/video_link_topic.xlsx"):
995995
"""Initialize the simplified video matching system using BERT similarity"""
996996
self.video_file_path = video_file_path
997997
self.topic_list = [] # List of topic strings
@@ -1081,11 +1081,9 @@ def find_relevant_video(self, answer: str) -> Optional[str]:
10811081

10821082
def _verify_with_llm(self, answer: str, description: str) -> bool:
10831083
"""Use Gemini to verify if the video description is contextually relevant to the answer"""
1084-
prompt = f"""Analyze if the video description is contextually relevant to the medical answer.
1084+
prompt = f"""Analyze if the video description majorly aligns with the medical answer with reference to the below rules
10851085
1086-
Medical Answer: {answer}
10871086
1088-
Video Description: {description}
10891087
10901088
Question: Is this video description DIRECTLY and STRONGLY related to the medical answer?
10911089
@@ -1098,7 +1096,9 @@ def _verify_with_llm(self, answer: str, description: str) -> bool:
10981096
- Answer about "eye care for newborns" + Description "video about applying eye medication to prevent infections" → YES
10991097
- Answer about "eye care for newborns" + Description "video about umbilical cord care procedures" → NO
11001098
- Answer about "temperature measurement" + Description "video about using thermometer to check baby temperature" → YES
1099+
Medical Answer: ```{answer}```
11011100
1101+
Video Description: ```{description}```
11021102
Response (YES/NO only):"""
11031103

11041104
try:
@@ -1111,7 +1111,7 @@ def _verify_with_llm(self, answer: str, description: str) -> bool:
11111111

11121112
# -------------------- CACHE SYSTEM (BERT + LLM APPROACH) --------------------
11131113
class CacheSystem:
1114-
def __init__(self, cache_file_path: str = "D:\\RHL-WH\\RHL-FASTAPI\\FILES\\cache_questions.xlsx"):
1114+
def __init__(self, cache_file_path: str = "./FILES/cache_questions.xlsx"):
11151115
"""Initialize the cache system using BERT similarity + LLM verification"""
11161116
self.cache_file_path = cache_file_path
11171117
self.question_list = [] # List of cached questions
@@ -1185,7 +1185,7 @@ def check_cache(self, reformulated_query: str) -> Optional[str]:
11851185
print(f"[CACHE_SYSTEM] Best cached question: {self.question_list[best_idx][:100]}...")
11861186

11871187
# Step 2: Combined LLM Verification + Reframing (only for top match)
1188-
if best_similarity >= 0.4: # Higher threshold for cache (more strict)
1188+
if best_similarity >= 0.6: # Higher threshold for cache (more strict)
11891189
print("[CACHE_SYSTEM] Step 2: Combined LLM verification and reframing...")
11901190
llm_start = time.perf_counter()
11911191

chat_history.db

124 KB
Binary file not shown.

0 commit comments

Comments
 (0)