@@ -1049,15 +1049,38 @@ def _llm_enabled():
10491049 return bool (_get_course_openai_key ())
10501050
# Fetch the course-wide OpenAI API key used to enable LLM-based async peer
# discussion (currently only the "openai" provider is supported).
1052+ # def _get_course_openai_key():
1053+ # try:
1054+ # token_record = asyncio.get_event_loop().run_until_complete(
1055+ # fetch_api_token(course_id=auth.user.course_id, provider="openai")
1056+ # )
1057+ # if token_record and token_record.token:
1058+ # return token_record.token.strip()
1059+ # except Exception:
1060+ # logger.exception("Failed to fetch course-wide OpenAI token for peer LLM")
1061+ # return ""
def _get_course_openai_key():
    """Return the course-wide OpenAI API key for peer LLM discussion, or "".

    Resolves the course row by ``auth.user.course_name`` (the session's
    ``course_id`` may be stale, hence the explicit lookup) and fetches the
    stored "openai" API token for that course.  Any failure — missing course
    row, missing token, or an error during lookup — yields the empty string
    so the caller treats the feature as disabled.
    """
    try:
        course = db(
            db.courses.course_name == auth.user.course_name
        ).select().first()

        if not course:
            # Include the course name so the log line is actionable.
            logger.warning(
                "PEER LLM: no course row found for course_name=%s",
                auth.user.course_name,
            )
            return ""

        # Diagnostic trace belongs at DEBUG, not WARNING; lazy %-args avoid
        # formatting cost when the level is disabled.  (course is known
        # truthy here — the guard above already returned.)
        logger.debug(
            "PEER LLM course_name=%s session course_id=%s resolved course.id=%s",
            auth.user.course_name,
            auth.user.course_id,
            course.id,
        )

        # fetch_api_token is a coroutine; bridge into this sync handler.
        # NOTE(review): asyncio.get_event_loop() is deprecated when no loop
        # is running (3.10+) and raises inside a running loop — confirm the
        # serving framework guarantees a usable loop at this call site.
        token_record = asyncio.get_event_loop().run_until_complete(
            fetch_api_token(course_id=course.id, provider="openai")
        )

        if token_record and token_record.token:
            return token_record.token.strip()

    except Exception:
        # Best effort: the peer-LLM feature silently disables on any error.
        logger.exception("Failed to fetch course-wide OpenAI token for peer LLM")

    return ""
10621085
10631086
@@ -1088,4 +1111,4 @@ def _call_openai(messages):
10881111 logger .warning (f"PEER LLM CALL | provider=openai-course-token | model={ model } " )
10891112 resp .raise_for_status ()
10901113 data = resp .json ()
1091- return data ["choices" ][0 ]["message" ]["content" ].strip ()
1114+ return data ["choices" ][0 ]["message" ]["content" ].strip ()
0 commit comments