@@ -802,7 +802,7 @@ def get_async_explainer():
802802 logger .debug (f"Get message for { div_id } " )
803803 return json .dumps ({"mess" : mess , "user" : "" , "answer" : "" , "responses" : {}})
804804
805-
805+ #get question text, code, and answer choices for an MCQ
806806def _get_mcq_context (div_id ):
807807 q = db (db .questions .name == div_id ).select ().first ()
808808 if not q :
@@ -824,6 +824,9 @@ def _get_mcq_context(div_id):
824824 logger .warning (f"Could not parse choices for { div_id } : { e } " )
825825 return question , code , choices
826826
827+
828+ #handle async peer instruction reflection using an LLM:
829+ #logs student messages and returns an LLM peer-style reply
827830@auth .requires_login ()
828831def get_async_llm_reflection ():
829832 logger .warning ("LLM REFLECTION CALLED" )
@@ -948,9 +951,9 @@ def get_async_llm_reflection():
948951 try :
949952 db .useinfo .insert (
950953 course_id = auth .user .course_name ,
951- sid = "llm_peer" ,
954+ sid = auth . user . username ,
952955 div_id = div_id ,
953- event = "sendmessage " ,
956+ event = "llm_peer_sendmessage " ,
954957 act = f"to: student:{ reply } " ,
955958 timestamp = datetime .datetime .utcnow (),
956959 )
@@ -981,7 +984,7 @@ def _get_user_answer(div_id, s):
981984 return ans .act .split (":" )[1 ]
982985 else :
983986 return ""
984-
987+ #check if the student has already submitted a reflection for the question
985988def _has_reflection (div_id , sid ):
986989 row = (
987990 db (
@@ -1041,11 +1044,11 @@ def send_lti_scores():
10411044 return json .dumps ("success" )
10421045
10431046
1044-
1047+ #determine whether LLM-based async peer discussion is enabled for this course, based on the course-wide API key
def _llm_enabled():
    """Return True when a course-wide OpenAI key is configured.

    The presence of the key is what turns on LLM-based async peer
    discussion for the course; an empty/missing key disables it.
    """
    key = _get_course_openai_key()
    return bool(key)
10471050
1048-
1051+ #fetch the course-wide OpenAI API key used to enable LLM-based async peer discussion (only works for OpenAI currently)
10491052def _get_course_openai_key ():
10501053 try :
10511054 token_record = asyncio .get_event_loop ().run_until_complete (
@@ -1057,6 +1060,8 @@ def _get_course_openai_key():
10571060 logger .exception ("Failed to fetch course-wide OpenAI token for peer LLM" )
10581061 return ""
10591062
1063+
1064+ #call the OpenAI chat completion API using the course-wide token and return the model's reply
10601065def _call_openai (messages ):
10611066 """
10621067 Minimal HTTP call using the instructor-provided course-wide OpenAI token.
0 commit comments