@@ -50,6 +50,25 @@ defmodule CadetWeb.AICodeAnalysisController do
5050 end
5151 end
5252
# Validates that every setting required for LLM-assisted grading is present.
#
# Takes the resolved API key (course-level or environment fallback) plus the
# course's model name, API URL, and course-level prompt. Returns `:ok` when
# all four are usable, or `{:error, message}` naming the first missing one.
#
# Fix over the previous version: the API key is now rejected when it is an
# empty string, not only when it is `nil`, matching the treatment of the
# other three parameters (a blank key would otherwise pass validation and
# only fail later at the HTTP request).
defp check_llm_grading_parameters(llm_api_key, llm_model, llm_api_url, llm_course_level_prompt) do
  cond do
    blank?(llm_api_key) ->
      {:error, "LLM API key is not configured for this course or in the environment"}

    blank?(llm_model) ->
      {:error, "LLM model is not configured for this course"}

    blank?(llm_api_url) ->
      {:error, "LLM API URL is not configured for this course"}

    blank?(llm_course_level_prompt) ->
      {:error, "LLM course-level prompt is not configured for this course"}

    true ->
      :ok
  end
end

# A configuration value is "blank" when it is nil or the empty string.
defp blank?(value), do: is_nil(value) or value == ""
71+
5372 @ doc """
5473 Fetches the question details and answers based on submissionid and questionid and generates AI-generated comments.
5574 """
@@ -69,26 +88,16 @@ defmodule CadetWeb.AICodeAnalysisController do
6988 decrypted_api_key = decrypt_llm_api_key ( course . llm_api_key )
7089 api_key = decrypted_api_key || Application . get_env ( :openai , :api_key )
7190
72- cond do
73- is_nil ( api_key ) ->
74- conn
75- |> put_status ( :internal_server_error )
76- |> text ( "No OpenAI API key configured" )
77-
78- is_nil ( course . llm_model ) or course . llm_model == "" ->
79- conn
80- |> put_status ( :internal_server_error )
81- |> text ( "No LLM model configured for this course" )
82-
83- is_nil ( course . llm_api_url ) or course . llm_api_url == "" ->
84- conn
85- |> put_status ( :internal_server_error )
86- |> text ( "No LLM API URL configured for this course" )
87-
88- is_nil ( course . llm_course_level_prompt ) or course . llm_course_level_prompt == "" ->
91+ case check_llm_grading_parameters (
92+ api_key ,
93+ course . llm_model ,
94+ course . llm_api_url ,
95+ course . llm_course_level_prompt
96+ ) do
97+ { :error , error_msg } ->
8998 conn
90- |> put_status ( :internal_server_error )
91- |> text ( "No course-level prompt configured for this course" )
99+ |> put_status ( :bad_request )
100+ |> text ( error_msg )
92101
93102 true ->
94103 case Assessments . get_answers_in_submission ( submission_id , question_id ) do
@@ -104,14 +113,16 @@ defmodule CadetWeb.AICodeAnalysisController do
104113 # and question since we filter to only 1 question)
105114 analyze_code (
106115 conn ,
107- hd ( answers ) ,
108- submission_id ,
109- question_id ,
110- api_key ,
111- course . llm_model ,
112- course . llm_api_url ,
113- course . llm_course_level_prompt ,
114- Assessments . get_llm_assessment_prompt ( question_id )
116+ % {
117+ answers: hd ( answers ) ,
118+ submission_id: submission_id ,
119+ question_id: question_id ,
120+ api_key: api_key ,
121+ llm_model: course . llm_model ,
122+ llm_api_url: course . llm_api_url ,
123+ llm_course_level_prompt: course . llm_course_level_prompt ,
124+ assessment_prompt: Assessments . get_llm_assessment_prompt ( question_id )
125+ }
115126 )
116127 end
117128
@@ -201,14 +212,16 @@ defmodule CadetWeb.AICodeAnalysisController do
201212
202213 defp analyze_code (
203214 conn ,
204- answer ,
205- submission_id ,
206- question_id ,
207- api_key ,
208- llm_model ,
209- llm_api_url ,
210- course_prompt ,
211- assessment_prompt
215+ % {
216+ answer: answer ,
217+ submission_id: submission_id ,
218+ question_id: question_id ,
219+ api_key: api_key ,
220+ llm_model: llm_model ,
221+ llm_api_url: llm_api_url ,
222+ course_prompt: course_prompt ,
223+ assessment_prompt: assessment_prompt
224+ }
212225 ) do
213226 formatted_answer =
214227 answer
0 commit comments