@@ -18,8 +18,6 @@
 if TYPE_CHECKING:
     from lsprotocol import types
 
-    from codeflash.models.models import GeneratedTestsList, OptimizationSet
-
 
 @dataclass
 class OptimizableFunctionsParams:
@@ -178,67 +176,6 @@ def provide_api_key(server: CodeflashLanguageServer, params: ProvideApiKeyParams
         return {"status": "error", "message": "something went wrong while saving the api key"}
 
 
-@server.feature("prepareOptimization")
-def prepare_optimization(server: CodeflashLanguageServer, params: FunctionOptimizationParams) -> dict[str, str]:
-    current_function = server.optimizer.current_function_being_optimized
-
-    module_prep_result = server.optimizer.prepare_module_for_optimization(current_function.file_path)
-    validated_original_code, original_module_ast = module_prep_result
-
-    function_optimizer = server.optimizer.create_function_optimizer(
-        current_function,
-        function_to_optimize_source_code=validated_original_code[current_function.file_path].source_code,
-        original_module_ast=original_module_ast,
-        original_module_path=current_function.file_path,
-    )
-
-    server.optimizer.current_function_optimizer = function_optimizer
-    if not function_optimizer:
-        return {"functionName": params.functionName, "status": "error", "message": "No function optimizer found"}
-
-    initialization_result = function_optimizer.can_be_optimized()
-    if not is_successful(initialization_result):
-        return {"functionName": params.functionName, "status": "error", "message": initialization_result.failure()}
-
-    return {"functionName": params.functionName, "status": "success", "message": "Optimization preparation completed"}
-
-
-@server.feature("generateTests")
-def generate_tests(server: CodeflashLanguageServer, params: FunctionOptimizationParams) -> dict[str, str]:
-    function_optimizer = server.optimizer.current_function_optimizer
-    if not function_optimizer:
-        return {"functionName": params.functionName, "status": "error", "message": "No function optimizer found"}
-
-    initialization_result = function_optimizer.can_be_optimized()
-    if not is_successful(initialization_result):
-        return {"functionName": params.functionName, "status": "error", "message": initialization_result.failure()}
-
-    should_run_experiment, code_context, original_helper_code = initialization_result.unwrap()
-
-    test_setup_result = function_optimizer.generate_and_instrument_tests(
-        code_context, should_run_experiment=should_run_experiment
-    )
-    if not is_successful(test_setup_result):
-        return {"functionName": params.functionName, "status": "error", "message": test_setup_result.failure()}
-    generated_tests_list: GeneratedTestsList
-    optimizations_set: OptimizationSet
-    generated_tests_list, _, concolic__test_str, optimizations_set = test_setup_result.unwrap()
-
-    generated_tests: list[str] = [
-        generated_test.generated_original_test_source for generated_test in generated_tests_list.generated_tests
-    ]
-    optimizations_dict = {
-        candidate.optimization_id: {"source_code": candidate.source_code.markdown, "explanation": candidate.explanation}
-        for candidate in optimizations_set.control + optimizations_set.experiment
-    }
-
-    return {
-        "functionName": params.functionName,
-        "status": "success",
-        "message": {"generated_tests": generated_tests, "optimizations": optimizations_dict},
-    }
-
-
 @server.feature("performFunctionOptimization")
 def perform_function_optimization(  # noqa: PLR0911
     server: CodeflashLanguageServer, params: FunctionOptimizationParams
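
Both the two handlers deleted here and the surviving `performFunctionOptimization` handler follow the same registration pattern: a custom request name passed to `@server.feature(...)`, a handler that receives the language server plus a params object, and a plain status dict returned as the response. Below is a minimal, self-contained sketch of that pattern, assuming pygls 1.x; the server name and the stub handler body are illustrative placeholders, not codeflash's actual implementation.

```python
# A minimal sketch of the @server.feature registration pattern, assuming pygls 1.x.
# "example-server" and the stub handler body are illustrative, not codeflash code.
from typing import Any

from pygls.server import LanguageServer

server = LanguageServer("example-server", "v0.1")


@server.feature("performFunctionOptimization")
def perform_function_optimization(ls: LanguageServer, params: Any) -> dict[str, str]:
    # pygls delivers params of custom requests as an attribute-style object,
    # so the camelCase field sent by the client can be read directly.
    function_name = getattr(params, "functionName", "<unknown>")
    # Stub response in the same {functionName, status, message} shape seen in the diff.
    return {"functionName": function_name, "status": "success", "message": "stub handler"}


if __name__ == "__main__":
    server.start_io()  # serve over stdio, as editors typically launch LSP servers
```

A client invokes such a handler by sending a request whose method is the custom feature name (here `performFunctionOptimization`) over the same connection it already uses for standard LSP traffic.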