Skip to content

Commit 7d3de7c

Browse files
Merge branch 'vsc/environment-validation' of github.com:codeflash-ai/codeflash into vsc/environment-validation
2 parents 53660de + d598283 commit 7d3de7c

File tree

3 files changed

+24
-73
lines changed

codeflash/lsp/beta.py

Lines changed: 1 addition & 67 deletions
Original file line numberDiff line numberDiff line change
@@ -16,13 +16,10 @@
1616
from codeflash.discovery.functions_to_optimize import filter_functions, get_functions_within_git_diff
1717
from codeflash.either import is_successful
1818
from codeflash.lsp.server import CodeflashLanguageServer, CodeflashLanguageServerProtocol
19-
from codeflash.result.explanation import Explanation
2019

2120
if TYPE_CHECKING:
2221
from lsprotocol import types
2322

24-
from codeflash.models.models import GeneratedTestsList, OptimizationSet
25-
2623

2724
@dataclass
2825
class OptimizableFunctionsParams:
@@ -219,67 +216,6 @@ def provide_api_key(server: CodeflashLanguageServer, params: ProvideApiKeyParams
219216
return {"status": "error", "message": "something went wrong while saving the api key"}
220217

221218

222-
@server.feature("prepareOptimization")
223-
def prepare_optimization(server: CodeflashLanguageServer, params: FunctionOptimizationParams) -> dict[str, str]:
224-
current_function = server.optimizer.current_function_being_optimized
225-
226-
module_prep_result = server.optimizer.prepare_module_for_optimization(current_function.file_path)
227-
validated_original_code, original_module_ast = module_prep_result
228-
229-
function_optimizer = server.optimizer.create_function_optimizer(
230-
current_function,
231-
function_to_optimize_source_code=validated_original_code[current_function.file_path].source_code,
232-
original_module_ast=original_module_ast,
233-
original_module_path=current_function.file_path,
234-
)
235-
236-
server.optimizer.current_function_optimizer = function_optimizer
237-
if not function_optimizer:
238-
return {"functionName": params.functionName, "status": "error", "message": "No function optimizer found"}
239-
240-
initialization_result = function_optimizer.can_be_optimized()
241-
if not is_successful(initialization_result):
242-
return {"functionName": params.functionName, "status": "error", "message": initialization_result.failure()}
243-
244-
return {"functionName": params.functionName, "status": "success", "message": "Optimization preparation completed"}
245-
246-
247-
@server.feature("generateTests")
248-
def generate_tests(server: CodeflashLanguageServer, params: FunctionOptimizationParams) -> dict[str, str]:
249-
function_optimizer = server.optimizer.current_function_optimizer
250-
if not function_optimizer:
251-
return {"functionName": params.functionName, "status": "error", "message": "No function optimizer found"}
252-
253-
initialization_result = function_optimizer.can_be_optimized()
254-
if not is_successful(initialization_result):
255-
return {"functionName": params.functionName, "status": "error", "message": initialization_result.failure()}
256-
257-
should_run_experiment, code_context, original_helper_code = initialization_result.unwrap()
258-
259-
test_setup_result = function_optimizer.generate_and_instrument_tests(
260-
code_context, should_run_experiment=should_run_experiment
261-
)
262-
if not is_successful(test_setup_result):
263-
return {"functionName": params.functionName, "status": "error", "message": test_setup_result.failure()}
264-
generated_tests_list: GeneratedTestsList
265-
optimizations_set: OptimizationSet
266-
generated_tests_list, _, concolic__test_str, optimizations_set = test_setup_result.unwrap()
267-
268-
generated_tests: list[str] = [
269-
generated_test.generated_original_test_source for generated_test in generated_tests_list.generated_tests
270-
]
271-
optimizations_dict = {
272-
candidate.optimization_id: {"source_code": candidate.source_code.markdown, "explanation": candidate.explanation}
273-
for candidate in optimizations_set.control + optimizations_set.experiment
274-
}
275-
276-
return {
277-
"functionName": params.functionName,
278-
"status": "success",
279-
"message": {"generated_tests": generated_tests, "optimizations": optimizations_dict},
280-
}
281-
282-
283219
@server.feature("performFunctionOptimization")
284220
def perform_function_optimization( # noqa: PLR0911
285221
server: CodeflashLanguageServer, params: FunctionOptimizationParams
@@ -391,16 +327,14 @@ def perform_function_optimization( # noqa: PLR0911
391327

392328
server.show_message_log(f"Optimization completed for {params.functionName} with {speedup:.2f}x speedup", "Info")
393329

394-
explanation = best_optimization.candidate.explanation
395-
explanation_str = explanation.explanation_message() if isinstance(explanation, Explanation) else explanation
396330
return {
397331
"functionName": params.functionName,
398332
"status": "success",
399333
"message": "Optimization completed successfully",
400334
"extra": f"Speedup: {speedup:.2f}x faster",
401335
"optimization": optimized_source,
402336
"patch_file": str(patch_file),
403-
"explanation": explanation_str,
337+
"explanation": best_optimization.explanation_v2,
404338
}
405339
finally:
406340
cleanup_the_optimizer(server)

codeflash/models/models.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,7 @@ def __hash__(self) -> int:
9191

9292
class BestOptimization(BaseModel):
9393
candidate: OptimizedCandidate
94+
explanation_v2: Optional[str] = None
9495
helper_functions: list[FunctionSource]
9596
code_context: CodeOptimizationContext
9697
runtime: int
@@ -343,7 +344,7 @@ class TestsInFile:
343344
test_type: TestType
344345

345346

346-
@dataclass
347+
@dataclass(frozen=True)
347348
class OptimizedCandidate:
348349
source_code: CodeStringsMarkdown
349350
explanation: str

codeflash/optimization/function_optimizer.py

Lines changed: 21 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1158,7 +1158,6 @@ def find_and_process_best_optimization(
11581158
original_helper_code,
11591159
code_context,
11601160
)
1161-
self.log_successful_optimization(explanation, generated_tests, exp_type)
11621161
return best_optimization
11631162

11641163
def process_review(
@@ -1232,10 +1231,9 @@ def process_review(
12321231
file_path=explanation.file_path,
12331232
benchmark_details=explanation.benchmark_details,
12341233
)
1234+
self.log_successful_optimization(new_explanation, generated_tests, exp_type)
12351235

1236-
best_optimization.candidate.explanation = new_explanation
1237-
1238-
console.print(Panel(new_explanation_raw_str, title="Best Candidate Explanation", border_style="blue"))
1236+
best_optimization.explanation_v2 = new_explanation.explanation_message()
12391237

12401238
data = {
12411239
"original_code": original_code_combined,
@@ -1258,7 +1256,25 @@ def process_review(
12581256
data["git_remote"] = self.args.git_remote
12591257
check_create_pr(**data)
12601258
elif self.args.staging_review:
1261-
create_staging(**data)
1259+
response = create_staging(**data)
1260+
if response.status_code == 200:
1261+
staging_url = f"https://app.codeflash.ai/review-optimizations/{self.function_trace_id[:-4] + exp_type if self.experiment_id else self.function_trace_id}"
1262+
console.print(
1263+
Panel(
1264+
f"[bold green]✅ Staging created:[/bold green]\n[link={staging_url}]{staging_url}[/link]",
1265+
title="Staging Link",
1266+
border_style="green",
1267+
)
1268+
)
1269+
else:
1270+
console.print(
1271+
Panel(
1272+
f"[bold red]❌ Failed to create staging[/bold red]\nStatus: {response.status_code}",
1273+
title="Staging Error",
1274+
border_style="red",
1275+
)
1276+
)
1277+
12621278
else:
12631279
# Mark optimization success since no PR will be created
12641280
mark_optimization_success(

0 commit comments

Comments (0)