diff --git a/codeflash/api/aiservice.py b/codeflash/api/aiservice.py
index f7c5a425f..81ab84d42 100644
--- a/codeflash/api/aiservice.py
+++ b/codeflash/api/aiservice.py
@@ -11,6 +11,7 @@
 
 from codeflash.cli_cmds.console import console, logger
 from codeflash.code_utils.env_utils import get_codeflash_api_key
+from codeflash.code_utils.git_utils import get_last_commit_author_if_pr_exists, get_repo_owner_and_name
 from codeflash.models.models import OptimizedCandidate
 from codeflash.telemetry.posthog_cf import ph
 from codeflash.version import __version__ as codeflash_version
@@ -97,6 +98,12 @@ def optimize_python_code(  # noqa: D417
 
     """
     start_time = time.perf_counter()
+    try:
+        git_repo_owner, git_repo_name = get_repo_owner_and_name()
+    except Exception as e:
+        logger.warning(f"Could not determine repo owner and name: {e}")
+        git_repo_owner, git_repo_name = None, None
+
     payload = {
         "source_code": source_code,
         "dependency_code": dependency_code,
@@ -105,6 +112,9 @@
         "python_version": platform.python_version(),
         "experiment_metadata": experiment_metadata,
         "codeflash_version": codeflash_version,
+        "current_username": get_last_commit_author_if_pr_exists(None),
+        "repo_owner": git_repo_owner,
+        "repo_name": git_repo_name,
     }
 
     logger.info("Generating optimized candidates…")
diff --git a/codeflash/api/cfapi.py b/codeflash/api/cfapi.py
index 87c54b148..b908a57ea 100644
--- a/codeflash/api/cfapi.py
+++ b/codeflash/api/cfapi.py
@@ -224,3 +224,14 @@ def add_code_context_hash(code_context_hash: str) -> None:
         "POST",
         {"owner": owner, "repo": repo, "pr_number": pr_number, "code_hash": code_context_hash},
     )
+
+
+def mark_optimization_success(trace_id: str, *, is_optimization_found: bool) -> Response:
+    """Mark an optimization event as success or not.
+
+    :param trace_id: The unique identifier for the optimization event.
+    :param is_optimization_found: Boolean indicating whether the optimization was found.
+    :return: The response object from the API.
+    """
+    payload = {"trace_id": trace_id, "is_optimization_found": is_optimization_found}
+    return make_cfapi_request(endpoint="/mark-as-success", method="POST", payload=payload)
diff --git a/codeflash/code_utils/git_utils.py b/codeflash/code_utils/git_utils.py
index 875b261cd..78873fd28 100644
--- a/codeflash/code_utils/git_utils.py
+++ b/codeflash/code_utils/git_utils.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import os
 import shutil
 import subprocess
 import sys
@@ -176,3 +177,20 @@ def remove_git_worktrees(worktree_root: Path | None, worktrees: list[Path]) -> N
             logger.warning(f"Error removing worktrees: {e}")
     if worktree_root:
         shutil.rmtree(worktree_root)
+
+
+def get_last_commit_author_if_pr_exists(repo: Repo | None = None) -> str | None:
+    """Return the author's name of the last commit in the current branch if PR_NUMBER is set.
+
+    Otherwise, return None.
+    """
+    if "PR_NUMBER" not in os.environ:
+        return None
+    try:
+        repository: Repo = repo if repo else git.Repo(search_parent_directories=True)
+        last_commit = repository.head.commit
+    except Exception:
+        logger.exception("Failed to get last commit author.")
+        return None
+    else:
+        return last_commit.author.name
diff --git a/codeflash/optimization/function_optimizer.py b/codeflash/optimization/function_optimizer.py
index fd33b2386..f6c7661b4 100644
--- a/codeflash/optimization/function_optimizer.py
+++ b/codeflash/optimization/function_optimizer.py
@@ -19,7 +19,7 @@
 from rich.tree import Tree
 
 from codeflash.api.aiservice import AiServiceClient, LocalAiServiceClient
-from codeflash.api.cfapi import add_code_context_hash
+from codeflash.api.cfapi import add_code_context_hash, mark_optimization_success
 from codeflash.benchmarking.utils import process_benchmark_data
 from codeflash.cli_cmds.console import code_print, console, logger, progress_bar
 from codeflash.code_utils import env_utils
@@ -390,6 +390,11 @@ def optimize_function(self) -> Result[BestOptimization, str]:  # noqa: PLR0911
                         original_helper_code,
                         self.function_to_optimize.file_path,
                     )
+                else:
+                    # Mark optimization success since no PR will be created
+                    mark_optimization_success(
+                        trace_id=self.function_trace_id, is_optimization_found=best_optimization is not None
+                    )
                 self.log_successful_optimization(explanation, generated_tests, exp_type)
 
                 # Add function to code context hash if in gh actions