Merged
codeflash/api/aiservice.py (10 additions, 0 deletions)
@@ -11,6 +11,7 @@

from codeflash.cli_cmds.console import console, logger
from codeflash.code_utils.env_utils import get_codeflash_api_key
from codeflash.code_utils.git_utils import get_last_commit_author_if_pr_exists, get_repo_owner_and_name
from codeflash.models.models import OptimizedCandidate
from codeflash.telemetry.posthog_cf import ph
from codeflash.version import __version__ as codeflash_version
@@ -97,6 +98,12 @@ def optimize_python_code( # noqa: D417

"""
start_time = time.perf_counter()
try:
git_repo_owner, git_repo_name = get_repo_owner_and_name()
except Exception as e:
logger.warning(f"Could not determine repo owner and name: {e}")
git_repo_owner, git_repo_name = None, None

payload = {
"source_code": source_code,
"dependency_code": dependency_code,
@@ -105,6 +112,9 @@
"python_version": platform.python_version(),
"experiment_metadata": experiment_metadata,
"codeflash_version": codeflash_version,
"current_username": get_last_commit_author_if_pr_exists(None),
"repo_owner": git_repo_owner,
"repo_name": git_repo_name,
}

logger.info("Generating optimized candidates…")
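The repo lookup above is wrapped in a broad try/except so that candidate generation still proceeds when codeflash runs outside a git checkout. A minimal, self-contained sketch of that fallback pattern, using a stand-in helper that always fails in place of the real get_repo_owner_and_name, might look like this:

import logging

logger = logging.getLogger(__name__)


def get_repo_owner_and_name() -> tuple[str, str]:
    # Stand-in for codeflash.code_utils.git_utils.get_repo_owner_and_name;
    # it fails unconditionally to simulate running outside a git repository.
    raise RuntimeError("not a git repository")


try:
    git_repo_owner, git_repo_name = get_repo_owner_and_name()
except Exception as e:
    # The request should still be sent; repo metadata simply stays empty.
    logger.warning(f"Could not determine repo owner and name: {e}")
    git_repo_owner, git_repo_name = None, None

print({"repo_owner": git_repo_owner, "repo_name": git_repo_name})
# -> {'repo_owner': None, 'repo_name': None}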
codeflash/api/cfapi.py (11 additions, 0 deletions)
@@ -224,3 +224,14 @@ def add_code_context_hash(code_context_hash: str) -> None:
"POST",
{"owner": owner, "repo": repo, "pr_number": pr_number, "code_hash": code_context_hash},
)


def mark_optimization_success(trace_id: str, *, is_optimization_found: bool) -> Response:
    """Mark an optimization event as successful or not.

    :param trace_id: The unique identifier for the optimization event.
    :param is_optimization_found: Boolean indicating whether an optimization was found.
    :return: The response object from the API.
    """
    payload = {"trace_id": trace_id, "is_optimization_found": is_optimization_found}
    return make_cfapi_request(endpoint="/mark-as-success", method="POST", payload=payload)
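A usage sketch for the new endpoint wrapper, not part of the diff: the trace id below is made up, and the status check assumes make_cfapi_request returns a requests.Response.

from codeflash.api.cfapi import mark_optimization_success

response = mark_optimization_success(
    trace_id="example-trace-id",  # hypothetical; real callers pass their function_trace_id
    is_optimization_found=True,
)
if not response.ok:
    print(f"Failed to mark optimization as successful: HTTP {response.status_code}")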
codeflash/code_utils/git_utils.py (18 additions, 0 deletions)
@@ -1,5 +1,6 @@
from __future__ import annotations

import os
import shutil
import subprocess
import sys
@@ -176,3 +177,20 @@ def remove_git_worktrees(worktree_root: Path | None, worktrees: list[Path]) -> None:
logger.warning(f"Error removing worktrees: {e}")
if worktree_root:
shutil.rmtree(worktree_root)


def get_last_commit_author_if_pr_exists(repo: Repo | None = None) -> str | None:
    """Return the author name of the last commit on the current branch if PR_NUMBER is set.

    Otherwise, return None.
    """
    if "PR_NUMBER" not in os.environ:
        return None
    try:
        repository: Repo = repo if repo else git.Repo(search_parent_directories=True)
        last_commit = repository.head.commit
    except Exception:
        logger.exception("Failed to get last commit author.")
        return None
    else:
        return last_commit.author.name
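The helper is gated purely on the PR_NUMBER environment variable, which CI sets for pull-request runs. An illustrative sketch (the PR number is made up, and the result depends on the repository the code runs in):

import os

from codeflash.code_utils.git_utils import get_last_commit_author_if_pr_exists

# Without PR_NUMBER, the helper short-circuits and returns None.
os.environ.pop("PR_NUMBER", None)
assert get_last_commit_author_if_pr_exists() is None

# With PR_NUMBER set, it reads the author of the HEAD commit (or returns None
# and logs an exception if no git repository is found).
os.environ["PR_NUMBER"] = "123"
print(get_last_commit_author_if_pr_exists())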
codeflash/optimization/function_optimizer.py (6 additions, 1 deletion)
@@ -19,7 +19,7 @@
from rich.tree import Tree

from codeflash.api.aiservice import AiServiceClient, LocalAiServiceClient
from codeflash.api.cfapi import add_code_context_hash
from codeflash.api.cfapi import add_code_context_hash, mark_optimization_success
from codeflash.benchmarking.utils import process_benchmark_data
from codeflash.cli_cmds.console import code_print, console, logger, progress_bar
from codeflash.code_utils import env_utils
@@ -390,6 +390,11 @@ def optimize_function(self) -> Result[BestOptimization, str]: # noqa: PLR0911
                        original_helper_code,
                        self.function_to_optimize.file_path,
                    )
                else:
                    # Mark optimization success since no PR will be created
                    mark_optimization_success(
                        trace_id=self.function_trace_id, is_optimization_found=best_optimization is not None
                    )
                self.log_successful_optimization(explanation, generated_tests, exp_type)

# Add function to code context hash if in gh actions
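The new else branch only fires when codeflash is not going to open a PR, so the success or failure of the trace is reported directly. A sketch of that intent in isolation, with a hypothetical report_result helper standing in for the optimizer method and the API call mocked out:

from unittest.mock import patch

from codeflash.api import cfapi


def report_result(trace_id: str, best_optimization: object | None, *, will_create_pr: bool) -> None:
    # Hypothetical helper mirroring the branch added in optimize_function.
    if not will_create_pr:
        cfapi.mark_optimization_success(trace_id=trace_id, is_optimization_found=best_optimization is not None)


with patch.object(cfapi, "mark_optimization_success") as mocked:
    report_result("example-trace-id", best_optimization=object(), will_create_pr=False)
    mocked.assert_called_once_with(trace_id="example-trace-id", is_optimization_found=True)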