Merged
Changes from 23 commits

Commits (27)
d8a7255
lsp silent logs
mohammedahmed18 Sep 5, 2025
9f8b100
override other log methods and log serialized lsp messages
mohammedahmed18 Sep 6, 2025
23c75fe
send the module root to the lsp client
mohammedahmed18 Sep 9, 2025
7f87a12
Merge branch 'main' into lsp/verbose-quiet-logs
mohammedahmed18 Sep 9, 2025
bae3504
lsp messages
mohammedahmed18 Sep 10, 2025
92c8caa
Merge branch 'lsp/verbose-quiet-logs' of github.com:codeflash-ai/code…
mohammedahmed18 Sep 10, 2025
0f2a8d1
code print over lsp
mohammedahmed18 Sep 10, 2025
b9b4664
more enhancements
mohammedahmed18 Sep 11, 2025
3260e4a
more enhancements
mohammedahmed18 Sep 14, 2025
f630096
cf optimization
mohammedahmed18 Sep 14, 2025
2422e9a
log tags for lsp
mohammedahmed18 Sep 14, 2025
3c5c706
better markdown support for lsp message logging
mohammedahmed18 Sep 15, 2025
5dc178c
simple markdown table
mohammedahmed18 Sep 15, 2025
8d54ab6
force lsp log (tag)
mohammedahmed18 Sep 15, 2025
f766ac6
Merge branch 'main' of github.com:codeflash-ai/codeflash into lsp/ver…
mohammedahmed18 Sep 16, 2025
aab0aa4
fixes for cli console logs
mohammedahmed18 Sep 16, 2025
47dfc3c
small fixes
mohammedahmed18 Sep 16, 2025
a78ea5f
small fix
mohammedahmed18 Sep 16, 2025
a59a77b
it should work this time
mohammedahmed18 Sep 16, 2025
cd42934
prevent worktree log in lsp
mohammedahmed18 Sep 16, 2025
42c4652
logging enhancement
mohammedahmed18 Sep 16, 2025
06830ed
file name for best candidate
mohammedahmed18 Sep 16, 2025
a07b67d
reminder
mohammedahmed18 Sep 16, 2025
06de6d9
lsp logs formatting and small fixes
mohammedahmed18 Sep 17, 2025
231352f
typo
mohammedahmed18 Sep 17, 2025
3a30d9e
fixes for the api key and the lsp gracefull shutdown
mohammedahmed18 Sep 18, 2025
6e17f04
Merge branch 'main' into lsp/verbose-quiet-logs
Saga4 Sep 18, 2025
13 changes: 7 additions & 6 deletions codeflash/api/aiservice.py
@@ -133,7 +133,7 @@ def optimize_python_code( # noqa: D417
"repo_name": git_repo_name,
}

logger.info("Generating optimized candidates…")
logger.info("!lsp|tags|Generating optimized candidates…")
Contributor:

Maybe we can just break this into a JSON object with a "message" component; that would be easy for anyone to use.
Or let's just drop tags, as it's confusing.

Contributor Author:

@Saga4 The idea of tags is to reuse the same message in the console and in the LSP. Sending JSON would mean rewriting all the logger lines, so I think this is the alternative option. Dropping tags would just leave us with no control over how the messages are displayed in the extension (the markdown).

Contributor:

Okay, then let's remove "tags" as the delimiter, since that's not helping; the pipe alone is enough, I guess.

Contributor Author:

Yeah, right. That would be cleaner.

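For illustration only (this sketch is not part of the PR's diff): one way a pipe-delimited prefix such as `!lsp|tags|` could be split from the human-readable message before re-emitting it, assuming the prefix is terminated by the literal `tags` segment as in the diff above. The function name and return shape are hypothetical.

# Hypothetical helper, not from the PR: splits a pipe-delimited tag prefix such as
# "!lsp|tags|Generating optimized candidates…" into its tags and the plain message.
def split_tagged_message(msg: str) -> tuple[list[str], str]:
    delimiter = "|tags|"  # assumed terminator of the tag prefix, per the diff above
    if delimiter not in msg:
        return [], msg  # untagged messages pass through unchanged
    prefix, text = msg.split(delimiter, 1)
    return prefix.split("|"), text

# Example: split_tagged_message("!lsp|tags|Generated 5 candidate optimizations.")
# returns (["!lsp"], "Generated 5 candidate optimizations.")
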
console.rule()
try:
response = self.make_ai_service_request("/optimize", payload=payload, timeout=600)
@@ -144,10 +144,10 @@ def optimize_python_code( # noqa: D417

if response.status_code == 200:
optimizations_json = response.json()["optimizations"]
logger.info(f"Generated {len(optimizations_json)} candidate optimizations.")
logger.info(f"!lsp|tags|Generated {len(optimizations_json)} candidate optimizations.")
console.rule()
end_time = time.perf_counter()
logger.debug(f"Generating optimizations took {end_time - start_time:.2f} seconds.")
logger.debug(f"!lsp|tags|Generating optimizations took {end_time - start_time:.2f} seconds.")
Contributor:

I think the message could be "Generating possible optimizations took {end_time - start_time:.2f} seconds."
or "Candidate generation for optimizations took {end_time - start_time:.2f} seconds."

return self._get_valid_candidates(optimizations_json)
try:
error = response.json()["error"]
@@ -194,7 +194,6 @@ def optimize_python_code_line_profiler( # noqa: D417
"lsp_mode": is_LSP_enabled(),
}

logger.info("Generating optimized candidates…")
console.rule()
if line_profiler_results == "":
logger.info("No LineProfiler results were provided, Skipping optimization.")
@@ -209,7 +208,9 @@ def optimize_python_code_line_profiler( # noqa: D417

if response.status_code == 200:
optimizations_json = response.json()["optimizations"]
logger.info(f"Generated {len(optimizations_json)} candidate optimizations using line profiler information.")
logger.info(
f"!lsp|tags|Generated {len(optimizations_json)} candidate optimizations using line profiler information."
)
console.rule()
return self._get_valid_candidates(optimizations_json)
try:
@@ -331,7 +332,7 @@ def get_new_explanation( # noqa: D417
"original_explanation": original_explanation,
"dependency_code": dependency_code,
}
logger.info("Generating explanation")
logger.info("loading|tags|Generating explanation")
console.rule()
try:
response = self.make_ai_service_request("/explain", payload=payload, timeout=60)
38 changes: 34 additions & 4 deletions codeflash/cli_cmds/console.py
@@ -1,10 +1,9 @@
from __future__ import annotations

import logging
import os
from contextlib import contextmanager
from itertools import cycle
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Optional

from rich.console import Console
from rich.logging import RichHandler
@@ -20,17 +19,22 @@

from codeflash.cli_cmds.console_constants import SPINNER_TYPES
from codeflash.cli_cmds.logging_config import BARE_LOGGING_FORMAT
from codeflash.lsp.helpers import is_LSP_enabled
from codeflash.lsp.lsp_logger import enhanced_log
from codeflash.lsp.lsp_message import LspCodeMessage, LspTextMessage

if TYPE_CHECKING:
from collections.abc import Generator

from rich.progress import TaskID

from codeflash.lsp.lsp_message import LspMessage

DEBUG_MODE = logging.getLogger().getEffectiveLevel() == logging.DEBUG

console = Console()

if os.getenv("CODEFLASH_LSP"):
if is_LSP_enabled():
console.quiet = True

logging.basicConfig(
@@ -42,6 +46,24 @@
logger = logging.getLogger("rich")
logging.getLogger("parso").setLevel(logging.WARNING)

# override the logger to reformat the messages for the lsp
for level in ("info", "debug", "warning", "error"):
real_fn = getattr(logger, level)
setattr(
logger,
level,
lambda msg, *args, _real_fn=real_fn, _level=level, **kwargs: enhanced_log(
msg, _real_fn, _level, *args, **kwargs
),
)


def lsp_log(message: LspMessage) -> None:
if not is_LSP_enabled():
return
json_msg = message.serialize()
logger.info(json_msg)


def paneled_text(
text: str, panel_args: dict[str, str | bool] | None = None, text_args: dict[str, str] | None = None
@@ -58,7 +80,10 @@ def paneled_text(
console.print(panel)


def code_print(code_str: str) -> None:
def code_print(code_str: str, file_name: Optional[str] = None, function_name: Optional[str] = None) -> None:
if is_LSP_enabled():
lsp_log(LspCodeMessage(code=code_str, file_name=file_name, function_name=function_name))
return
"""Print code with syntax highlighting."""
from rich.syntax import Syntax

@@ -79,6 +104,11 @@ def progress_bar(
If revert_to_print is True, falls back to printing a single logger.info message
instead of showing a progress bar.
"""
if is_LSP_enabled():
lsp_log(LspTextMessage(text=message, takes_time=True))
yield
return

if revert_to_print:
logger.info(message)

2 changes: 1 addition & 1 deletion codeflash/code_utils/git_worktree_utils.py
@@ -62,7 +62,7 @@ def create_detached_worktree(module_root: Path) -> Optional[Path]:
)

if not uni_diff_text.strip():
logger.info("No uncommitted changes to copy to worktree.")
logger.info("!lsp|tags|No uncommitted changes to copy to worktree.")
return worktree_dir

# Write the diff to a temporary file
6 changes: 3 additions & 3 deletions codeflash/discovery/functions_to_optimize.py
@@ -173,15 +173,15 @@ def get_functions_to_optimize(
with warnings.catch_warnings():
warnings.simplefilter(action="ignore", category=SyntaxWarning)
if optimize_all:
logger.info("Finding all functions in the module '%s'…", optimize_all)
logger.info("!lsp|tags|Finding all functions in the module '%s'…", optimize_all)
console.rule()
functions = get_all_files_and_functions(Path(optimize_all))
elif replay_test:
functions, trace_file_path = get_all_replay_test_functions(
replay_test=replay_test, test_cfg=test_cfg, project_root_path=project_root
)
elif file is not None:
logger.info("Finding all functions in the file '%s'…", file)
logger.info("!lsp|tags|Finding all functions in the file '%s'…", file)
console.rule()
functions = find_all_functions_in_file(file)
if only_get_this_function is not None:
@@ -219,7 +219,7 @@ def get_functions_to_optimize(
functions, test_cfg.tests_root, ignore_paths, project_root, module_root, previous_checkpoint_functions
)

logger.info(f"Found {functions_count} function{'s' if functions_count > 1 else ''} to optimize")
logger.info(f"!lsp|tags|Found {functions_count} function{'s' if functions_count > 1 else ''} to optimize")
if optimize_all:
three_min_in_ns = int(1.8e11)
console.rule()
38 changes: 22 additions & 16 deletions codeflash/lsp/beta.py
@@ -11,6 +11,7 @@

from codeflash.api.cfapi import get_codeflash_api_key, get_user_id
from codeflash.cli_cmds.cli import process_pyproject_config
from codeflash.cli_cmds.console import code_print
from codeflash.code_utils.git_worktree_utils import (
create_diff_patch_from_worktree,
get_patches_metadata,
@@ -157,20 +158,6 @@ def initialize_function_optimization(
return {"functionName": params.functionName, "status": "success"}


@server.feature("discoverFunctionTests")
def discover_function_tests(server: CodeflashLanguageServer, params: FunctionOptimizationParams) -> dict[str, str]:
fto = server.optimizer.current_function_being_optimized
optimizable_funcs = {fto.file_path: [fto]}

devnull_writer = open(os.devnull, "w") # noqa
with contextlib.redirect_stdout(devnull_writer):
function_to_tests, num_discovered_tests = server.optimizer.discover_tests(optimizable_funcs)

server.optimizer.discovered_tests = function_to_tests

return {"functionName": params.functionName, "status": "success", "discovered_tests": num_discovered_tests}


@server.feature("validateProject")
def validate_project(server: CodeflashLanguageServer, _params: FunctionOptimizationParams) -> dict[str, str]:
from codeflash.cli_cmds.cmd_init import is_valid_pyproject_toml
@@ -194,7 +181,7 @@ def validate_project(server: CodeflashLanguageServer, _params: FunctionOptimizat
except Exception:
return {"status": "error", "message": "Repository has no commits (unborn HEAD)"}

return {"status": "success"}
return {"status": "success", "moduleRoot": args.module_root}


def _initialize_optimizer_if_api_key_is_valid(server: CodeflashLanguageServer) -> dict[str, str]:
Expand Down Expand Up @@ -300,6 +287,12 @@ def perform_function_optimization( # noqa: PLR0911
}

module_prep_result = server.optimizer.prepare_module_for_optimization(current_function.file_path)
if not module_prep_result:
return {
"functionName": params.functionName,
"status": "error",
"message": "Failed to prepare module for optimization",
}

validated_original_code, original_module_ast = module_prep_result

@@ -308,7 +301,7 @@
function_to_optimize_source_code=validated_original_code[current_function.file_path].source_code,
original_module_ast=original_module_ast,
original_module_path=current_function.file_path,
function_to_tests=server.optimizer.discovered_tests or {},
function_to_tests={},
)

server.optimizer.current_function_optimizer = function_optimizer
@@ -321,6 +314,19 @@

should_run_experiment, code_context, original_helper_code = initialization_result.unwrap()

code_print(
code_context.read_writable_code.flat,
file_name=current_function.file_path,
function_name=current_function.function_name,
)

optimizable_funcs = {current_function.file_path: [current_function]}

devnull_writer = open(os.devnull, "w") # noqa
with contextlib.redirect_stdout(devnull_writer):
function_to_tests, num_discovered_tests = server.optimizer.discover_tests(optimizable_funcs)
function_optimizer.function_to_tests = function_to_tests

test_setup_result = function_optimizer.generate_and_instrument_tests(
code_context, should_run_experiment=should_run_experiment
)
52 changes: 52 additions & 0 deletions codeflash/lsp/helpers.py
@@ -1,7 +1,59 @@
import os
import re
from functools import lru_cache

from rich.tree import Tree

from codeflash.models.test_type import TestType

_double_quote_pat = re.compile(r'"(.*?)"')
_single_quote_pat = re.compile(r"'(.*?)'")
worktree_path_regex = re.compile(r'\/[^"]*worktrees\/[^"]\S*')


@lru_cache(maxsize=1)
def is_LSP_enabled() -> bool:
return os.getenv("CODEFLASH_LSP", default="false").lower() == "true"


def tree_to_markdown(tree: Tree, level: int = 0) -> str:
"""Convert a rich Tree into a Markdown bullet list."""
indent = " " * level
if level == 0:
lines: list[str] = [f"{indent}### {tree.label}"]
else:
lines: list[str] = [f"{indent}- {tree.label}"]
for child in tree.children:
lines.extend(tree_to_markdown(child, level + 1).splitlines())
return "\n".join(lines)


def report_to_markdown_table(report: dict[TestType, dict[str, int]], title: str) -> str:
lines = ["| Test Type | Passed ✅ | Failed ❌ |", "|-----------|--------|--------|"]
for test_type in TestType:
if test_type is TestType.INIT_STATE_TEST:
continue
passed = report[test_type]["passed"]
failed = report[test_type]["failed"]
if passed == 0 and failed == 0:
continue
lines.append(f"| {test_type.to_name()} | {passed} | {failed} |")
table = "\n".join(lines)
return f"### {title}\n{table}"


def simplify_worktree_paths(msg: str, highlight: bool = True) -> str: # noqa: FBT001, FBT002
path_in_msg = worktree_path_regex.search(msg)
if path_in_msg:
last_part_of_path = path_in_msg.group(0).split("/")[-1]
if highlight:
last_part_of_path = f"`{last_part_of_path}`"
return msg.replace(path_in_msg.group(0), last_part_of_path)
return msg


def replace_quotes_with_backticks(text: str) -> str:
# double-quoted strings
text = _double_quote_pat.sub(r"`\1`", text)
# single-quoted strings
return _single_quote_pat.sub(r"`\1`", text)