Merged
Commits (53)
0e75144
todo
aseembits93 Mar 5, 2025
a9f6196
boilerplate ready, need to fill in the gaps
aseembits93 Mar 6, 2025
9ebbd61
wip
aseembits93 Mar 6, 2025
371ae4c
Merge remote-tracking branch 'origin/main' into line-profiler
aseembits93 Mar 10, 2025
f493836
avoid instrument_codeflash_capture
aseembits93 Mar 11, 2025
0220ee6
working mvp, need to parse though
aseembits93 Mar 11, 2025
339362f
undo coverage util modification
aseembits93 Mar 11, 2025
2f5baa9
cleaning up
aseembits93 Mar 11, 2025
de43f6b
Merge remote-tracking branch 'origin/main' into line-profiler
aseembits93 Mar 11, 2025
d171ba7
adding some runconfigs
aseembits93 Mar 12, 2025
8646196
line profiler results are saved in a temp file, need to pass to ai se…
aseembits93 Mar 12, 2025
51b8e27
Merge remote-tracking branch 'origin/main' into line-profiler
aseembits93 Mar 12, 2025
4247780
working demo of new opt candidates with lineprof info
aseembits93 Mar 15, 2025
52715ba
Merge remote-tracking branch 'origin/main' into line-profiler
aseembits93 Mar 15, 2025
ca0540e
removing env files
aseembits93 Mar 16, 2025
0a78e7e
concurrent execution of new optim candidates, readonly context testing
aseembits93 Mar 17, 2025
93623cb
still debugging readonly context code
aseembits93 Mar 17, 2025
102de11
works, need to follow type hints
aseembits93 Mar 19, 2025
e292831
Merge remote-tracking branch 'origin/main' into line-profiler
aseembits93 Mar 19, 2025
040a747
exception handling when lprof fails
aseembits93 Mar 19, 2025
7a413f6
feedback, cleanup
aseembits93 Mar 21, 2025
127f466
todo, improve testing
aseembits93 Mar 21, 2025
e224234
cleaning
aseembits93 Mar 21, 2025
3b9bee0
redo newline
aseembits93 Mar 21, 2025
e6a9066
better test, improve testing
aseembits93 Mar 22, 2025
c805624
Merge remote-tracking branch 'origin/main' into line-profiler
aseembits93 Mar 24, 2025
0b77d2a
wip concurrent optimization loop
aseembits93 Mar 24, 2025
81a6b78
Merge remote-tracking branch 'origin/main' into line-profiler
aseembits93 Mar 25, 2025
89e72b7
refactored to make a new category for line profiler tests
aseembits93 Mar 26, 2025
c00e324
works for any level of nested function
aseembits93 Mar 26, 2025
7ffe25f
works, optimization list length will be incorrectly displayed as we a…
aseembits93 Mar 29, 2025
d24d24c
fix for indentation in line profiler output
aseembits93 Mar 29, 2025
82f4138
cleaning up
aseembits93 Mar 29, 2025
29f4c1a
wrote some tests for instrumentation+testrun+parsing, todo, write som…
aseembits93 Mar 29, 2025
862eae8
Merge branch 'main' into line-profiler
aseembits93 Mar 31, 2025
ee90a08
putting file restore in the finally block
aseembits93 Mar 31, 2025
e17106f
putting file restore in the finally block and also handling failures …
aseembits93 Mar 31, 2025
b3e38f2
quick fix for merge with main
aseembits93 Mar 31, 2025
4af8442
minor fixes
aseembits93 Mar 31, 2025
d8246fc
pathlib for r/w
aseembits93 Mar 31, 2025
97c3cac
set up empty line profile results if there's an error
aseembits93 Mar 31, 2025
6418e7d
better exception handling for line profiler
aseembits93 Mar 31, 2025
275b88b
better type hinting
aseembits93 Mar 31, 2025
4b20465
add line_profiler as a requirement in pyproject toml
aseembits93 Mar 31, 2025
00abefb
more tests
aseembits93 Apr 1, 2025
0ab9dbf
import order issue
aseembits93 Apr 1, 2025
7e764ec
Merge branch 'main' into line-profiler
misrasaurabh1 Apr 1, 2025
a628dae
formatting changes
aseembits93 Apr 1, 2025
2abbfba
moving line profiler instrument tests in a separate file
aseembits93 Apr 1, 2025
b8614e0
tests galore
aseembits93 Apr 1, 2025
ab06721
Merge remote-tracking branch 'origin/main' into line-profiler
aseembits93 Apr 1, 2025
b269f87
nested classes handled now
aseembits93 Apr 1, 2025
3cab8ea
change max workers to one
aseembits93 Apr 1, 2025
2 changes: 2 additions & 0 deletions .env
@@ -0,0 +1,2 @@
CODEFLASH_AIS_SERVER=local
Contributor

Never commit .env files.

Contributor Author

That's right. The reason I'm doing it right now is that I usually call git clean to remove the concolic test dirs, which accidentally deletes the env file(s). It won't be in the final PR, of course.

CODEFLASH_CFAPI_SERVER=prod
25 changes: 25 additions & 0 deletions .idea/runConfigurations/bubble_sort_deps_local.xml

Some generated files are not rendered by default.

25 changes: 25 additions & 0 deletions .idea/runConfigurations/main.xml

Some generated files are not rendered by default.

25 changes: 25 additions & 0 deletions .idea/runConfigurations/main_local.xml

Some generated files are not rendered by default.

66 changes: 66 additions & 0 deletions codeflash/api/aiservice.py
@@ -135,6 +135,72 @@ def optimize_python_code(
        console.rule()
        return []

    def optimize_python_code_line_profiler(
        self,
        source_code: str,
        dependency_code: str,
        trace_id: str,
        line_profiler_results: str,
        num_candidates: int = 10,
        experiment_metadata: ExperimentMetadata | None = None,
    ) -> list[OptimizedCandidate]:
        """Optimize the given python code for performance by making a request to the Django endpoint.

        Parameters
        ----------
        - source_code (str): The python code to optimize.
        - dependency_code (str): The dependency code used as read-only context for the optimization.
        - trace_id (str): Trace id of the optimization run.
        - line_profiler_results (str): Line profiler output for the function being optimized.
        - num_candidates (int): Number of optimization variants to generate. Default is 10.
        - experiment_metadata (ExperimentMetadata | None): Any available experiment metadata for this optimization.

        Returns
        -------
        - list[OptimizedCandidate]: A list of optimization candidates.

        """
        payload = {
            "source_code": source_code,
            "dependency_code": dependency_code,
            "num_variants": num_candidates,
            "line_profiler_results": line_profiler_results,
            "trace_id": trace_id,
            "python_version": platform.python_version(),
            "experiment_metadata": experiment_metadata,
            "codeflash_version": codeflash_version,
        }

        logger.info("Generating optimized candidates…")
        console.rule()
        try:
            response = self.make_ai_service_request("/optimize-line-profiler", payload=payload, timeout=600)
        except requests.exceptions.RequestException as e:
            logger.exception(f"Error generating optimized candidates: {e}")
            ph("cli-optimize-error-caught", {"error": str(e)})
            return []

        if response.status_code == 200:
            optimizations_json = response.json()["optimizations"]
            logger.info(f"Generated {len(optimizations_json)} candidates.")
            console.rule()
            return [
                OptimizedCandidate(
                    source_code=opt["source_code"],
                    explanation=opt["explanation"],
                    optimization_id=opt["optimization_id"],
                )
                for opt in optimizations_json
            ]
        try:
            error = response.json()["error"]
        except Exception:
            error = response.text
        logger.error(f"Error generating optimized candidates: {response.status_code} - {error}")
        ph("cli-optimize-error-response", {"response_status_code": response.status_code, "error": error})
        console.rule()
        return []


    def log_results(
        self,
        function_trace_id: str,
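For context, a minimal usage sketch of the new method (not part of the diff). The AiServiceClient class name is assumed from codeflash/api/aiservice.py and the inputs are toy values; the method posts the payload to /optimize-line-profiler and returns a list of OptimizedCandidate objects, or an empty list on any error.

# Hypothetical usage sketch: the client class name and all input values are assumptions.
from codeflash.api.aiservice import AiServiceClient

client = AiServiceClient()
candidates = client.optimize_python_code_line_profiler(
    source_code="def sorter(arr):\n    return sorted(arr)\n",  # toy function to optimize
    dependency_code="",                                        # no read-only helper context
    trace_id="00000000-0000-0000-0000-000000000000",           # placeholder trace id
    line_profiler_results="Timer unit: 1e-06 s ...",           # textual line_profiler report
    num_candidates=3,
)
for candidate in candidates:
    print(candidate.optimization_id, candidate.explanation)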
84 changes: 84 additions & 0 deletions codeflash/code_utils/code_replacer.py
@@ -10,6 +10,7 @@
from codeflash.cli_cmds.console import logger
from codeflash.code_utils.code_extractor import add_needed_imports_from_module
from codeflash.models.models import FunctionParent
import isort

if TYPE_CHECKING:
    from pathlib import Path
@@ -336,3 +337,86 @@ def function_to_optimize_original_worktree_fqn(
+ "."
+ function_to_optimize.qualified_name
)


def add_decorator_cst(module_node, function_name, decorator_name):
    """Adds a decorator to a function definition in a LibCST module node."""

    class AddDecoratorTransformer(cst.CSTTransformer):
        def leave_FunctionDef(self, original_node, updated_node):
            if original_node.name.value == function_name:
                new_decorator = cst.Decorator(
                    decorator=cst.Name(value=decorator_name)
                )

                updated_decorators = list(updated_node.decorators)
                updated_decorators.insert(0, new_decorator)

                return updated_node.with_changes(decorators=updated_decorators)
            return updated_node

    transformer = AddDecoratorTransformer()
    updated_module = module_node.visit(transformer)
    return updated_module
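As an illustration (not part of the diff), a small sketch of what add_decorator_cst produces for a toy module; the sample source and names are made up.

# Hypothetical sketch with a toy input module.
import libcst as cst

from codeflash.code_utils.code_replacer import add_decorator_cst

module = cst.parse_module("def sorter(arr):\n    return sorted(arr)\n")
module = add_decorator_cst(module, "sorter", "profile")
print(module.code)
# @profile
# def sorter(arr):
#     return sorted(arr)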

def add_decorator_imports(file_paths, fn_list, db_file):
    """Add the line_profiler `profile` decorator and its import to each listed function in its file."""
    for file_path, fn_name in zip(file_paths, fn_list):
        # open the file
        with open(file_path, "r", encoding="utf-8") as file:
            file_contents = file.read()

        # parse to cst
        module_node = cst.parse_module(file_contents)
        # add the decorator
        module_node = add_decorator_cst(module_node, fn_name, "profile")
        # add the import via a transformer
        transformer = ImportAdder("from line_profiler import profile")

        # apply the transformer to add the import
        module_node = module_node.visit(transformer)
        modified_code = isort.code(module_node.code, float_to_top=True)
        # write back to file
        with open(file_path, "w", encoding="utf-8") as file:
            file.write(modified_code)
    # Do this only for the main file and not the helper files; libcst could be used,
    # but simple string manipulation is enough here.
    with open(file_paths[0], "r") as f:
        file_contents = f.readlines()
    for idx, line in enumerate(file_contents):
        if "from line_profiler import profile" in line:
            file_contents.insert(idx + 1, f"profile.enable(output_prefix='{db_file}')\n")
            break
    with open(file_paths[0], "w") as f:
        f.writelines(file_contents)




class ImportAdder(cst.CSTTransformer):
    def __init__(self, import_statement="from line_profiler import profile"):
        self.import_statement = import_statement
        self.has_import = False

    def leave_Module(self, original_node, updated_node):
        # If the import is already there, don't add it again
        if self.has_import:
            return updated_node

        # Parse the import statement into a CST node
        import_node = cst.parse_statement(self.import_statement)

        # Add the import to the module's body
        return updated_node.with_changes(
            body=[import_node] + list(updated_node.body)
        )

    def visit_Import(self, node):
        pass

    def visit_ImportFrom(self, node):
        # Check if the profile is already imported from line_profiler
        if node.module and node.module.value == "line_profiler":
            for import_alias in node.names:
                if import_alias.name.value == "profile":
                    self.has_import = True
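As a usage sketch (not part of the diff): add_decorator_imports decorates each listed function with line_profiler's profile, lets ImportAdder insert the import exactly once per file, and appends a profile.enable(output_prefix=...) call in the main file only. The file paths and function names below are made up.

# Hypothetical sketch: paths, function names, and the output prefix are illustrative only.
from pathlib import Path

from codeflash.code_utils.code_replacer import add_decorator_imports

main_file = Path("bubble_sort.py")         # file containing the function to optimize
helper_file = Path("bubble_sort_deps.py")  # helper file whose function is also profiled
lprof_prefix = "/tmp/codeflash_lprof"      # assumed prefix for the line profiler output

add_decorator_imports(
    file_paths=[main_file, helper_file],
    fn_list=["sorter", "dep_sort"],
    db_file=lprof_prefix,
)
# Running the instrumented code now records per-line timings, which line_profiler
# writes out under the given output prefix.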
5 changes: 5 additions & 0 deletions codeflash/code_utils/coverage_utils.py
@@ -59,3 +59,8 @@ def prepare_coverage_files() -> tuple[Path, Path]:
    coveragerc_content = f"[run]\n branch = True\ndata_file={coverage_database_file}\n"
    coveragercfile.write_text(coveragerc_content)
    return coverage_database_file, coveragercfile

def prepare_lprofiler_files(prefix="") -> Path:
    """Prepare the line profiler output file."""
    lprofiler_database_file = get_run_tmp_file(prefix)
    return lprofiler_database_file
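For illustration (not part of the diff), how this helper presumably connects to the instrumentation in code_replacer.py; the prefix value is an assumption.

# Hypothetical sketch: the prefix string is made up.
from codeflash.code_utils.coverage_utils import prepare_lprofiler_files

lprof_file = prepare_lprofiler_files(prefix="line_profile")
# This temp path is presumably what add_decorator_imports receives as `db_file`, so the
# instrumented main file ends up containing: profile.enable(output_prefix='<lprof_file>')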
1 change: 1 addition & 0 deletions codeflash/models/models.py
@@ -218,6 +218,7 @@ class OriginalCodeBaseline(BaseModel):
    benchmarking_test_results: TestResults
    runtime: int
    coverage_results: Optional[CoverageData]
    lprof_results: str


class CoverageStatus(Enum):