diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml
new file mode 100644
index 000000000..bc0a20ae8
--- /dev/null
+++ b/.github/workflows/pre-commit.yaml
@@ -0,0 +1,19 @@
+name: Lint
+on:
+  pull_request:
+  push:
+    branches:
+      - main
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  lint:
+    name: Run pre-commit hooks
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+      - uses: pre-commit/action@v3.0.1
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..9c2955e4b
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,7 @@
+repos:
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: "v0.11.0"
+    hooks:
+      - id: ruff
+        args: [--fix, --exit-non-zero-on-fix, --config=pyproject.toml]
+      - id: ruff-format
\ No newline at end of file
diff --git a/codeflash/api/aiservice.py b/codeflash/api/aiservice.py
index 23c1b442a..ed61e8c58 100644
--- a/codeflash/api/aiservice.py
+++ b/codeflash/api/aiservice.py
@@ -73,7 +73,7 @@ def make_ai_service_request(
         # response.raise_for_status()  # Will raise an HTTPError if the HTTP request returned an unsuccessful status code
         return response

-    def optimize_python_code(
+    def optimize_python_code(  # noqa: D417
         self,
         source_code: str,
         dependency_code: str,
@@ -139,7 +139,7 @@ def optimize_python_code(
             console.rule()
             return []

-    def optimize_python_code_line_profiler(
+    def optimize_python_code_line_profiler(  # noqa: D417
         self,
         source_code: str,
         dependency_code: str,
@@ -208,7 +208,7 @@ def optimize_python_code_line_profiler(
             console.rule()
             return []

-    def log_results(
+    def log_results(  # noqa: D417
         self,
         function_trace_id: str,
         speedup_ratio: dict[str, float | None] | None,
@@ -240,7 +240,7 @@ def log_results(
         except requests.exceptions.RequestException as e:
             logger.exception(f"Error logging features: {e}")

-    def generate_regression_tests(
+    def generate_regression_tests(  # noqa: D417
         self,
         source_code_being_tested: str,
         function_to_optimize: FunctionToOptimize,
@@ -270,10 +270,9 @@ def generate_regression_tests(

            - Dict[str, str] | None: The generated regression tests and instrumented tests, or None if an error occurred.
""" - assert test_framework in [ - "pytest", - "unittest", - ], f"Invalid test framework, got {test_framework} but expected 'pytest' or 'unittest'" + assert test_framework in ["pytest", "unittest"], ( + f"Invalid test framework, got {test_framework} but expected 'pytest' or 'unittest'" + ) payload = { "source_code_being_tested": source_code_being_tested, "function_to_optimize": function_to_optimize, @@ -308,7 +307,7 @@ def generate_regression_tests( error = response.json()["error"] logger.error(f"Error generating tests: {response.status_code} - {error}") ph("cli-testgen-error-response", {"response_status_code": response.status_code, "error": error}) - return None + return None # noqa: TRY300 except Exception: logger.error(f"Error generating tests: {response.status_code} - {response.text}") ph("cli-testgen-error-response", {"response_status_code": response.status_code, "error": response.text}) diff --git a/codeflash/benchmarking/codeflash_trace.py b/codeflash/benchmarking/codeflash_trace.py index 35232f954..20743fd56 100644 --- a/codeflash/benchmarking/codeflash_trace.py +++ b/codeflash/benchmarking/codeflash_trace.py @@ -4,7 +4,7 @@ import sqlite3 import threading import time -from typing import Callable +from typing import Any, Callable from codeflash.picklepatch.pickle_patcher import PicklePatcher @@ -69,7 +69,7 @@ def write_function_timings(self) -> None: "(function_name, class_name, module_name, file_path, benchmark_function_name, " "benchmark_module_path, benchmark_line_number, function_time_ns, overhead_time_ns, args, kwargs) " "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", - self.function_calls_data + self.function_calls_data, ) self._connection.commit() self.function_calls_data = [] @@ -100,9 +100,10 @@ def __call__(self, func: Callable) -> Callable: The wrapped function """ - func_id = (func.__module__,func.__name__) + func_id = (func.__module__, func.__name__) + @functools.wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwargs) -> Any: # noqa: ANN002, ANN003, ANN401 # Initialize thread-local active functions set if it doesn't exist if not hasattr(self._thread_local, "active_functions"): self._thread_local.active_functions = set() @@ -139,9 +140,19 @@ def wrapper(*args, **kwargs): self._thread_local.active_functions.remove(func_id) overhead_time = time.thread_time_ns() - end_time self.function_calls_data.append( - (func.__name__, class_name, func.__module__, func.__code__.co_filename, - benchmark_function_name, benchmark_module_path, benchmark_line_number, execution_time, - overhead_time, None, None) + ( + func.__name__, + class_name, + func.__module__, + func.__code__.co_filename, + benchmark_function_name, + benchmark_module_path, + benchmark_line_number, + execution_time, + overhead_time, + None, + None, + ) ) return result @@ -155,9 +166,19 @@ def wrapper(*args, **kwargs): self._thread_local.active_functions.remove(func_id) overhead_time = time.thread_time_ns() - end_time self.function_calls_data.append( - (func.__name__, class_name, func.__module__, func.__code__.co_filename, - benchmark_function_name, benchmark_module_path, benchmark_line_number, execution_time, - overhead_time, None, None) + ( + func.__name__, + class_name, + func.__module__, + func.__code__.co_filename, + benchmark_function_name, + benchmark_module_path, + benchmark_line_number, + execution_time, + overhead_time, + None, + None, + ) ) return result # Flush to database every 100 calls @@ -168,12 +189,24 @@ def wrapper(*args, **kwargs): self._thread_local.active_functions.remove(func_id) 
overhead_time = time.thread_time_ns() - end_time self.function_calls_data.append( - (func.__name__, class_name, func.__module__, func.__code__.co_filename, - benchmark_function_name, benchmark_module_path, benchmark_line_number, execution_time, - overhead_time, pickled_args, pickled_kwargs) + ( + func.__name__, + class_name, + func.__module__, + func.__code__.co_filename, + benchmark_function_name, + benchmark_module_path, + benchmark_line_number, + execution_time, + overhead_time, + pickled_args, + pickled_kwargs, + ) ) return result + return wrapper + # Create a singleton instance codeflash_trace = CodeflashTrace() diff --git a/codeflash/benchmarking/instrument_codeflash_trace.py b/codeflash/benchmarking/instrument_codeflash_trace.py index 044b0b0a4..761e91f71 100644 --- a/codeflash/benchmarking/instrument_codeflash_trace.py +++ b/codeflash/benchmarking/instrument_codeflash_trace.py @@ -1,9 +1,16 @@ -from pathlib import Path +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional, Union import isort import libcst as cst -from codeflash.discovery.functions_to_optimize import FunctionToOptimize +if TYPE_CHECKING: + from pathlib import Path + + from libcst import BaseStatement, ClassDef, FlattenSentinel, FunctionDef, RemovalSentinel + + from codeflash.discovery.functions_to_optimize import FunctionToOptimize class AddDecoratorTransformer(cst.CSTTransformer): @@ -13,39 +20,37 @@ def __init__(self, target_functions: set[tuple[str, str]]) -> None: self.added_codeflash_trace = False self.class_name = "" self.function_name = "" - self.decorator = cst.Decorator( - decorator=cst.Name(value="codeflash_trace") - ) + self.decorator = cst.Decorator(decorator=cst.Name(value="codeflash_trace")) - def leave_ClassDef(self, original_node, updated_node): + def leave_ClassDef( + self, original_node: ClassDef, updated_node: ClassDef + ) -> Union[BaseStatement, FlattenSentinel[BaseStatement], RemovalSentinel]: if self.class_name == original_node.name.value: - self.class_name = "" # Even if nested classes are not visited, this function is still called on them + self.class_name = "" # Even if nested classes are not visited, this function is still called on them return updated_node - def visit_ClassDef(self, node): - if self.class_name: # Don't go into nested class + def visit_ClassDef(self, node: ClassDef) -> Optional[bool]: + if self.class_name: # Don't go into nested class return False - self.class_name = node.name.value + self.class_name = node.name.value # noqa: RET503 - def visit_FunctionDef(self, node): - if self.function_name: # Don't go into nested function + def visit_FunctionDef(self, node: FunctionDef) -> Optional[bool]: + if self.function_name: # Don't go into nested function return False - self.function_name = node.name.value + self.function_name = node.name.value # noqa: RET503 - def leave_FunctionDef(self, original_node, updated_node): + def leave_FunctionDef(self, original_node: FunctionDef, updated_node: FunctionDef) -> FunctionDef: if self.function_name == original_node.name.value: self.function_name = "" if (self.class_name, original_node.name.value) in self.target_functions: # Add the new decorator after any existing decorators, so it gets executed first - updated_decorators = list(updated_node.decorators) + [self.decorator] + updated_decorators = [*list(updated_node.decorators), self.decorator] self.added_codeflash_trace = True - return updated_node.with_changes( - decorators=updated_decorators - ) + return updated_node.with_changes(decorators=updated_decorators) 
return updated_node - def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> cst.Module: + def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> cst.Module: # noqa: ARG002 # Create import statement for codeflash_trace if not self.added_codeflash_trace: return updated_node @@ -53,17 +58,10 @@ def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> c body=[ cst.ImportFrom( module=cst.Attribute( - value=cst.Attribute( - value=cst.Name(value="codeflash"), - attr=cst.Name(value="benchmarking") - ), - attr=cst.Name(value="codeflash_trace") + value=cst.Attribute(value=cst.Name(value="codeflash"), attr=cst.Name(value="benchmarking")), + attr=cst.Name(value="codeflash_trace"), ), - names=[ - cst.ImportAlias( - name=cst.Name(value="codeflash_trace") - ) - ] + names=[cst.ImportAlias(name=cst.Name(value="codeflash_trace"))], ) ] ) @@ -73,12 +71,13 @@ def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> c return updated_node.with_changes(body=new_body) + def add_codeflash_decorator_to_code(code: str, functions_to_optimize: list[FunctionToOptimize]) -> str: """Add codeflash_trace to a function. Args: code: The source code as a string - function_to_optimize: The FunctionToOptimize instance containing function details + functions_to_optimize: List of FunctionToOptimize instances containing function details Returns: The modified source code as a string @@ -91,25 +90,18 @@ def add_codeflash_decorator_to_code(code: str, functions_to_optimize: list[Funct class_name = function_to_optimize.parents[0].name target_functions.add((class_name, function_to_optimize.function_name)) - transformer = AddDecoratorTransformer( - target_functions = target_functions, - ) + transformer = AddDecoratorTransformer(target_functions=target_functions) module = cst.parse_module(code) modified_module = module.visit(transformer) return modified_module.code -def instrument_codeflash_trace_decorator( - file_to_funcs_to_optimize: dict[Path, list[FunctionToOptimize]] -) -> None: +def instrument_codeflash_trace_decorator(file_to_funcs_to_optimize: dict[Path, list[FunctionToOptimize]]) -> None: """Instrument codeflash_trace decorator to functions to optimize.""" for file_path, functions_to_optimize in file_to_funcs_to_optimize.items(): original_code = file_path.read_text(encoding="utf-8") - new_code = add_codeflash_decorator_to_code( - original_code, - functions_to_optimize - ) + new_code = add_codeflash_decorator_to_code(original_code, functions_to_optimize) # Modify the code modified_code = isort.code(code=new_code, float_to_top=True) diff --git a/codeflash/benchmarking/plugin/plugin.py b/codeflash/benchmarking/plugin/plugin.py index 313817041..45fabef14 100644 --- a/codeflash/benchmarking/plugin/plugin.py +++ b/codeflash/benchmarking/plugin/plugin.py @@ -20,7 +20,7 @@ def __init__(self) -> None: self.project_root = None self.benchmark_timings = [] - def setup(self, trace_path:str, project_root:str) -> None: + def setup(self, trace_path: str, project_root: str) -> None: try: # Open connection self.project_root = project_root @@ -35,7 +35,7 @@ def setup(self, trace_path:str, project_root:str) -> None: "benchmark_time_ns INTEGER)" ) self._connection.commit() - self.close() # Reopen only at the end of pytest session + self.close() # Reopen only at the end of pytest session except Exception as e: print(f"Database setup error: {e}") if self._connection: @@ -55,14 +55,15 @@ def write_benchmark_timings(self) -> None: # Insert data into the 
benchmark_timings table cur.executemany( "INSERT INTO benchmark_timings (benchmark_module_path, benchmark_function_name, benchmark_line_number, benchmark_time_ns) VALUES (?, ?, ?, ?)", - self.benchmark_timings + self.benchmark_timings, ) self._connection.commit() - self.benchmark_timings = [] # Clear the benchmark timings list + self.benchmark_timings = [] # Clear the benchmark timings list except Exception as e: print(f"Error writing to benchmark timings database: {e}") self._connection.rollback() raise + def close(self) -> None: if self._connection: self._connection.close() @@ -185,7 +186,7 @@ def get_benchmark_timings(trace_path: Path) -> dict[BenchmarkKey, int]: # Pytest hooks @pytest.hookimpl - def pytest_sessionfinish(self, session, exitstatus): + def pytest_sessionfinish(self, session, exitstatus) -> None: # noqa: ANN001, ARG002 """Execute after whole test run is completed.""" # Write any remaining benchmark timings to the database codeflash_trace.close() @@ -195,29 +196,24 @@ def pytest_sessionfinish(self, session, exitstatus): self.close() @staticmethod - def pytest_addoption(parser): - parser.addoption( - "--codeflash-trace", - action="store_true", - default=False, - help="Enable CodeFlash tracing" - ) + def pytest_addoption(parser: pytest.Parser) -> None: + parser.addoption("--codeflash-trace", action="store_true", default=False, help="Enable CodeFlash tracing") @staticmethod - def pytest_plugin_registered(plugin, manager): + def pytest_plugin_registered(plugin, manager) -> None: # noqa: ANN001 # Not necessary since run with -p no:benchmark, but just in case if hasattr(plugin, "name") and plugin.name == "pytest-benchmark": manager.unregister(plugin) @staticmethod - def pytest_configure(config): + def pytest_configure(config: pytest.Config) -> None: """Register the benchmark marker.""" config.addinivalue_line( - "markers", - "benchmark: mark test as a benchmark that should be run with codeflash tracing" + "markers", "benchmark: mark test as a benchmark that should be run with codeflash tracing" ) + @staticmethod - def pytest_collection_modifyitems(config, items): + def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None: # Skip tests that don't have the benchmark fixture if not config.getoption("--codeflash-trace"): return @@ -239,27 +235,30 @@ def pytest_collection_modifyitems(config, items): item.add_marker(skip_no_benchmark) # Benchmark fixture - class Benchmark: - def __init__(self, request): + class Benchmark: # noqa: D106 + def __init__(self, request: pytest.FixtureRequest) -> None: self.request = request - def __call__(self, func, *args, **kwargs): + def __call__(self, func, *args, **kwargs): # type: ignore # noqa: ANN001, ANN002, ANN003, ANN204, PGH003 """Handle both direct function calls and decorator usage.""" if args or kwargs: # Used as benchmark(func, *args, **kwargs) return self._run_benchmark(func, *args, **kwargs) + # Used as @benchmark decorator - def wrapped_func(*args, **kwargs): + def wrapped_func(*args, **kwargs): # noqa: ANN002, ANN003, ANN202 return func(*args, **kwargs) - result = self._run_benchmark(func) + + self._run_benchmark(func) return wrapped_func - def _run_benchmark(self, func, *args, **kwargs): + def _run_benchmark(self, func, *args, **kwargs): # noqa: ANN001, ANN002, ANN003, ANN202 """Actual benchmark implementation.""" - benchmark_module_path = module_name_from_file_path(Path(str(self.request.node.fspath)), - Path(codeflash_benchmark_plugin.project_root)) + benchmark_module_path = module_name_from_file_path( 
+ Path(str(self.request.node.fspath)), Path(codeflash_benchmark_plugin.project_root) + ) benchmark_function_name = self.request.node.name - line_number = int(str(sys._getframe(2).f_lineno)) # 2 frames up in the call stack + line_number = int(str(sys._getframe(2).f_lineno)) # 2 frames up in the call stack # noqa: SLF001 # Set env vars os.environ["CODEFLASH_BENCHMARK_FUNCTION_NAME"] = benchmark_function_name os.environ["CODEFLASH_BENCHMARK_MODULE_PATH"] = benchmark_module_path @@ -278,16 +277,18 @@ def _run_benchmark(self, func, *args, **kwargs): codeflash_trace.function_call_count = 0 # Add to the benchmark timings buffer codeflash_benchmark_plugin.benchmark_timings.append( - (benchmark_module_path, benchmark_function_name, line_number, end - start)) + (benchmark_module_path, benchmark_function_name, line_number, end - start) + ) return result @staticmethod @pytest.fixture - def benchmark(request): + def benchmark(request: pytest.FixtureRequest) -> object: if not request.config.getoption("--codeflash-trace"): return None return CodeFlashBenchmarkPlugin.Benchmark(request) + codeflash_benchmark_plugin = CodeFlashBenchmarkPlugin() diff --git a/codeflash/benchmarking/replay_test.py b/codeflash/benchmarking/replay_test.py index ee1107241..a9b3f2e29 100644 --- a/codeflash/benchmarking/replay_test.py +++ b/codeflash/benchmarking/replay_test.py @@ -16,7 +16,12 @@ def get_next_arg_and_return( - trace_file: str, benchmark_function_name:str, function_name: str, file_path: str, class_name: str | None = None, num_to_get: int = 256 + trace_file: str, + benchmark_function_name: str, + function_name: str, + file_path: str, + class_name: str | None = None, + num_to_get: int = 256, ) -> Generator[Any]: db = sqlite3.connect(trace_file) cur = db.cursor() @@ -42,10 +47,10 @@ def get_function_alias(module: str, function_name: str) -> str: def create_trace_replay_test_code( - trace_file: str, - functions_data: list[dict[str, Any]], - test_framework: str = "pytest", - max_run_count=256 + trace_file: str, + functions_data: list[dict[str, Any]], + test_framework: str = "pytest", + max_run_count=256, # noqa: ANN001 ) -> str: """Create a replay test for functions based on trace data. @@ -83,8 +88,9 @@ def create_trace_replay_test_code( imports += "\n".join(function_imports) - functions_to_optimize = sorted({func.get("function_name") for func in functions_data - if func.get("function_name") != "__init__"}) + functions_to_optimize = sorted( + {func.get("function_name") for func in functions_data if func.get("function_name") != "__init__"} + ) metadata = f"""functions = {functions_to_optimize} trace_file_path = r"{trace_file}" """ @@ -111,7 +117,8 @@ def create_trace_replay_test_code( else: instance = args[0] # self ret = instance{method_name}(*args[1:], **kwargs) - """) + """ + ) test_class_method_body = textwrap.dedent( """\ @@ -142,7 +149,6 @@ def create_trace_replay_test_code( self = "" for func in functions_data: - module_name = func.get("module_name") function_name = func.get("function_name") class_name = func.get("class_name") @@ -206,7 +212,10 @@ def create_trace_replay_test_code( return imports + "\n" + metadata + "\n" + test_template -def generate_replay_test(trace_file_path: Path, output_dir: Path, test_framework: str = "pytest", max_run_count: int = 100) -> int: + +def generate_replay_test( + trace_file_path: Path, output_dir: Path, test_framework: str = "pytest", max_run_count: int = 100 +) -> int: """Generate multiple replay tests from the traced function calls, grouped by benchmark. 
Args: @@ -226,9 +235,7 @@ def generate_replay_test(trace_file_path: Path, output_dir: Path, test_framework cursor = conn.cursor() # Get distinct benchmark file paths - cursor.execute( - "SELECT DISTINCT benchmark_module_path FROM benchmark_function_timings" - ) + cursor.execute("SELECT DISTINCT benchmark_module_path FROM benchmark_function_timings") benchmark_files = cursor.fetchall() # Generate a test for each benchmark file @@ -236,29 +243,29 @@ def generate_replay_test(trace_file_path: Path, output_dir: Path, test_framework benchmark_module_path = benchmark_file[0] # Get all benchmarks and functions associated with this file path cursor.execute( - "SELECT DISTINCT benchmark_function_name, function_name, class_name, module_name, file_path, benchmark_line_number FROM benchmark_function_timings " + "SELECT DISTINCT benchmark_function_name, function_name, class_name, module_name, file_path, benchmark_line_number FROM benchmark_function_timings " "WHERE benchmark_module_path = ?", - (benchmark_module_path,) + (benchmark_module_path,), ) functions_data = [] for row in cursor.fetchall(): benchmark_function_name, function_name, class_name, module_name, file_path, benchmark_line_number = row # Add this function to our list - functions_data.append({ - "function_name": function_name, - "class_name": class_name, - "file_path": file_path, - "module_name": module_name, - "benchmark_function_name": benchmark_function_name, - "benchmark_module_path": benchmark_module_path, - "benchmark_line_number": benchmark_line_number, - "function_properties": inspect_top_level_functions_or_methods( - file_name=Path(file_path), - function_or_method_name=function_name, - class_name=class_name, - ) - }) + functions_data.append( + { + "function_name": function_name, + "class_name": class_name, + "file_path": file_path, + "module_name": module_name, + "benchmark_function_name": benchmark_function_name, + "benchmark_module_path": benchmark_module_path, + "benchmark_line_number": benchmark_line_number, + "function_properties": inspect_top_level_functions_or_methods( + file_name=Path(file_path), function_or_method_name=function_name, class_name=class_name + ), + } + ) if not functions_data: logger.info(f"No benchmark test functions found in {benchmark_module_path}") diff --git a/codeflash/benchmarking/trace_benchmarks.py b/codeflash/benchmarking/trace_benchmarks.py index 8d14068e7..e59b06656 100644 --- a/codeflash/benchmarking/trace_benchmarks.py +++ b/codeflash/benchmarking/trace_benchmarks.py @@ -9,7 +9,9 @@ from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE -def trace_benchmarks_pytest(benchmarks_root: Path, tests_root:Path, project_root: Path, trace_file: Path, timeout:int = 300) -> None: +def trace_benchmarks_pytest( + benchmarks_root: Path, tests_root: Path, project_root: Path, trace_file: Path, timeout: int = 300 +) -> None: benchmark_env = os.environ.copy() if "PYTHONPATH" not in benchmark_env: benchmark_env["PYTHONPATH"] = str(project_root) @@ -43,6 +45,4 @@ def trace_benchmarks_pytest(benchmarks_root: Path, tests_root:Path, project_root error_section = match.group(1) if match else result.stdout else: error_section = result.stdout - logger.warning( - f"Error collecting benchmarks - Pytest Exit code: {result.returncode}, {error_section}" - ) + logger.warning(f"Error collecting benchmarks - Pytest Exit code: {result.returncode}, {error_section}") diff --git a/codeflash/benchmarking/utils.py b/codeflash/benchmarking/utils.py index da09cd57a..5dae99444 100644 --- a/codeflash/benchmarking/utils.py +++ 
b/codeflash/benchmarking/utils.py @@ -15,8 +15,9 @@ from codeflash.models.models import BenchmarkKey -def validate_and_format_benchmark_table(function_benchmark_timings: dict[str, dict[BenchmarkKey, int]], - total_benchmark_timings: dict[BenchmarkKey, int]) -> dict[str, list[tuple[BenchmarkKey, float, float, float]]]: +def validate_and_format_benchmark_table( + function_benchmark_timings: dict[str, dict[BenchmarkKey, int]], total_benchmark_timings: dict[BenchmarkKey, int] +) -> dict[str, list[tuple[BenchmarkKey, float, float, float]]]: function_to_result = {} # Process each function's benchmark data for func_path, test_times in function_benchmark_timings.items(): @@ -41,12 +42,11 @@ def validate_and_format_benchmark_table(function_benchmark_timings: dict[str, di def print_benchmark_table(function_to_results: dict[str, list[tuple[BenchmarkKey, float, float, float]]]) -> None: - try: terminal_width = int(shutil.get_terminal_size().columns * 0.9) except Exception: terminal_width = 120 # Fallback width - console = Console(width = terminal_width) + console = Console(width=terminal_width) for func_path, sorted_tests in function_to_results.items(): console.print() function_name = func_path.split(":")[-1] @@ -67,30 +67,18 @@ def print_benchmark_table(function_to_results: dict[str, list[tuple[BenchmarkKey test_function = benchmark_key.function_name if total_time == 0.0: - table.add_row( - module_path, - test_function, - "N/A", - "N/A", - "N/A" - ) + table.add_row(module_path, test_function, "N/A", "N/A", "N/A") else: - table.add_row( - module_path, - test_function, - f"{total_time:.3f}", - f"{func_time:.3f}", - f"{percentage:.2f}" - ) + table.add_row(module_path, test_function, f"{total_time:.3f}", f"{func_time:.3f}", f"{percentage:.2f}") # Print the table console.print(table) def process_benchmark_data( - replay_performance_gain: dict[BenchmarkKey, float], - fto_benchmark_timings: dict[BenchmarkKey, int], - total_benchmark_timings: dict[BenchmarkKey, int] + replay_performance_gain: dict[BenchmarkKey, float], + fto_benchmark_timings: dict[BenchmarkKey, int], + total_benchmark_timings: dict[BenchmarkKey, int], ) -> Optional[ProcessedBenchmarkInfo]: """Process benchmark data and generate detailed benchmark information. 
@@ -109,19 +97,25 @@ def process_benchmark_data( benchmark_details = [] for benchmark_key, og_benchmark_timing in fto_benchmark_timings.items(): - total_benchmark_timing = total_benchmark_timings.get(benchmark_key, 0) if total_benchmark_timing == 0: continue # Skip benchmarks with zero timing # Calculate expected new benchmark timing - expected_new_benchmark_timing = total_benchmark_timing - og_benchmark_timing + ( - 1 / (replay_performance_gain[benchmark_key] + 1) - ) * og_benchmark_timing + expected_new_benchmark_timing = ( + total_benchmark_timing + - og_benchmark_timing + + (1 / (replay_performance_gain[benchmark_key] + 1)) * og_benchmark_timing + ) # Calculate speedup - benchmark_speedup_percent = performance_gain(original_runtime_ns=total_benchmark_timing, optimized_runtime_ns=int(expected_new_benchmark_timing)) * 100 + benchmark_speedup_percent = ( + performance_gain( + original_runtime_ns=total_benchmark_timing, optimized_runtime_ns=int(expected_new_benchmark_timing) + ) + * 100 + ) benchmark_details.append( BenchmarkDetail( @@ -129,7 +123,7 @@ def process_benchmark_data( test_function=benchmark_key.function_name, original_timing=humanize_runtime(int(total_benchmark_timing)), expected_new_timing=humanize_runtime(int(expected_new_benchmark_timing)), - speedup_percent=benchmark_speedup_percent + speedup_percent=benchmark_speedup_percent, ) ) diff --git a/codeflash/cli_cmds/cli.py b/codeflash/cli_cmds/cli.py index c0e90ad4b..cce7208da 100644 --- a/codeflash/cli_cmds/cli.py +++ b/codeflash/cli_cmds/cli.py @@ -62,9 +62,13 @@ def parse_args() -> Namespace: ) parser.add_argument("-v", "--verbose", action="store_true", help="Print verbose debug logs") parser.add_argument("--version", action="store_true", help="Print the version of codeflash") - parser.add_argument("--benchmark", action="store_true", help="Trace benchmark tests and calculate optimization impact on benchmarks") parser.add_argument( - "--benchmarks-root", type=str, help="Path to the directory of the project, where all the pytest-benchmark tests are located." 
+ "--benchmark", action="store_true", help="Trace benchmark tests and calculate optimization impact on benchmarks" + ) + parser.add_argument( + "--benchmarks-root", + type=str, + help="Path to the directory of the project, where all the pytest-benchmark tests are located.", ) args: Namespace = parser.parse_args() return process_and_validate_cmd_args(args) @@ -134,7 +138,9 @@ def process_pyproject_config(args: Namespace) -> Namespace: assert Path(args.tests_root).is_dir(), f"--tests-root {args.tests_root} must be a valid directory" if args.benchmark: assert args.benchmarks_root is not None, "--benchmarks-root must be specified when running with --benchmark" - assert Path(args.benchmarks_root).is_dir(), f"--benchmarks-root {args.benchmarks_root} must be a valid directory" + assert Path(args.benchmarks_root).is_dir(), ( + f"--benchmarks-root {args.benchmarks_root} must be a valid directory" + ) assert Path(args.benchmarks_root).resolve().is_relative_to(Path(args.tests_root).resolve()), ( f"--benchmarks-root {args.benchmarks_root} must be a subdirectory of --tests-root {args.tests_root}" ) diff --git a/codeflash/cli_cmds/cli_common.py b/codeflash/cli_cmds/cli_common.py index 942b8f634..8e203c766 100644 --- a/codeflash/cli_cmds/cli_common.py +++ b/codeflash/cli_cmds/cli_common.py @@ -43,7 +43,7 @@ def inquirer_wrapper(func: Callable[..., str | bool], *args: str | bool, **kwarg return func(*new_args, **new_kwargs) -def split_string_to_cli_width(string: str, is_confirm: bool = False) -> list[str]: +def split_string_to_cli_width(string: str, is_confirm: bool = False) -> list[str]: # noqa: FBT001, FBT002 cli_width, _ = shutil.get_terminal_size() # split string to lines that accommodate "[?] " prefix cli_width -= len("[?] ") @@ -74,7 +74,7 @@ def inquirer_wrapper_path(*args: str, **kwargs: str) -> dict[str, str] | None: new_kwargs["message"] = last_message new_args.append(args[0]) - return cast(dict[str, str], inquirer.prompt([inquirer.Path(*new_args, **new_kwargs)])) + return cast("dict[str, str]", inquirer.prompt([inquirer.Path(*new_args, **new_kwargs)])) def split_string_to_fit_width(string: str, width: int) -> list[str]: diff --git a/codeflash/cli_cmds/cmd_init.py b/codeflash/cli_cmds/cmd_init.py index a40641166..93d713402 100644 --- a/codeflash/cli_cmds/cmd_init.py +++ b/codeflash/cli_cmds/cmd_init.py @@ -7,7 +7,7 @@ import sys from enum import Enum, auto from pathlib import Path -from typing import TYPE_CHECKING, Any, cast, Union +from typing import TYPE_CHECKING, Any, Union, cast import click import git @@ -34,7 +34,7 @@ from argparse import Namespace CODEFLASH_LOGO: str = ( - f"{LF}" + f"{LF}" # noqa: ISC003 r" _ ___ _ _ " + f"{LF}" r" | | / __)| | | | " + f"{LF}" r" ____ ___ _ | | ____ | |__ | | ____ ___ | | _ " + f"{LF}" @@ -126,7 +126,8 @@ def ask_run_end_to_end_test(args: Namespace) -> None: def should_modify_pyproject_toml() -> bool: - """Check if the current directory contains a valid pyproject.toml file with codeflash config + """Check if the current directory contains a valid pyproject.toml file with codeflash config. + If it does, ask the user if they want to re-configure it. """ from rich.prompt import Confirm @@ -144,12 +145,11 @@ def should_modify_pyproject_toml() -> bool: if "tests_root" not in config or config["tests_root"] is None or not Path(config["tests_root"]).is_dir(): return True - create_toml = Confirm.ask( + return Confirm.ask( "✅ A valid Codeflash config already exists in this project. 
Do you want to re-configure it?", default=False, show_default=True, ) - return create_toml def collect_setup_info() -> SetupInfo: @@ -469,7 +469,7 @@ def check_for_toml_or_setup_file() -> str | None: return cast("str", project_name) -def install_github_actions(override_formatter_check: bool = False) -> None: +def install_github_actions(override_formatter_check: bool = False) -> None: # noqa: FBT001, FBT002 try: config, config_file_path = parse_config_file(override_formatter_check=override_formatter_check) @@ -564,7 +564,7 @@ def install_github_actions(override_formatter_check: bool = False) -> None: apologize_and_exit() -def determine_dependency_manager(pyproject_data: dict[str, Any]) -> DependencyManager: +def determine_dependency_manager(pyproject_data: dict[str, Any]) -> DependencyManager: # noqa: PLR0911 """Determine which dependency manager is being used based on pyproject.toml contents.""" if (Path.cwd() / "poetry.lock").exists(): return DependencyManager.POETRY @@ -642,7 +642,10 @@ def get_github_action_working_directory(toml_path: Path, git_root: Path) -> str: def customize_codeflash_yaml_content( - optimize_yml_content: str, config: tuple[dict[str, Any], Path], git_root: Path, benchmark_mode: bool = False + optimize_yml_content: str, + config: tuple[dict[str, Any], Path], + git_root: Path, + benchmark_mode: bool = False, # noqa: FBT001, FBT002 ) -> str: module_path = str(Path(config["module_root"]).relative_to(git_root) / "**") optimize_yml_content = optimize_yml_content.replace("{{ codeflash_module_path }}", module_path) @@ -879,7 +882,7 @@ def test_sort(self): input = list(reversed(range(100))) output = sorter(input) self.assertEqual(output, list(range(100))) -""" +""" # noqa: PTH119 elif args.test_framework == "pytest": bubble_sort_test_content = f"""from {Path(args.module_root).name}.bubble_sort import sorter @@ -960,10 +963,8 @@ def ask_for_telemetry() -> bool: """Prompt the user to enable or disable telemetry.""" from rich.prompt import Confirm - enable_telemetry = Confirm.ask( + return Confirm.ask( "⚡️ Would you like to enable telemetry to help us improve the Codeflash experience?", default=True, show_default=True, ) - - return enable_telemetry diff --git a/codeflash/cli_cmds/console.py b/codeflash/cli_cmds/console.py index b4bfda3ff..fe2fdcdd1 100644 --- a/codeflash/cli_cmds/console.py +++ b/codeflash/cli_cmds/console.py @@ -12,7 +12,6 @@ MofNCompleteColumn, Progress, SpinnerColumn, - TaskProgressColumn, TextColumn, TimeElapsedColumn, TimeRemainingColumn, @@ -31,15 +30,7 @@ console = Console() logging.basicConfig( level=logging.INFO, - handlers=[ - RichHandler( - rich_tracebacks=True, - markup=False, - console=console, - show_path=False, - show_time=False, - ) - ], + handlers=[RichHandler(rich_tracebacks=True, markup=False, console=console, show_path=False, show_time=False)], format=BARE_LOGGING_FORMAT, ) @@ -48,9 +39,7 @@ def paneled_text( - text: str, - panel_args: dict[str, str | bool] | None = None, - text_args: dict[str, str] | None = None, + text: str, panel_args: dict[str, str | bool] | None = None, text_args: dict[str, str] | None = None ) -> None: """Print text in a panel.""" from rich.panel import Panel @@ -77,9 +66,7 @@ def code_print(code_str: str) -> None: @contextmanager -def progress_bar( - message: str, *, transient: bool = False -) -> Generator[TaskID, None, None]: +def progress_bar(message: str, *, transient: bool = False) -> Generator[TaskID, None, None]: """Display a progress bar with a spinner and elapsed time.""" progress = Progress( 
SpinnerColumn(next(spinners)), @@ -94,18 +81,12 @@ def progress_bar( @contextmanager -def test_files_progress_bar( - total: int, description: str -) -> Generator[tuple[Progress, TaskID], None, None]: +def test_files_progress_bar(total: int, description: str) -> Generator[tuple[Progress, TaskID], None, None]: """Progress bar for test files.""" with Progress( SpinnerColumn(next(spinners)), TextColumn("[progress.description]{task.description}"), - BarColumn( - complete_style="cyan", - finished_style="green", - pulse_style="yellow", - ), + BarColumn(complete_style="cyan", finished_style="green", pulse_style="yellow"), MofNCompleteColumn(), TimeElapsedColumn(), TimeRemainingColumn(), diff --git a/codeflash/cli_cmds/logging_config.py b/codeflash/cli_cmds/logging_config.py index e546836fc..8bd4a48d9 100644 --- a/codeflash/cli_cmds/logging_config.py +++ b/codeflash/cli_cmds/logging_config.py @@ -27,7 +27,7 @@ def set_level(level: int, *, echo_setting: bool = True) -> None: ], force=True, ) - logging.info("Verbose DEBUG logging enabled") + logging.info("Verbose DEBUG logging enabled") # noqa: LOG015 else: - logging.info("Logging level set to INFO") + logging.info("Logging level set to INFO") # noqa: LOG015 console.rule() diff --git a/codeflash/code_utils/checkpoint.py b/codeflash/code_utils/checkpoint.py index c924665d7..8a333c3fe 100644 --- a/codeflash/code_utils/checkpoint.py +++ b/codeflash/code_utils/checkpoint.py @@ -1,17 +1,21 @@ -import argparse +from __future__ import annotations + import datetime import json import sys import time import uuid from pathlib import Path -from typing import Any, Optional +from typing import TYPE_CHECKING, Any, Optional import click +if TYPE_CHECKING: + import argparse + class CodeflashRunCheckpoint: - def __init__(self, module_root: Path, checkpoint_dir: Path = Path("/tmp")) -> None: + def __init__(self, module_root: Path, checkpoint_dir: Path = Path("/tmp")) -> None: # noqa: S108 self.module_root = module_root self.checkpoint_dir = Path(checkpoint_dir) # Create a unique checkpoint file name @@ -31,7 +35,7 @@ def _initialize_checkpoint_file(self) -> None: "last_updated": time.time(), } - with open(self.checkpoint_path, "w") as f: + with self.checkpoint_path.open("w") as f: f.write(json.dumps(metadata) + "\n") def add_function_to_checkpoint( @@ -59,7 +63,7 @@ def add_function_to_checkpoint( **additional_info, } - with open(self.checkpoint_path, "a") as f: + with self.checkpoint_path.open("a") as f: f.write(json.dumps(function_data) + "\n") # Update the metadata last_updated timestamp @@ -113,8 +117,8 @@ def get_all_historical_functions(module_root: Path, checkpoint_dir: Path) -> dic first_line = next(f) metadata = json.loads(first_line) if metadata.get("last_updated"): - last_updated = datetime.datetime.fromtimestamp(metadata["last_updated"]) - if datetime.datetime.now() - last_updated >= datetime.timedelta(days=7): + last_updated = datetime.datetime.fromtimestamp(metadata["last_updated"]) # noqa: DTZ006 + if datetime.datetime.now() - last_updated >= datetime.timedelta(days=7): # noqa: DTZ005 to_delete.append(file) continue if metadata.get("module_root") != str(module_root): @@ -131,8 +135,8 @@ def get_all_historical_functions(module_root: Path, checkpoint_dir: Path) -> dic def ask_should_use_checkpoint_get_functions(args: argparse.Namespace) -> Optional[dict[str, dict[str, str]]]: previous_checkpoint_functions = None - if args.all and (sys.platform == "linux" or sys.platform == "darwin") and Path("/tmp").is_dir(): - previous_checkpoint_functions = 
get_all_historical_functions(args.module_root, Path("/tmp")) + if args.all and (sys.platform == "linux" or sys.platform == "darwin") and Path("/tmp").is_dir(): # noqa: S108 #TODO: use the temp dir from codeutils-compat.py + previous_checkpoint_functions = get_all_historical_functions(args.module_root, Path("/tmp")) # noqa: S108 if previous_checkpoint_functions and click.confirm( "Previous Checkpoint detected from an incomplete optimization run, shall I continue the optimization from that point?", default=True, diff --git a/codeflash/code_utils/code_extractor.py b/codeflash/code_utils/code_extractor.py index bcbc0e29d..8a2a89e95 100644 --- a/codeflash/code_utils/code_extractor.py +++ b/codeflash/code_utils/code_extractor.py @@ -1,8 +1,8 @@ +# ruff: noqa: ARG002 from __future__ import annotations import ast -from pathlib import Path -from typing import TYPE_CHECKING, Dict, Optional, Set +from typing import TYPE_CHECKING, Optional import libcst as cst import libcst.matchers as m @@ -11,22 +11,24 @@ from libcst.helpers import calculate_module_and_package from codeflash.cli_cmds.console import logger -from codeflash.models.models import FunctionParent, FunctionSource +from codeflash.models.models import FunctionParent if TYPE_CHECKING: + from pathlib import Path + from libcst.helpers import ModuleNameAndPackage from codeflash.discovery.functions_to_optimize import FunctionToOptimize + from codeflash.models.models import FunctionSource -from typing import List, Union class GlobalAssignmentCollector(cst.CSTVisitor): """Collects all global assignment statements.""" - def __init__(self): + def __init__(self) -> None: super().__init__() - self.assignments: Dict[str, cst.Assign] = {} - self.assignment_order: List[str] = [] + self.assignments: dict[str, cst.Assign] = {} + self.assignment_order: list[str] = [] # Track scope depth to identify global assignments self.scope_depth = 0 self.if_else_depth = 0 @@ -71,11 +73,11 @@ def visit_Assign(self, node: cst.Assign) -> Optional[bool]: class GlobalAssignmentTransformer(cst.CSTTransformer): """Transforms global assignments in the original file with those from the new file.""" - def __init__(self, new_assignments: Dict[str, cst.Assign], new_assignment_order: List[str]): + def __init__(self, new_assignments: dict[str, cst.Assign], new_assignment_order: list[str]) -> None: super().__init__() self.new_assignments = new_assignments self.new_assignment_order = new_assignment_order - self.processed_assignments: Set[str] = set() + self.processed_assignments: set[str] = set() self.scope_depth = 0 self.if_else_depth = 0 @@ -123,10 +125,11 @@ def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> c new_statements = list(updated_node.body) # Find assignments to append - assignments_to_append = [] - for name in self.new_assignment_order: - if name not in self.processed_assignments and name in self.new_assignments: - assignments_to_append.append(self.new_assignments[name]) + assignments_to_append = [ + self.new_assignments[name] + for name in self.new_assignment_order + if name not in self.processed_assignments and name in self.new_assignments + ] if assignments_to_append: # Add a blank line before appending new assignments if needed @@ -135,20 +138,20 @@ def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> c new_statements.pop() # Remove the Pass statement but keep the empty line # Add the new assignments - for assignment in assignments_to_append: - new_statements.append( - cst.SimpleStatementLine( - [assignment], - 
leading_lines=[cst.EmptyLine()] - ) - ) + new_statements.extend( + [ + cst.SimpleStatementLine([assignment], leading_lines=[cst.EmptyLine()]) + for assignment in assignments_to_append + ] + ) return updated_node.with_changes(body=new_statements) + class GlobalStatementCollector(cst.CSTVisitor): """Visitor that collects all global statements (excluding imports and functions/classes).""" - def __init__(self): + def __init__(self) -> None: super().__init__() self.global_statements = [] self.in_function_or_class = False @@ -181,7 +184,7 @@ def visit_SimpleStatementLine(self, node: cst.SimpleStatementLine) -> None: class LastImportFinder(cst.CSTVisitor): """Finds the position of the last import statement in the module.""" - def __init__(self): + def __init__(self) -> None: super().__init__() self.last_import_line = 0 self.current_line = 0 @@ -196,7 +199,7 @@ def visit_SimpleStatementLine(self, node: cst.SimpleStatementLine) -> None: class ImportInserter(cst.CSTTransformer): """Transformer that inserts global statements after the last import.""" - def __init__(self, global_statements: List[cst.SimpleStatementLine], last_import_line: int): + def __init__(self, global_statements: list[cst.SimpleStatementLine], last_import_line: int) -> None: super().__init__() self.global_statements = global_statements self.last_import_line = last_import_line @@ -211,7 +214,7 @@ def leave_SimpleStatementLine( # If we're right after the last import and haven't inserted yet if self.current_line == self.last_import_line and not self.inserted: self.inserted = True - return cst.Module(body=[updated_node] + self.global_statements) + return cst.Module(body=[updated_node, *self.global_statements]) return cst.Module(body=[updated_node]) @@ -225,7 +228,7 @@ def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> c return updated_node -def extract_global_statements(source_code: str) -> List[cst.SimpleStatementLine]: +def extract_global_statements(source_code: str) -> list[cst.SimpleStatementLine]: """Extract global statements from source code.""" module = cst.parse_module(source_code) collector = GlobalStatementCollector() @@ -240,6 +243,7 @@ def find_last_import_line(target_code: str) -> int: module.visit(finder) return finder.last_import_line + class FutureAliasedImportTransformer(cst.CSTTransformer): def leave_ImportFrom( self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom @@ -287,8 +291,7 @@ def add_global_assignments(src_module_code: str, dst_module_code: str) -> str: transformer = GlobalAssignmentTransformer(new_collector.assignments, new_collector.assignment_order) transformed_module = original_module.visit(transformer) - dst_module_code = transformed_module.code - return dst_module_code + return transformed_module.code def add_needed_imports_from_module( @@ -359,9 +362,10 @@ def add_needed_imports_from_module( def get_code(functions_to_optimize: list[FunctionToOptimize]) -> tuple[str | None, set[tuple[str, str]]]: - """Return the code for a function or methods in a Python module. functions_to_optimize is either a singleton - FunctionToOptimize instance, which represents either a function at the module level or a method of a class at the - module level, or it represents a list of methods of the same class. + """Return the code for a function or methods in a Python module. 
+ + functions_to_optimize is either a singleton FunctionToOptimize instance, which represents either a function at the + module level or a method of a class at the module level, or it represents a list of methods of the same class. """ if ( not functions_to_optimize @@ -429,7 +433,7 @@ def find_target(node_list: list[ast.stmt], name_parts: tuple[str, str] | tuple[s return find_target(target.body, name_parts[1:]) - with open(file_path, encoding="utf8") as file: + with file_path.open(encoding="utf8") as file: source_code: str = file.read() try: module_node: ast.Module = ast.parse(source_code) diff --git a/codeflash/code_utils/code_replacer.py b/codeflash/code_utils/code_replacer.py index ccb935f42..eb367bdfa 100644 --- a/codeflash/code_utils/code_replacer.py +++ b/codeflash/code_utils/code_replacer.py @@ -8,7 +8,7 @@ import libcst as cst from codeflash.cli_cmds.console import logger -from codeflash.code_utils.code_extractor import add_needed_imports_from_module, add_global_assignments +from codeflash.code_utils.code_extractor import add_global_assignments, add_needed_imports_from_module from codeflash.models.models import FunctionParent if TYPE_CHECKING: @@ -82,7 +82,7 @@ def visit_ClassDef(self, node: cst.ClassDef) -> bool: return True - def leave_ClassDef(self, node: cst.ClassDef) -> None: + def leave_ClassDef(self, node: cst.ClassDef) -> None: # noqa: ARG002 if self.current_class: self.current_class = None @@ -104,7 +104,7 @@ def __init__( ) self.current_class = None - def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: + def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: # noqa: ARG002 return False def leave_FunctionDef(self, original_node: cst.FunctionDef, updated_node: cst.FunctionDef) -> cst.FunctionDef: @@ -133,7 +133,7 @@ def leave_ClassDef(self, original_node: cst.ClassDef, updated_node: cst.ClassDef ) return updated_node - def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> cst.Module: + def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> cst.Module: # noqa: ARG002 node = updated_node max_function_index = None class_index = None diff --git a/codeflash/code_utils/code_utils.py b/codeflash/code_utils/code_utils.py index 507e79f74..2a0ddc310 100644 --- a/codeflash/code_utils/code_utils.py +++ b/codeflash/code_utils/code_utils.py @@ -10,10 +10,14 @@ from codeflash.cli_cmds.console import logger + def encoded_tokens_len(s: str) -> int: - '''Function for returning the approximate length of the encoded tokens - It's an approximation of BPE encoding (https://cdn.openai.com/better-language-models/language_models_are_unsupervised_multitask_learners.pdf)''' - return int(len(s)*0.25) + """Return the approximate length of the encoded tokens. + + It's an approximation of BPE encoding (https://cdn.openai.com/better-language-models/language_models_are_unsupervised_multitask_learners.pdf). 
+ """ + return int(len(s) * 0.25) + def get_qualified_name(module_name: str, full_qualified_name: str) -> str: if not full_qualified_name: diff --git a/codeflash/code_utils/config_parser.py b/codeflash/code_utils/config_parser.py index 79a39168b..7b6243a75 100644 --- a/codeflash/code_utils/config_parser.py +++ b/codeflash/code_utils/config_parser.py @@ -32,7 +32,8 @@ def find_pyproject_toml(config_file: Path | None = None) -> Path: def parse_config_file( - config_file_path: Path | None = None, override_formatter_check: bool = False + config_file_path: Path | None = None, + override_formatter_check: bool = False, # noqa: FBT001, FBT002 ) -> tuple[dict[str, Any], Path]: config_file_path = find_pyproject_toml(config_file_path) try: @@ -58,41 +59,40 @@ def parse_config_file( bool_keys = {"disable-telemetry": False, "disable-imports-sorting": False, "benchmark": False} list_str_keys = {"formatter-cmds": ["black $file"]} - for key in str_keys: + for key, default_value in str_keys.items(): if key in config: config[key] = str(config[key]) else: - config[key] = str_keys[key] - for key in bool_keys: + config[key] = default_value + for key, default_value in bool_keys.items(): if key in config: config[key] = bool(config[key]) else: - config[key] = bool_keys[key] + config[key] = default_value for key in path_keys: if key in config: config[key] = str((Path(config_file_path).parent / Path(config[key])).resolve()) - for key in list_str_keys: + for key, default_value in list_str_keys.items(): if key in config: config[key] = [str(cmd) for cmd in config[key]] else: - config[key] = list_str_keys[key] + config[key] = default_value for key in path_list_keys: if key in config: config[key] = [str((Path(config_file_path).parent / path).resolve()) for path in config[key]] - else: # Default to empty list + else: config[key] = [] assert config["test-framework"] in {"pytest", "unittest"}, ( "In pyproject.toml, Codeflash only supports the 'test-framework' as pytest and unittest." ) - if len(config["formatter-cmds"]) > 0: - #see if this is happening during GitHub actions setup - if not override_formatter_check: - assert config["formatter-cmds"][0] != "your-formatter $file", ( - "The formatter command is not set correctly in pyproject.toml. Please set the " - "formatter command in the 'formatter-cmds' key. More info - https://docs.codeflash.ai/configuration" - ) + # see if this is happening during GitHub actions setup + if len(config["formatter-cmds"]) > 0 and not override_formatter_check: + assert config["formatter-cmds"][0] != "your-formatter $file", ( + "The formatter command is not set correctly in pyproject.toml. Please set the " + "formatter command in the 'formatter-cmds' key. 
More info - https://docs.codeflash.ai/configuration" + ) for key in list(config.keys()): if "-" in key: config[key.replace("-", "_")] = config[key] diff --git a/codeflash/code_utils/env_utils.py b/codeflash/code_utils/env_utils.py index 9f303a4f6..41ef89351 100644 --- a/codeflash/code_utils/env_utils.py +++ b/codeflash/code_utils/env_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from functools import lru_cache from typing import Optional diff --git a/codeflash/code_utils/git_utils.py b/codeflash/code_utils/git_utils.py index 2982b05fa..8333c1099 100644 --- a/codeflash/code_utils/git_utils.py +++ b/codeflash/code_utils/git_utils.py @@ -21,7 +21,7 @@ from git import Repo -def get_git_diff(repo_directory: Path = Path.cwd(), uncommitted_changes: bool = False) -> dict[str, list[int]]: +def get_git_diff(repo_directory: Path = Path.cwd(), uncommitted_changes: bool = False) -> dict[str, list[int]]: # noqa: B008, FBT001, FBT002 repository = git.Repo(repo_directory, search_parent_directories=True) commit = repository.head.commit if uncommitted_changes: @@ -117,7 +117,7 @@ def confirm_proceeding_with_no_git_repo() -> str | bool: return True -def check_and_push_branch(repo: git.Repo, wait_for_push: bool = False) -> bool: +def check_and_push_branch(repo: git.Repo, wait_for_push: bool = False) -> bool: # noqa: FBT001, FBT002 current_branch = repo.active_branch.name origin = repo.remote(name="origin") diff --git a/codeflash/code_utils/github_utils.py b/codeflash/code_utils/github_utils.py index 2b053a326..0be50dda4 100644 --- a/codeflash/code_utils/github_utils.py +++ b/codeflash/code_utils/github_utils.py @@ -1,6 +1,6 @@ -from typing import Optional +from __future__ import annotations -from git import Repo +from typing import TYPE_CHECKING, Optional from codeflash.api.cfapi import is_github_app_installed_on_repo from codeflash.cli_cmds.cli_common import apologize_and_exit @@ -8,6 +8,9 @@ from codeflash.code_utils.compat import LF from codeflash.code_utils.git_utils import get_repo_owner_and_name +if TYPE_CHECKING: + from git import Repo + def get_github_secrets_page_url(repo: Optional[Repo] = None) -> str: owner, repo_name = get_repo_owner_and_name(repo) @@ -27,5 +30,6 @@ def require_github_app_or_exit(owner: str, repo: str) -> None: ) apologize_and_exit() + def github_pr_url(owner: str, repo: str, pr_number: str) -> str: return f"https://github.com/{owner}/{repo}/pull/{pr_number}" diff --git a/codeflash/code_utils/line_profile_utils.py b/codeflash/code_utils/line_profile_utils.py index 21768cf68..935e30356 100644 --- a/codeflash/code_utils/line_profile_utils.py +++ b/codeflash/code_utils/line_profile_utils.py @@ -1,19 +1,26 @@ -"""Adapted from line_profiler (https://github.com/pyutils/line_profiler) written by Enthought, Inc. (BSD License)""" +"""Adapted from line_profiler (https://github.com/pyutils/line_profiler) written by Enthought, Inc. 
(BSD License).""" + +from __future__ import annotations + from collections import defaultdict from pathlib import Path -from typing import Union +from typing import TYPE_CHECKING, Union import isort import libcst as cst from codeflash.code_utils.code_utils import get_run_tmp_file +if TYPE_CHECKING: + from codeflash.discovery.functions_to_optimize import FunctionToOptimize + from codeflash.models.models import CodeOptimizationContext + class LineProfilerDecoratorAdder(cst.CSTTransformer): """Transformer that adds a decorator to a function with a specific qualified name.""" - #TODO we don't support nested functions yet so they can only be inside classes, dont use qualified names, instead use the structure - def __init__(self, qualified_name: str, decorator_name: str): + # TODO we don't support nested functions yet so they can only be inside classes, dont use qualified names, instead use the structure + def __init__(self, qualified_name: str, decorator_name: str) -> None: """Initialize the transformer. Args: @@ -32,7 +39,7 @@ def visit_ClassDef(self, node: cst.ClassDef) -> None: # Track when we enter a class self.context_stack.append(node.name.value) - def leave_ClassDef(self, original_node: cst.ClassDef, updated_node: cst.ClassDef) -> cst.ClassDef: + def leave_ClassDef(self, original_node: cst.ClassDef, updated_node: cst.ClassDef) -> cst.ClassDef: # noqa: ARG002 # Pop the context when we leave a class self.context_stack.pop() return updated_node @@ -42,27 +49,20 @@ def visit_FunctionDef(self, node: cst.FunctionDef) -> None: self.context_stack.append(node.name.value) def leave_FunctionDef(self, original_node: cst.FunctionDef, updated_node: cst.FunctionDef) -> cst.FunctionDef: - function_name = original_node.name.value - # Check if the current context path matches our target qualified name - if self.context_stack==self.qualified_name_parts: + if self.context_stack == self.qualified_name_parts: # Check if the decorator is already present has_decorator = any( - self._is_target_decorator(decorator.decorator) - for decorator in original_node.decorators + self._is_target_decorator(decorator.decorator) for decorator in original_node.decorators ) # Only add the decorator if it's not already there if not has_decorator: - new_decorator = cst.Decorator( - decorator=cst.Name(value=self.decorator_name) - ) + new_decorator = cst.Decorator(decorator=cst.Name(value=self.decorator_name)) # Add our new decorator to the existing decorators - updated_decorators = [new_decorator] + list(updated_node.decorators) - updated_node = updated_node.with_changes( - decorators=tuple(updated_decorators) - ) + updated_decorators = [new_decorator, *list(updated_node.decorators)] + updated_node = updated_node.with_changes(decorators=tuple(updated_decorators)) # Pop the context when we leave a function self.context_stack.pop() @@ -76,8 +76,9 @@ def _is_target_decorator(self, decorator_node: Union[cst.Name, cst.Attribute, cs return decorator_node.func.value == self.decorator_name return False + class ProfileEnableTransformer(cst.CSTTransformer): - def __init__(self,filename): + def __init__(self, filename: str) -> None: # Flag to track if we found the import statement self.found_import = False # Track indentation of the import statement @@ -86,12 +87,14 @@ def __init__(self,filename): def leave_ImportFrom(self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom) -> cst.ImportFrom: # Check if this is the line profiler import statement - if (isinstance(original_node.module, cst.Name) and - original_node.module.value == 
"line_profiler" and - any(name.name.value == "profile" and - (not name.asname or name.asname.name.value == "codeflash_line_profile") - for name in original_node.names)): - + if ( + isinstance(original_node.module, cst.Name) + and original_node.module.value == "line_profiler" + and any( + name.name.value == "profile" and (not name.asname or name.asname.name.value == "codeflash_line_profile") + for name in original_node.names + ) + ): self.found_import = True # Get the indentation from the original node if hasattr(original_node, "leading_lines"): @@ -100,7 +103,7 @@ def leave_ImportFrom(self, original_node: cst.ImportFrom, updated_node: cst.Impo return updated_node - def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> cst.Module: + def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> cst.Module: # noqa: ARG002 if not self.found_import: return updated_node @@ -112,22 +115,23 @@ def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> c for i, stmt in enumerate(new_body): if isinstance(stmt, cst.SimpleStatementLine): for small_stmt in stmt.body: - if isinstance(small_stmt, cst.ImportFrom): - if (isinstance(small_stmt.module, cst.Name) and - small_stmt.module.value == "line_profiler" and - any(name.name.value == "profile" and - (not name.asname or name.asname.name.value == "codeflash_line_profile") - for name in small_stmt.names)): - import_index = i - break + if isinstance(small_stmt, cst.ImportFrom) and ( + isinstance(small_stmt.module, cst.Name) + and small_stmt.module.value == "line_profiler" + and any( + name.name.value == "profile" + and (not name.asname or name.asname.name.value == "codeflash_line_profile") + for name in small_stmt.names + ) + ): + import_index = i + break if import_index is not None: break if import_index is not None: # Create the new enable statement to insert after the import - enable_statement = cst.parse_statement( - f"codeflash_line_profile.enable(output_prefix='{self.filename}')" - ) + enable_statement = cst.parse_statement(f"codeflash_line_profile.enable(output_prefix='{self.filename}')") # Insert the new statement after the import statement new_body.insert(import_index + 1, enable_statement) @@ -135,26 +139,22 @@ def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> c # Create a new module with the updated body return updated_node.with_changes(body=new_body) -def add_decorator_to_qualified_function(module, qualified_name, decorator_name): + +def add_decorator_to_qualified_function(module: cst.Module, qualified_name: str, decorator_name: str) -> cst.Module: """Add a decorator to a function with the exact qualified name in the source code. Args: - module: The Python source code as a string. + module: The Python source code as a CST module. qualified_name: The fully qualified name of the function to add the decorator to (e.g., "MyClass.nested_func.target_func"). decorator_name: The name of the decorator to add. Returns: - The modified source code as a string. + The modified CST module. 
""" - # Parse the source code into a CST - - # Apply our transformer transformer = LineProfilerDecoratorAdder(qualified_name, decorator_name) - modified_module = module.visit(transformer) + return module.visit(transformer) - # Convert the modified CST back to source code - return modified_module def add_profile_enable(original_code: str, line_profile_output_file: str) -> str: # TODO modify by using a libcst transformer @@ -165,11 +165,11 @@ def add_profile_enable(original_code: str, line_profile_output_file: str) -> str class ImportAdder(cst.CSTTransformer): - def __init__(self, import_statement): + def __init__(self, import_statement) -> None: # noqa: ANN001 self.import_statement = import_statement self.has_import = False - def leave_Module(self, original_node, updated_node): + def leave_Module(self, original_node, updated_node): # noqa: ANN001, ANN201, ARG002 # If the import is already there, don't add it again if self.has_import: return updated_node @@ -178,11 +178,9 @@ def leave_Module(self, original_node, updated_node): import_node = cst.parse_statement(self.import_statement) # Add the import to the module's body - return updated_node.with_changes( - body=[import_node] + list(updated_node.body) - ) + return updated_node.with_changes(body=[import_node, *list(updated_node.body)]) - def visit_ImportFrom(self, node): + def visit_ImportFrom(self, node) -> None: # noqa: ANN001 # Check if the profile is already imported from line_profiler if node.module and node.module.value == "line_profiler": for import_alias in node.names: @@ -190,17 +188,17 @@ def visit_ImportFrom(self, node): self.has_import = True -def add_decorator_imports(function_to_optimize, code_context): - """Adds a profile decorator to a function in a Python file and all its helper functions.""" - #self.function_to_optimize, file_path_to_helper_classes, self.test_cfg.tests_root - #grouped iteration, file to fns to optimize, from line_profiler import profile as codeflash_line_profile +def add_decorator_imports(function_to_optimize: FunctionToOptimize, code_context: CodeOptimizationContext) -> Path: + """Add a profile decorator to a function in a Python file and all its helper functions.""" + # self.function_to_optimize, file_path_to_helper_classes, self.test_cfg.tests_root + # grouped iteration, file to fns to optimize, from line_profiler import profile as codeflash_line_profile file_paths = defaultdict(list) line_profile_output_file = get_run_tmp_file(Path("baseline_lprof")) file_paths[function_to_optimize.file_path].append(function_to_optimize.qualified_name) for elem in code_context.helper_functions: file_paths[elem.file_path].append(elem.qualified_name) - for file_path,fns_present in file_paths.items(): - #open file + for file_path, fns_present in file_paths.items(): + # open file file_contents = file_path.read_text("utf-8") # parse to cst module_node = cst.parse_module(file_contents) @@ -214,10 +212,10 @@ def add_decorator_imports(function_to_optimize, code_context): module_node = module_node.visit(transformer) modified_code = isort.code(module_node.code, float_to_top=True) # write to file - with open(file_path, "w", encoding="utf-8") as file: + with file_path.open("w", encoding="utf-8") as file: file.write(modified_code) - #Adding profile.enable line for changing the savepath of the data, do this only for the main file and not the helper files + # Adding profile.enable line for changing the savepath of the data, do this only for the main file and not the helper files file_contents = 
function_to_optimize.file_path.read_text("utf-8") - modified_code = add_profile_enable(file_contents,str(line_profile_output_file)) - function_to_optimize.file_path.write_text(modified_code,"utf-8") + modified_code = add_profile_enable(file_contents, str(line_profile_output_file)) + function_to_optimize.file_path.write_text(modified_code, "utf-8") return line_profile_output_file diff --git a/codeflash/code_utils/shell_utils.py b/codeflash/code_utils/shell_utils.py index 760b4f9d3..dc1b1e647 100644 --- a/codeflash/code_utils/shell_utils.py +++ b/codeflash/code_utils/shell_utils.py @@ -1,10 +1,15 @@ +from __future__ import annotations + import os import re from pathlib import Path -from typing import Optional +from typing import TYPE_CHECKING, Optional from codeflash.code_utils.compat import LF -from codeflash.either import Failure, Result, Success +from codeflash.either import Failure, Success + +if TYPE_CHECKING: + from codeflash.either import Result if os.name == "nt": # Windows SHELL_RC_EXPORT_PATTERN = re.compile(r"^set CODEFLASH_API_KEY=(cf-.*)$", re.MULTILINE) @@ -17,7 +22,7 @@ def read_api_key_from_shell_config() -> Optional[str]: try: shell_rc_path = get_shell_rc_path() - with open(shell_rc_path, encoding="utf8") as shell_rc: + with open(shell_rc_path, encoding="utf8") as shell_rc: # noqa: PTH123 shell_contents = shell_rc.read() matches = SHELL_RC_EXPORT_PATTERN.findall(shell_contents) return matches[-1] if matches else None @@ -40,15 +45,14 @@ def get_api_key_export_line(api_key: str) -> str: return f"{SHELL_RC_EXPORT_PREFIX}{api_key}" -def save_api_key_to_rc(api_key) -> Result[str, str]: +def save_api_key_to_rc(api_key: str) -> Result[str, str]: shell_rc_path = get_shell_rc_path() api_key_line = get_api_key_export_line(api_key) try: - with open(shell_rc_path, "r+", encoding="utf8") as shell_file: + with open(shell_rc_path, "r+", encoding="utf8") as shell_file: # noqa: PTH123 shell_contents = shell_file.read() - if os.name == "nt": # on Windows, we're writing a batch file - if not shell_contents: - shell_contents = "@echo off" + if os.name == "nt" and not shell_contents: # on windows we're writing to a batch file + shell_contents = "@echo off" existing_api_key = read_api_key_from_shell_config() if existing_api_key: diff --git a/codeflash/code_utils/tabulate.py b/codeflash/code_utils/tabulate.py index c75dcd03e..bc42cd031 100644 --- a/codeflash/code_utils/tabulate.py +++ b/codeflash/code_utils/tabulate.py @@ -1,15 +1,18 @@ -"""Adapted from tabulate (https://github.com/astanin/python-tabulate) written by Sergey Astanin and contributors (MIT License)""" +"""Adapted from tabulate (https://github.com/astanin/python-tabulate) written by Sergey Astanin and contributors (MIT License).""" """Pretty-print tabular data.""" +# ruff: noqa +import dataclasses +import math +import re import warnings from collections import namedtuple from collections.abc import Iterable -from itertools import chain, zip_longest as izip_longest from functools import reduce -import re -import math -import dataclasses +from itertools import chain +from itertools import zip_longest as izip_longest + import wcwidth # optional wide-character (CJK) support __all__ = ["tabulate", "tabulate_formats"] @@ -32,12 +35,12 @@ # It is purposely an unprintable character, very unlikely to be used in a table SEPARATING_LINE = "\001" -Line = namedtuple("Line", ["begin", "hline", "sep", "end"]) +Line = namedtuple("Line", ["begin", "hline", "sep", "end"]) # noqa: PYI024 -DataRow = namedtuple("DataRow", ["begin", "sep", "end"]) 
+DataRow = namedtuple("DataRow", ["begin", "sep", "end"]) # noqa: PYI024 -TableFormat = namedtuple( +TableFormat = namedtuple( # noqa: PYI024 "TableFormat", [ "lineabove", @@ -59,8 +62,7 @@ def _is_separating_line_value(value): def _is_separating_line(row): row_type = type(row) is_sl = (row_type == list or row_type == str) and ( - (len(row) >= 1 and _is_separating_line_value(row[0])) - or (len(row) >= 2 and _is_separating_line_value(row[1])) + (len(row) >= 1 and _is_separating_line_value(row[0])) or (len(row) >= 2 and _is_separating_line_value(row[1])) ) return is_sl @@ -68,26 +70,28 @@ def _is_separating_line(row): def _pipe_segment_with_colons(align, colwidth): """Return a segment of a horizontal line with optional colons which - indicate column's alignment (as in `pipe` output format).""" + indicate column's alignment (as in `pipe` output format). + """ w = colwidth if align in {"right", "decimal"}: return ("-" * (w - 1)) + ":" - elif align == "center": + if align == "center": return ":" + ("-" * (w - 2)) + ":" - elif align == "left": + if align == "left": return ":" + ("-" * (w - 1)) - else: - return "-" * w + return "-" * w def _pipe_line_with_colons(colwidths, colaligns): """Return a horizontal line with optional colons to indicate column's - alignment (as in `pipe` output format).""" + alignment (as in `pipe` output format). + """ if not colaligns: # e.g. printing an empty data frame (github issue #15) colaligns = [""] * len(colwidths) segments = [_pipe_segment_with_colons(a, w) for a, w in zip(colaligns, colwidths)] return "|" + "|".join(segments) + "|" + _table_formats = { "simple": TableFormat( lineabove=Line("", "-", " ", ""), @@ -111,16 +115,12 @@ def _pipe_line_with_colons(colwidths, colaligns): ), } -tabulate_formats = list(sorted(_table_formats.keys())) +tabulate_formats = sorted(_table_formats.keys()) # The table formats for which multiline cells will be folded into subsequent # table rows. The key is the original format specified at the API. The value is # the format that will be used to represent the original format. 
-multiline_formats = { - "plain": "plain", - "pipe": "pipe", - -} +multiline_formats = {"plain": "plain", "pipe": "pipe"} _multiline_codes = re.compile(r"\r|\n|\r\n") _multiline_codes_bytes = re.compile(b"\r|\n|\r\n") @@ -152,9 +152,8 @@ def _pipe_line_with_colons(colwidths, colaligns): _ansi_codes_bytes = re.compile(_ansi_escape_pat.encode("utf8"), re.VERBOSE) _ansi_color_reset_code = "\033[0m" -_float_with_thousands_separators = re.compile( - r"^(([+-]?[0-9]{1,3})(?:,([0-9]{3}))*)?(?(1)\.[0-9]*|\.[0-9]+)?$" -) +_float_with_thousands_separators = re.compile(r"^(([+-]?[0-9]{1,3})(?:,([0-9]{3}))*)?(?(1)\.[0-9]*|\.[0-9]+)?$") + def _isnumber_with_thousands_separator(string): try: @@ -202,16 +201,12 @@ def _isint(string, inttype=int): (hasattr(string, "is_integer") or hasattr(string, "__array__")) and str(type(string)).startswith("= 0: - return len(string) - pos - 1 - else: - return -1 # no point - else: - return -1 # not a number + pos = string.rfind(".") + pos = string.lower().rfind("e") if pos < 0 else pos + if pos >= 0: + return len(string) - pos - 1 + return -1 # no point + return -1 # not a number def _padleft(width, s): @@ -281,8 +264,8 @@ def _padnone(ignore_width, s): def _strip_ansi(s): if isinstance(s, str): return _ansi_codes.sub(r"\4", s) - else: # a bytestring - return _ansi_codes_bytes.sub(r"\4", s) + # a bytestring + return _ansi_codes_bytes.sub(r"\4", s) def _visible_width(s): @@ -292,15 +275,14 @@ def _visible_width(s): len_fn = len if isinstance(s, (str, bytes)): return len_fn(_strip_ansi(s)) - else: - return len_fn(str(s)) + return len_fn(str(s)) def _is_multiline(s): if isinstance(s, str): return bool(re.search(_multiline_codes, s)) - else: # a bytestring - return bool(re.search(_multiline_codes_bytes, s)) + # a bytestring + return bool(re.search(_multiline_codes_bytes, s)) def _multiline_width(multiline_s, line_width_fn=len): @@ -384,65 +366,40 @@ def _align_column( is_multiline=False, preserve_whitespace=False, ): - strings, padfn = _align_column_choose_padfn( - strings, alignment, has_invisible, preserve_whitespace - ) - width_fn = _align_column_choose_width_fn( - has_invisible, enable_widechars, is_multiline - ) + strings, padfn = _align_column_choose_padfn(strings, alignment, has_invisible, preserve_whitespace) + width_fn = _align_column_choose_width_fn(has_invisible, enable_widechars, is_multiline) s_widths = list(map(width_fn, strings)) maxwidth = max(max(_flat_list(s_widths)), minwidth) # TODO: refactor column alignment in single-line and multiline modes if is_multiline: if not enable_widechars and not has_invisible: - padded_strings = [ - "\n".join([padfn(maxwidth, s) for s in ms.splitlines()]) - for ms in strings - ] + padded_strings = ["\n".join([padfn(maxwidth, s) for s in ms.splitlines()]) for ms in strings] else: # enable wide-character width corrections s_lens = [[len(s) for s in re.split("[\r\n]", ms)] for ms in strings] - visible_widths = [ - [maxwidth - (w - l) for w, l in zip(mw, ml)] - for mw, ml in zip(s_widths, s_lens) - ] + visible_widths = [[maxwidth - (w - l) for w, l in zip(mw, ml)] for mw, ml in zip(s_widths, s_lens)] # wcswidth and _visible_width don't count invisible characters; # padfn doesn't need to apply another correction padded_strings = [ "\n".join([padfn(w, s) for s, w in zip((ms.splitlines() or ms), mw)]) for ms, mw in zip(strings, visible_widths) ] - else: # single-line cell values - if not enable_widechars and not has_invisible: - padded_strings = [padfn(maxwidth, s) for s in strings] - else: - # enable wide-character width 
corrections - s_lens = list(map(len, strings)) - visible_widths = [maxwidth - (w - l) for w, l in zip(s_widths, s_lens)] - # wcswidth and _visible_width don't count invisible characters; - # padfn doesn't need to apply another correction - padded_strings = [padfn(w, s) for s, w in zip(strings, visible_widths)] + elif not enable_widechars and not has_invisible: + padded_strings = [padfn(maxwidth, s) for s in strings] + else: + # enable wide-character width corrections + s_lens = list(map(len, strings)) + visible_widths = [maxwidth - (w - l) for w, l in zip(s_widths, s_lens)] + # wcswidth and _visible_width don't count invisible characters; + # padfn doesn't need to apply another correction + padded_strings = [padfn(w, s) for s, w in zip(strings, visible_widths)] return padded_strings def _more_generic(type1, type2): - types = { - type(None): 0, - bool: 1, - int: 2, - float: 3, - bytes: 4, - str: 5, - } - invtypes = { - 5: str, - 4: bytes, - 3: float, - 2: int, - 1: bool, - 0: type(None), - } + types = {type(None): 0, bool: 1, int: 2, float: 3, bytes: 4, str: 5} + invtypes = {5: str, 4: bytes, 3: float, 2: int, 1: bool, 0: type(None)} moregeneric = max(types.get(type1, 5), types.get(type2, 5)) return invtypes[moregeneric] @@ -460,26 +417,20 @@ def _format(val, valtype, floatfmt, intfmt, missingval="", has_invisible=True): if valtype is str: return f"{val}" - elif valtype is int: + if valtype is int: if isinstance(val, str): val_striped = val.encode("unicode_escape").decode("utf-8") - colored = re.search( - r"(\\[xX]+[0-9a-fA-F]+\[\d+[mM]+)([0-9.]+)(\\.*)$", val_striped - ) + colored = re.search(r"(\\[xX]+[0-9a-fA-F]+\[\d+[mM]+)([0-9.]+)(\\.*)$", val_striped) if colored: total_groups = len(colored.groups()) if total_groups == 3: digits = colored.group(2) if digits.isdigit(): - val_new = ( - colored.group(1) - + format(int(digits), intfmt) - + colored.group(3) - ) + val_new = colored.group(1) + format(int(digits), intfmt) + colored.group(3) val = val_new.encode("utf-8").decode("unicode_escape") intfmt = "" return format(val, intfmt) - elif valtype is bytes: + if valtype is bytes: try: return str(val, "ascii") except (TypeError, UnicodeDecodeError): @@ -490,35 +441,29 @@ def _format(val, valtype, floatfmt, intfmt, missingval="", has_invisible=True): raw_val = _strip_ansi(val) formatted_val = format(float(raw_val), floatfmt) return val.replace(raw_val, formatted_val) - else: - if isinstance(val, str) and "," in val: - val = val.replace(",", "") # handle thousands-separators - return format(float(val), floatfmt) + if isinstance(val, str) and "," in val: + val = val.replace(",", "") # handle thousands-separators + return format(float(val), floatfmt) else: return f"{val}" -def _align_header( - header, alignment, width, visible_width, is_multiline=False, width_fn=None -): - "Pad string header to width chars given known visible_width of the header." 
+def _align_header(header, alignment, width, visible_width, is_multiline=False, width_fn=None): + """Pad string header to width chars given known visible_width of the header.""" if is_multiline: header_lines = re.split(_multiline_codes, header) - padded_lines = [ - _align_header(h, alignment, width, width_fn(h)) for h in header_lines - ] + padded_lines = [_align_header(h, alignment, width, width_fn(h)) for h in header_lines] return "\n".join(padded_lines) # else: not multiline ninvisible = len(header) - visible_width width += ninvisible if alignment == "left": return _padright(width, header) - elif alignment == "center": + if alignment == "center": return _padboth(width, header) - elif not alignment: + if not alignment: return f"{header}" - else: - return _padleft(width, header) + return _padleft(width, header) def _remove_separating_lines(rows): @@ -531,11 +476,11 @@ def _remove_separating_lines(rows): else: sans_rows.append(row) return sans_rows, separating_lines - else: - return rows, None + return rows, None + def _bool(val): - "A wrapper around standard bool() which doesn't throw on NumPy arrays" + """A wrapper around standard bool() which doesn't throw on NumPy arrays""" try: return bool(val) except ValueError: # val is likely to be a numpy array with many elements @@ -556,23 +501,18 @@ def _normalize_tabular_data(tabular_data, headers, showindex="default"): index = None if hasattr(tabular_data, "keys") and hasattr(tabular_data, "values"): # dict-like and pandas.DataFrame? - if hasattr(tabular_data.values, "__call__"): + if callable(tabular_data.values): # likely a conventional dict keys = tabular_data.keys() try: - rows = list( - izip_longest(*tabular_data.values()) - ) # columns have to be transposed + rows = list(izip_longest(*tabular_data.values())) # columns have to be transposed except TypeError: # not iterable raise TypeError(err_msg) elif hasattr(tabular_data, "index"): # values is a property, has .index => it's likely a pandas.DataFrame (pandas 0.11.0) keys = list(tabular_data) - if ( - showindex in {"default", "always", True} - and tabular_data.index.name is not None - ): + if showindex in {"default", "always", True} and tabular_data.index.name is not None: if isinstance(tabular_data.index.name, list): keys[:0] = tabular_data.index.name else: @@ -596,19 +536,10 @@ def _normalize_tabular_data(tabular_data, headers, showindex="default"): if headers == "keys" and not rows: # an empty table (issue #81) headers = [] - elif ( - headers == "keys" - and hasattr(tabular_data, "dtype") - and getattr(tabular_data.dtype, "names") - ): + elif headers == "keys" and hasattr(tabular_data, "dtype") and tabular_data.dtype.names: # numpy record array headers = tabular_data.dtype.names - elif ( - headers == "keys" - and len(rows) > 0 - and isinstance(rows[0], tuple) - and hasattr(rows[0], "_fields") - ): + elif headers == "keys" and len(rows) > 0 and isinstance(rows[0], tuple) and hasattr(rows[0], "_fields"): # namedtuple headers = list(map(str, rows[0]._fields)) elif len(rows) > 0 and hasattr(rows[0], "keys") and hasattr(rows[0], "values"): @@ -639,9 +570,7 @@ def _normalize_tabular_data(tabular_data, headers, showindex="default"): else: headers = [] elif headers: - raise ValueError( - "headers for a list of dicts is not a dict or a keyword" - ) + raise ValueError("headers for a list of dicts is not a dict or a keyword") rows = [[row.get(k) for k in keys] for row in rows] elif ( @@ -654,11 +583,7 @@ def _normalize_tabular_data(tabular_data, headers, showindex="default"): # print 
tabulate(cursor, headers='keys') headers = [column[0] for column in tabular_data.description] - elif ( - dataclasses is not None - and len(rows) > 0 - and dataclasses.is_dataclass(rows[0]) - ): + elif dataclasses is not None and len(rows) > 0 and dataclasses.is_dataclass(rows[0]): # Python's dataclass field_names = [field.name for field in dataclasses.fields(rows[0])] if headers == "keys": @@ -698,6 +623,7 @@ def _normalize_tabular_data(tabular_data, headers, showindex="default"): return rows, headers, headers_pad + def _to_str(s, encoding="utf8", errors="ignore"): if isinstance(s, bytes): return s.decode(encoding=encoding, errors=errors) @@ -727,9 +653,7 @@ def tabulate( if tabular_data is None: tabular_data = [] - list_of_lists, headers, headers_pad = _normalize_tabular_data( - tabular_data, headers, showindex=showindex - ) + list_of_lists, headers, headers_pad = _normalize_tabular_data(tabular_data, headers, showindex=showindex) list_of_lists, separating_lines = _remove_separating_lines(list_of_lists) # PrettyTable formatting does not use any extra padding. @@ -771,11 +695,7 @@ def tabulate( has_invisible = _ansi_codes.search(plain_text) is not None enable_widechars = wcwidth is not None and WIDE_CHARS_MODE - if ( - not isinstance(tablefmt, TableFormat) - and tablefmt in multiline_formats - and _is_multiline(plain_text) - ): + if not isinstance(tablefmt, TableFormat) and tablefmt in multiline_formats and _is_multiline(plain_text): tablefmt = multiline_formats.get(tablefmt, tablefmt) is_multiline = True else: @@ -787,17 +707,13 @@ def tabulate( numparses = _expand_numparse(disable_numparse, len(cols)) coltypes = [_column_type(col, numparse=np) for col, np in zip(cols, numparses)] if isinstance(floatfmt, str): # old version - float_formats = len(cols) * [ - floatfmt - ] # just duplicate the string to use in each column + float_formats = len(cols) * [floatfmt] # just duplicate the string to use in each column else: # if floatfmt is list, tuple etc we have one per column float_formats = list(floatfmt) if len(float_formats) < len(cols): float_formats.extend((len(cols) - len(float_formats)) * [_DEFAULT_FLOATFMT]) if isinstance(intfmt, str): # old version - int_formats = len(cols) * [ - intfmt - ] # just duplicate the string to use in each column + int_formats = len(cols) * [intfmt] # just duplicate the string to use in each column else: # if intfmt is list, tuple etc we have one per column int_formats = list(intfmt) if len(int_formats) < len(cols): @@ -810,9 +726,7 @@ def tabulate( missing_vals.extend((len(cols) - len(missing_vals)) * [_DEFAULT_MISSINGVAL]) cols = [ [_format(v, ct, fl_fmt, int_fmt, miss_v, has_invisible) for v in c] - for c, ct, fl_fmt, int_fmt, miss_v in zip( - cols, coltypes, float_formats, int_formats, missing_vals - ) + for c, ct, fl_fmt, int_fmt, miss_v in zip(cols, coltypes, float_formats, int_formats, missing_vals) ] # align columns @@ -833,26 +747,16 @@ def tabulate( for idx, align in enumerate(colalign): if not idx < len(aligns): break - elif align != "global": + if align != "global": aligns[idx] = align - minwidths = ( - [width_fn(h) + min_padding for h in headers] if headers else [0] * len(cols) - ) + minwidths = [width_fn(h) + min_padding for h in headers] if headers else [0] * len(cols) aligns_copy = aligns.copy() # Reset alignments in copy of alignments list to "left" for 'colon_grid' format, # which enforces left alignment in the text output of the data. 
if tablefmt == "colon_grid": aligns_copy = ["left"] * len(cols) cols = [ - _align_column( - c, - a, - minw, - has_invisible, - enable_widechars, - is_multiline, - preserve_whitespace, - ) + _align_column(c, a, minw, has_invisible, enable_widechars, is_multiline, preserve_whitespace) for c, a, minw in zip(cols, aligns_copy, minwidths) ] @@ -879,14 +783,11 @@ def tabulate( hidx = headers_pad + idx if not hidx < len(aligns_headers): break - elif align == "same" and hidx < len(aligns): # same as column align + if align == "same" and hidx < len(aligns): # same as column align aligns_headers[hidx] = aligns[hidx] elif align != "global": aligns_headers[hidx] = align - minwidths = [ - max(minw, max(width_fn(cl) for cl in c)) - for minw, c in zip(minwidths, t_cols) - ] + minwidths = [max(minw, max(width_fn(cl) for cl in c)) for minw, c in zip(minwidths, t_cols)] headers = [ _align_header(h, a, minw, width_fn(h), is_multiline, width_fn) for h, a, minw in zip(headers, aligns_headers, minwidths) @@ -901,16 +802,7 @@ def tabulate( ra_default = rowalign if isinstance(rowalign, str) else None rowaligns = _expand_iterable(rowalign, len(rows), ra_default) - return _format_table( - tablefmt, - headers, - aligns_headers, - rows, - minwidths, - aligns, - is_multiline, - rowaligns=rowaligns, - ) + return _format_table(tablefmt, headers, aligns_headers, rows, minwidths, aligns, is_multiline, rowaligns=rowaligns) def _expand_numparse(disable_numparse, column_count): @@ -919,15 +811,13 @@ def _expand_numparse(disable_numparse, column_count): for index in disable_numparse: numparses[index] = False return numparses - else: - return [not disable_numparse] * column_count + return [not disable_numparse] * column_count def _expand_iterable(original, num_desired, default): if isinstance(original, Iterable) and not isinstance(original, str): return original + [default] * (num_desired - len(original)) - else: - return [default] * num_desired + return [default] * num_desired def _pad_row(cells, padding): @@ -937,8 +827,7 @@ def _pad_row(cells, padding): pad = " " * padding padded_cells = [pad + cell + pad for cell in cells] return padded_cells - else: - return cells + return cells def _build_simple_row(padded_cells, rowfmt): @@ -949,35 +838,34 @@ def _build_simple_row(padded_cells, rowfmt): def _build_row(padded_cells, colwidths, colaligns, rowfmt): if not rowfmt: return None - if hasattr(rowfmt, "__call__"): + if callable(rowfmt): return rowfmt(padded_cells, colwidths, colaligns) - else: - return _build_simple_row(padded_cells, rowfmt) + return _build_simple_row(padded_cells, rowfmt) + def _append_basic_row(lines, padded_cells, colwidths, colaligns, rowfmt, rowalign=None): # NOTE: rowalign is ignored and exists for api compatibility with _append_multiline_row lines.append(_build_row(padded_cells, colwidths, colaligns, rowfmt)) return lines + def _build_line(colwidths, colaligns, linefmt): - "Return a string which represents a horizontal line." 
+ """Return a string which represents a horizontal line.""" if not linefmt: return None - if hasattr(linefmt, "__call__"): + if callable(linefmt): return linefmt(colwidths, colaligns) - else: - begin, fill, sep, end = linefmt - cells = [fill * w for w in colwidths] - return _build_simple_row(cells, (begin, sep, end)) + begin, fill, sep, end = linefmt + cells = [fill * w for w in colwidths] + return _build_simple_row(cells, (begin, sep, end)) def _append_line(lines, colwidths, colaligns, linefmt): lines.append(_build_line(colwidths, colaligns, linefmt)) return lines -def _format_table( - fmt, headers, headersaligns, rows, colwidths, colaligns, is_multiline, rowaligns -): + +def _format_table(fmt, headers, headersaligns, rows, colwidths, colaligns, is_multiline, rowaligns): lines = [] hidden = fmt.with_header_hide if (headers and fmt.with_header_hide) else [] pad = fmt.padding @@ -1001,31 +889,13 @@ def _format_table( # initial rows with a line below for row, ralign in zip(rows[:-1], rowaligns): if row != SEPARATING_LINE: - append_row( - lines, - pad_row(row, pad), - padded_widths, - colaligns, - fmt.datarow, - rowalign=ralign, - ) + append_row(lines, pad_row(row, pad), padded_widths, colaligns, fmt.datarow, rowalign=ralign) _append_line(lines, padded_widths, colaligns, fmt.linebetweenrows) # the last row without a line below - append_row( - lines, - pad_row(rows[-1], pad), - padded_widths, - colaligns, - fmt.datarow, - rowalign=rowaligns[-1], - ) + append_row(lines, pad_row(rows[-1], pad), padded_widths, colaligns, fmt.datarow, rowalign=rowaligns[-1]) else: separating_line = ( - fmt.linebetweenrows - or fmt.linebelowheader - or fmt.linebelow - or fmt.lineabove - or Line("", "", "", "") + fmt.linebetweenrows or fmt.linebelowheader or fmt.linebelow or fmt.lineabove or Line("", "", "", "") ) for row in rows: # test to see if either the 1st column or the 2nd column (account for showindex) has @@ -1033,9 +903,7 @@ def _format_table( if _is_separating_line(row): _append_line(lines, padded_widths, colaligns, separating_line) else: - append_row( - lines, pad_row(row, pad), padded_widths, colaligns, fmt.datarow - ) + append_row(lines, pad_row(row, pad), padded_widths, colaligns, fmt.datarow) if fmt.linebelow and "linebelow" not in hidden: _append_line(lines, padded_widths, colaligns, fmt.linebelow) @@ -1043,5 +911,5 @@ def _format_table( if headers or rows: output = "\n".join(lines) return output - else: # a completely empty table - return "" + # a completely empty table + return "" diff --git a/codeflash/code_utils/time_utils.py b/codeflash/code_utils/time_utils.py index ad3b5f642..aaf74fc93 100644 --- a/codeflash/code_utils/time_utils.py +++ b/codeflash/code_utils/time_utils.py @@ -24,14 +24,14 @@ def humanize_runtime(time_in_ns: int) -> str: runtime_human = "%.3g" % (time_micro / (1000**2)) elif units in {"minutes", "minute"}: runtime_human = "%.3g" % (time_micro / (60 * 1000**2)) - elif units in {"hour", "hours"}: #hours + elif units in {"hour", "hours"}: # hours runtime_human = "%.3g" % (time_micro / (3600 * 1000**2)) - else: #days - runtime_human = "%.3g" % (time_micro / (24*3600 * 1000**2)) + else: # days + runtime_human = "%.3g" % (time_micro / (24 * 3600 * 1000**2)) runtime_human_parts = str(runtime_human).split(".") if len(runtime_human_parts[0]) == 1: - if runtime_human_parts[0]=='1' and len(runtime_human_parts)>1: - units = units+'s' + if runtime_human_parts[0] == "1" and len(runtime_human_parts) > 1: + units = units + "s" if len(runtime_human_parts) == 1: runtime_human = 
f"{runtime_human_parts[0]}.00" elif len(runtime_human_parts[1]) >= 2: diff --git a/codeflash/context/code_context_extractor.py b/codeflash/context/code_context_extractor.py index bf55c7575..0f97a983c 100644 --- a/codeflash/context/code_context_extractor.py +++ b/codeflash/context/code_context_extractor.py @@ -3,18 +3,18 @@ import os from collections import defaultdict from itertools import chain -from pathlib import Path +from pathlib import Path # noqa: TC003 import jedi import libcst as cst -from jedi.api.classes import Name -from libcst import CSTNode +from jedi.api.classes import Name # noqa: TC002 +from libcst import CSTNode # noqa: TC002 from codeflash.cli_cmds.console import logger from codeflash.code_utils.code_extractor import add_needed_imports_from_module, find_preexisting_objects -from codeflash.code_utils.code_utils import get_qualified_name, path_belongs_to_site_packages, encoded_tokens_len +from codeflash.code_utils.code_utils import encoded_tokens_len, get_qualified_name, path_belongs_to_site_packages from codeflash.context.unused_definition_remover import remove_unused_definitions_by_function_names -from codeflash.discovery.functions_to_optimize import FunctionToOptimize +from codeflash.discovery.functions_to_optimize import FunctionToOptimize # noqa: TC001 from codeflash.models.models import ( CodeContextType, CodeOptimizationContext, @@ -136,13 +136,13 @@ def extract_code_string_context_from_files( helpers_of_fto: dict[Path, set[FunctionSource]], helpers_of_helpers: dict[Path, set[FunctionSource]], project_root_path: Path, - remove_docstrings: bool = False, + remove_docstrings: bool = False, # noqa: FBT001, FBT002 code_context_type: CodeContextType = CodeContextType.READ_ONLY, ) -> CodeString: """Extract code context from files containing target functions and their helpers. This function processes two sets of files: 1. Files containing the function to optimize (fto) and their first-degree helpers - 2. Files containing only helpers of helpers (with no overlap with the first set) + 2. Files containing only helpers of helpers (with no overlap with the first set). For each file, it extracts relevant code based on the specified context type, adds necessary imports, and combines them. @@ -157,15 +157,15 @@ def extract_code_string_context_from_files( Returns: CodeString containing the extracted code context with necessary imports - """ + """ # noqa: D205 # Rearrange to remove overlaps, so we only access each file path once helpers_of_helpers_no_overlap = defaultdict(set) - for file_path in helpers_of_helpers: + for file_path, function_sources in helpers_of_helpers.items(): if file_path in helpers_of_fto: # Remove duplicates within the same file path, in case a helper of helper is also a helper of fto helpers_of_helpers[file_path] -= helpers_of_fto[file_path] else: - helpers_of_helpers_no_overlap[file_path] = helpers_of_helpers[file_path] + helpers_of_helpers_no_overlap[file_path] = function_sources final_code_string_context = "" @@ -242,7 +242,7 @@ def extract_code_markdown_context_from_files( helpers_of_fto: dict[Path, set[FunctionSource]], helpers_of_helpers: dict[Path, set[FunctionSource]], project_root_path: Path, - remove_docstrings: bool = False, + remove_docstrings: bool = False, # noqa: FBT001, FBT002 code_context_type: CodeContextType = CodeContextType.READ_ONLY, ) -> CodeStringsMarkdown: """Extract code context from files containing target functions and their helpers, formatting them as markdown. 
@@ -268,12 +268,12 @@ def extract_code_markdown_context_from_files( """ # Rearrange to remove overlaps, so we only access each file path once helpers_of_helpers_no_overlap = defaultdict(set) - for file_path in helpers_of_helpers: + for file_path, function_sources in helpers_of_helpers.items(): if file_path in helpers_of_fto: # Remove duplicates within the same file path, in case a helper of helper is also a helper of fto helpers_of_helpers[file_path] -= helpers_of_fto[file_path] else: - helpers_of_helpers_no_overlap[file_path] = helpers_of_helpers[file_path] + helpers_of_helpers_no_overlap[file_path] = function_sources code_context_markdown = CodeStringsMarkdown() # Extract code from file paths that contain fto and first degree helpers. helpers of helpers may also be included if they are in the same files for file_path, function_sources in helpers_of_fto.items(): @@ -370,7 +370,7 @@ def get_function_to_optimize_as_function_source( and name.full_name.startswith(name.module_name) and get_qualified_name(name.module_name, name.full_name) == function_to_optimize.qualified_name ): - function_source = FunctionSource( + return FunctionSource( file_path=function_to_optimize.file_path, qualified_name=function_to_optimize.qualified_name, fully_qualified_name=name.full_name, @@ -378,12 +378,11 @@ def get_function_to_optimize_as_function_source( source_code=name.get_line_code(), jedi_definition=name, ) - return function_source - except Exception as e: + except Exception as e: # noqa: PERF203 logger.exception(f"Error while getting function source: {e}") continue raise ValueError( - f"Could not find function {function_to_optimize.function_name} in {function_to_optimize.file_path}" + f"Could not find function {function_to_optimize.function_name} in {function_to_optimize.file_path}" # noqa: EM102 ) @@ -405,7 +404,7 @@ def get_function_sources_from_jedi( for name in names: try: definitions: list[Name] = name.goto(follow_imports=True, follow_builtin_imports=False) - except Exception: # noqa: BLE001 + except Exception: logger.debug(f"Error while getting definitions for {qualified_function_name}") definitions = [] if definitions: @@ -448,13 +447,13 @@ def is_dunder_method(name: str) -> bool: def get_section_names(node: cst.CSTNode) -> list[str]: - """Returns the section attribute names (e.g., body, orelse) for a given node if they exist.""" + """Returns the section attribute names (e.g., body, orelse) for a given node if they exist.""" # noqa: D401 possible_sections = ["body", "orelse", "finalbody", "handlers"] return [sec for sec in possible_sections if hasattr(node, sec)] def remove_docstring_from_body(indented_block: cst.IndentedBlock) -> cst.CSTNode: - """Removes the docstring from an indented block if it exists""" + """Removes the docstring from an indented block if it exists.""" # noqa: D401 if not isinstance(indented_block.body[0], cst.SimpleStatementLine): return indented_block first_stmt = indented_block.body[0].body[0] @@ -467,8 +466,8 @@ def parse_code_and_prune_cst( code: str, code_context_type: CodeContextType, target_functions: set[str], - helpers_of_helper_functions: set[str] = set(), - remove_docstrings: bool = False, + helpers_of_helper_functions: set[str] = set(), # noqa: B006 + remove_docstrings: bool = False, # noqa: FBT001, FBT002 ) -> str: """Create a read-only version of the code by parsing and filtering the code to keep only class contextual information, and other module scoped variables.""" module = cst.parse_module(code) @@ -483,7 +482,7 @@ def parse_code_and_prune_cst( module, 
target_functions, helpers_of_helper_functions, remove_docstrings=remove_docstrings ) else: - raise ValueError(f"Unknown code_context_type: {code_context_type}") + raise ValueError(f"Unknown code_context_type: {code_context_type}") # noqa: EM102 if not found_target: raise ValueError("No target functions found in the provided code") @@ -492,7 +491,7 @@ def parse_code_and_prune_cst( return "" -def prune_cst_for_read_writable_code( +def prune_cst_for_read_writable_code( # noqa: PLR0911 node: cst.CSTNode, target_functions: set[str], prefix: str = "" ) -> tuple[cst.CSTNode | None, bool]: """Recursively filter the node and its children to build the read-writable codeblock. This contains nodes that lead to target functions. @@ -518,7 +517,7 @@ def prune_cst_for_read_writable_code( return None, False # Assuming always an IndentedBlock if not isinstance(node.body, cst.IndentedBlock): - raise ValueError("ClassDef body is not an IndentedBlock") + raise ValueError("ClassDef body is not an IndentedBlock") # noqa: TRY004 class_prefix = f"{prefix}.{node.name.value}" if prefix else node.name.value new_body = [] found_target = False @@ -572,14 +571,14 @@ def prune_cst_for_read_writable_code( return (node.with_changes(**updates) if updates else node), True -def prune_cst_for_read_only_code( +def prune_cst_for_read_only_code( # noqa: PLR0911 node: cst.CSTNode, target_functions: set[str], helpers_of_helper_functions: set[str], prefix: str = "", - remove_docstrings: bool = False, + remove_docstrings: bool = False, # noqa: FBT001, FBT002 ) -> tuple[cst.CSTNode | None, bool]: - """Recursively filter the node for read-only context: + """Recursively filter the node for read-only context. Returns: (filtered_node, found_target): @@ -611,7 +610,7 @@ def prune_cst_for_read_only_code( return None, False # Assuming always an IndentedBlock if not isinstance(node.body, cst.IndentedBlock): - raise ValueError("ClassDef body is not an IndentedBlock") + raise ValueError("ClassDef body is not an IndentedBlock") # noqa: TRY004 class_prefix = f"{prefix}.{node.name.value}" if prefix else node.name.value @@ -676,14 +675,14 @@ def prune_cst_for_read_only_code( return None, False -def prune_cst_for_testgen_code( +def prune_cst_for_testgen_code( # noqa: PLR0911 node: cst.CSTNode, target_functions: set[str], helpers_of_helper_functions: set[str], prefix: str = "", - remove_docstrings: bool = False, + remove_docstrings: bool = False, # noqa: FBT001, FBT002 ) -> tuple[cst.CSTNode | None, bool]: - """Recursively filter the node for testgen context: + """Recursively filter the node for testgen context. 
Returns: (filtered_node, found_target): @@ -716,7 +715,7 @@ def prune_cst_for_testgen_code( return None, False # Assuming always an IndentedBlock if not isinstance(node.body, cst.IndentedBlock): - raise ValueError("ClassDef body is not an IndentedBlock") + raise ValueError("ClassDef body is not an IndentedBlock") # noqa: TRY004 class_prefix = f"{prefix}.{node.name.value}" if prefix else node.name.value diff --git a/codeflash/context/unused_definition_remover.py b/codeflash/context/unused_definition_remover.py index bfcbbaead..86835e128 100644 --- a/codeflash/context/unused_definition_remover.py +++ b/codeflash/context/unused_definition_remover.py @@ -1,6 +1,7 @@ from __future__ import annotations from dataclasses import dataclass, field +from typing import Optional import libcst as cst @@ -35,7 +36,9 @@ def extract_names_from_targets(target: cst.CSTNode) -> list[str]: return names -def collect_top_level_definitions(node: cst.CSTNode, definitions: dict[str, UsageInfo] = None) -> dict[str, UsageInfo]: +def collect_top_level_definitions( + node: cst.CSTNode, definitions: Optional[dict[str, UsageInfo]] = None +) -> dict[str, UsageInfo]: """Recursively collect all top-level variable, function, and class definitions.""" if definitions is None: definitions = {} @@ -142,13 +145,13 @@ def visit_FunctionDef(self, node: cst.FunctionDef) -> None: self.function_depth += 1 def _collect_annotation_dependencies(self, annotation: cst.Annotation) -> None: - """Extract dependencies from type annotations""" + """Extract dependencies from type annotations.""" if hasattr(annotation, "annotation"): # Extract names from annotation (could be Name, Attribute, Subscript, etc.) self._extract_names_from_annotation(annotation.annotation) def _extract_names_from_annotation(self, node: cst.CSTNode) -> None: - """Extract names from a type annotation node""" + """Extract names from a type annotation node.""" # Simple name reference like 'int', 'str', or custom type if isinstance(node, cst.Name): name = node.value @@ -170,7 +173,7 @@ def _extract_names_from_annotation(self, node: cst.CSTNode) -> None: self._extract_names_from_annotation(node.value) # No need to check the attribute name itself as it's likely not a top-level definition - def leave_FunctionDef(self, original_node: cst.FunctionDef) -> None: + def leave_FunctionDef(self, original_node: cst.FunctionDef) -> None: # noqa: ARG002 self.function_depth -= 1 if self.function_depth == 0 and self.class_depth == 0: @@ -187,7 +190,7 @@ def visit_ClassDef(self, node: cst.ClassDef) -> None: self.class_depth += 1 - def leave_ClassDef(self, original_node: cst.ClassDef) -> None: + def leave_ClassDef(self, original_node: cst.ClassDef) -> None: # noqa: ARG002 self.class_depth -= 1 if self.class_depth == 0: @@ -210,7 +213,7 @@ def visit_Assign(self, node: cst.Assign) -> None: # Use the first tracked name as the current top-level name (for dependency tracking) self.current_top_level_name = tracked_names[0] - def leave_Assign(self, original_node: cst.Assign) -> None: + def leave_Assign(self, original_node: cst.Assign) -> None: # noqa: ARG002 if self.processing_variable: self.processing_variable = False self.current_variable_names.clear() @@ -302,8 +305,8 @@ def mark_as_used_recursively(self, name: str) -> None: self.mark_as_used_recursively(dep) -def remove_unused_definitions_recursively( - node: cst.CSTNode, definitions: dict[str, UsageInfo] +def remove_unused_definitions_recursively( # noqa: PLR0911 + node: cst.CSTNode, definitions: dict[str, UsageInfo] ) -> tuple[cst.CSTNode | 
None, bool]: """Recursively filter the node to remove unused definitions. @@ -358,7 +361,10 @@ def remove_unused_definitions_recursively( names = extract_names_from_targets(target.target) for name in names: class_var_name = f"{class_name}.{name}" - if class_var_name in definitions and definitions[class_var_name].used_by_qualified_function: + if ( + class_var_name in definitions + and definitions[class_var_name].used_by_qualified_function + ): var_used = True method_or_var_used = True break diff --git a/codeflash/discovery/discover_unit_tests.py b/codeflash/discovery/discover_unit_tests.py index f729b90d2..258092850 100644 --- a/codeflash/discovery/discover_unit_tests.py +++ b/codeflash/discovery/discover_unit_tests.py @@ -1,3 +1,4 @@ +# ruff: noqa: SLF001 from __future__ import annotations import hashlib @@ -12,8 +13,8 @@ from typing import TYPE_CHECKING, Callable, Optional import jedi +import pytest from pydantic.dataclasses import dataclass -from pytest import ExitCode from codeflash.cli_cmds.console import console, logger, test_files_progress_bar from codeflash.code_utils.code_utils import get_run_tmp_file, module_name_from_file_path @@ -177,11 +178,11 @@ def discover_tests_pytest( error_section = match.group(1) if match else result.stdout logger.warning( - f"Failed to collect tests. Pytest Exit code: {exitcode}={ExitCode(exitcode).name}\n {error_section}" + f"Failed to collect tests. Pytest Exit code: {exitcode}={pytest.ExitCode(exitcode).name}\n {error_section}" ) elif 0 <= exitcode <= 5: - logger.warning(f"Failed to collect tests. Pytest Exit code: {exitcode}={ExitCode(exitcode).name}") + logger.warning(f"Failed to collect tests. Pytest Exit code: {exitcode}={pytest.ExitCode(exitcode).name}") else: logger.warning(f"Failed to collect tests. Pytest Exit code: {exitcode}") console.rule() diff --git a/codeflash/discovery/functions_to_optimize.py b/codeflash/discovery/functions_to_optimize.py index 8f5ba65eb..8aa052ab0 100644 --- a/codeflash/discovery/functions_to_optimize.py +++ b/codeflash/discovery/functions_to_optimize.py @@ -48,7 +48,7 @@ def __init__(self) -> None: super().__init__() self.has_return_statement: bool = False - def visit_Return(self, node: cst.Return) -> None: + def visit_Return(self, node: cst.Return) -> None: # noqa: ARG002 self.has_return_statement = True @@ -158,9 +158,9 @@ def get_functions_to_optimize( module_root: Path, previous_checkpoint_functions: dict[str, dict[str, str]] | None = None, ) -> tuple[dict[Path, list[FunctionToOptimize]], int]: - assert ( - sum([bool(optimize_all), bool(replay_test), bool(file)]) <= 1 - ), "Only one of optimize_all, replay_test, or file should be provided" + assert sum([bool(optimize_all), bool(replay_test), bool(file)]) <= 1, ( + "Only one of optimize_all, replay_test, or file should be provided" + ) functions: dict[str, list[FunctionToOptimize]] with warnings.catch_warnings(): warnings.simplefilter(action="ignore", category=SyntaxWarning) @@ -208,7 +208,7 @@ def get_functions_to_optimize( three_min_in_ns = int(1.8e11) console.rule() logger.info( - f"It might take about {humanize_runtime(functions_count*three_min_in_ns)} to fully optimize this project. Codeflash " + f"It might take about {humanize_runtime(functions_count * three_min_in_ns)} to fully optimize this project. Codeflash " f"will keep opening pull requests as it finds optimizations." 
) return filtered_modified_functions, functions_count @@ -217,7 +217,7 @@ def get_functions_to_optimize( def get_functions_within_git_diff() -> dict[str, list[FunctionToOptimize]]: modified_lines: dict[str, list[int]] = get_git_diff(uncommitted_changes=False) modified_functions: dict[str, list[FunctionToOptimize]] = {} - for path_str in modified_lines: + for path_str, lines_in_file in modified_lines.items(): path = Path(path_str) if not path.exists(): continue @@ -235,7 +235,7 @@ def get_functions_within_git_diff() -> dict[str, list[FunctionToOptimize]]: for function_to_optimize in function_lines.functions if (start_line := function_to_optimize.starting_line) is not None and (end_line := function_to_optimize.ending_line) is not None - and any(start_line <= line <= end_line for line in modified_lines[path_str]) + and any(start_line <= line <= end_line for line in lines_in_file) ] return modified_functions @@ -290,25 +290,25 @@ def get_all_replay_test_functions( ) if class_name: # If there is a class name, append it to the module path - function = class_name + "." + function_name + qualified_function_name = class_name + "." + function_name file_path_parts = module_path_parts[:-1] # Exclude the class name else: - function = function_name + qualified_function_name = function_name file_path_parts = module_path_parts file_path = Path(project_root_path, *file_path_parts).with_suffix(".py") if not file_path.exists(): continue - file_to_functions_map[file_path].append((function, function_name, class_name)) - for file_path, functions in file_to_functions_map.items(): + file_to_functions_map[file_path].append((qualified_function_name, function_name, class_name)) + for file_path, functions_in_file in file_to_functions_map.items(): all_valid_functions: dict[Path, list[FunctionToOptimize]] = find_all_functions_in_file(file_path=file_path) filtered_list = [] - for function in functions: - function_name, function_name_only, class_name = function + for func_data in functions_in_file: + qualified_name_to_match, _, _ = func_data filtered_list.extend( [ valid_function for valid_function in all_valid_functions[file_path] - if valid_function.qualified_name == function_name + if valid_function.qualified_name == qualified_name_to_match ] ) if filtered_list: @@ -320,7 +320,7 @@ def get_all_replay_test_functions( def is_git_repo(file_path: str) -> bool: try: git.Repo(file_path, search_parent_directories=True) - return True + return True # noqa: TRY300 except git.InvalidGitRepositoryError: return False @@ -400,7 +400,7 @@ def visit_ClassDef(self, node: ast.ClassDef) -> None: def inspect_top_level_functions_or_methods( file_name: Path, function_or_method_name: str, class_name: str | None = None, line_no: int | None = None ) -> FunctionProperties | None: - with open(file_name, encoding="utf8") as file: + with file_name.open(encoding="utf8") as file: try: ast_module = ast.parse(file.read()) except Exception: @@ -426,9 +426,10 @@ def filter_functions( project_root: Path, module_root: Path, previous_checkpoint_functions: dict[Path, dict[str, Any]] | None = None, - disable_logs: bool = False, + disable_logs: bool = False, # noqa: FBT001, FBT002 ) -> tuple[dict[Path, list[FunctionToOptimize]], int]: blocklist_funcs = get_blocklisted_functions() + logger.debug(f"Blocklisted functions: {blocklist_funcs}") # Remove any function that we don't want to optimize # Ignore files with submodule path, cache the submodule paths diff --git a/codeflash/discovery/pytest_new_process_discovery.py 
b/codeflash/discovery/pytest_new_process_discovery.py index 2d8583255..9d695b52a 100644 --- a/codeflash/discovery/pytest_new_process_discovery.py +++ b/codeflash/discovery/pytest_new_process_discovery.py @@ -1,3 +1,4 @@ +# ruff: noqa import sys from typing import Any @@ -16,7 +17,7 @@ def pytest_collection_finish(self, session) -> None: collected_tests.extend(session.items) pytest_rootdir = session.config.rootdir - def pytest_collection_modifyitems(config, items): + def pytest_collection_modifyitems(self, items) -> None: skip_benchmark = pytest.mark.skip(reason="Skipping benchmark tests") for item in items: if "benchmark" in item.fixturenames: @@ -42,8 +43,8 @@ def parse_pytest_collection_results(pytest_tests: list[Any]) -> list[dict[str, s exitcode = pytest.main( [tests_root, "-p no:logging", "--collect-only", "-m", "not skip"], plugins=[PytestCollectionPlugin()] ) - except Exception as e: # noqa: BLE001 - print(f"Failed to collect tests: {e!s}") # noqa: T201 + except Exception as e: + print(f"Failed to collect tests: {e!s}") exitcode = -1 tests = parse_pytest_collection_results(collected_tests) import pickle diff --git a/codeflash/github/PrComment.py b/codeflash/github/PrComment.py index 1e66c5608..8dbfc54b8 100644 --- a/codeflash/github/PrComment.py +++ b/codeflash/github/PrComment.py @@ -1,12 +1,12 @@ -from __future__ import annotations -from typing import Union, Optional +from __future__ import annotations # noqa: N999 + +from typing import Optional, Union from pydantic import BaseModel from pydantic.dataclasses import dataclass from codeflash.code_utils.time_utils import humanize_runtime -from codeflash.models.models import BenchmarkDetail -from codeflash.models.models import TestResults +from codeflash.models.models import BenchmarkDetail, TestResults @dataclass(frozen=True, config={"arbitrary_types_allowed": True}) @@ -44,5 +44,5 @@ def to_json(self) -> dict[str, Union[dict[str, dict[str, int]], int, str, Option class FileDiffContent(BaseModel): - oldContent: str - newContent: str + oldContent: str # noqa: N815 + newContent: str # noqa: N815 diff --git a/codeflash/main.py b/codeflash/main.py index 02b13d5aa..9eb22dde1 100644 --- a/codeflash/main.py +++ b/codeflash/main.py @@ -1,4 +1,6 @@ -"""Thanks for being curious about how codeflash works! If you might want to work with us on finally making performance a +"""Thanks for being curious about how codeflash works!. + +If you might want to work with us on finally making performance a solved problem, please reach out to us at careers@codeflash.ai. We're hiring! 
""" diff --git a/codeflash/models/ExperimentMetadata.py b/codeflash/models/ExperimentMetadata.py index bc8e2272c..c284a84f7 100644 --- a/codeflash/models/ExperimentMetadata.py +++ b/codeflash/models/ExperimentMetadata.py @@ -1,3 +1,5 @@ +from __future__ import annotations # noqa: N999 + from typing import Optional from pydantic import BaseModel diff --git a/codeflash/models/models.py b/codeflash/models/models.py index 32add0a94..b250d2474 100644 --- a/codeflash/models/models.py +++ b/codeflash/models/models.py @@ -59,24 +59,29 @@ class FunctionSource: def __eq__(self, other: object) -> bool: if not isinstance(other, FunctionSource): return False - return (self.file_path == other.file_path and - self.qualified_name == other.qualified_name and - self.fully_qualified_name == other.fully_qualified_name and - self.only_function_name == other.only_function_name and - self.source_code == other.source_code) + return ( + self.file_path == other.file_path + and self.qualified_name == other.qualified_name + and self.fully_qualified_name == other.fully_qualified_name + and self.only_function_name == other.only_function_name + and self.source_code == other.source_code + ) def __hash__(self) -> int: - return hash((self.file_path, self.qualified_name, self.fully_qualified_name, - self.only_function_name, self.source_code)) + return hash( + (self.file_path, self.qualified_name, self.fully_qualified_name, self.only_function_name, self.source_code) + ) + class BestOptimization(BaseModel): candidate: OptimizedCandidate helper_functions: list[FunctionSource] runtime: int - replay_performance_gain: Optional[dict[BenchmarkKey,float]] = None + replay_performance_gain: Optional[dict[BenchmarkKey, float]] = None winning_behavioral_test_results: TestResults winning_benchmarking_test_results: TestResults - winning_replay_benchmarking_test_results : Optional[TestResults] = None + winning_replay_benchmarking_test_results: Optional[TestResults] = None + @dataclass(frozen=True) class BenchmarkKey: @@ -86,6 +91,7 @@ class BenchmarkKey: def __str__(self) -> str: return f"{self.module_path}::{self.function_name}" + @dataclass class BenchmarkDetail: benchmark_name: str @@ -107,9 +113,10 @@ def to_dict(self) -> dict[str, any]: "test_function": self.test_function, "original_timing": self.original_timing, "expected_new_timing": self.expected_new_timing, - "speedup_percent": self.speedup_percent + "speedup_percent": self.speedup_percent, } + @dataclass class ProcessedBenchmarkInfo: benchmark_details: list[BenchmarkDetail] @@ -124,9 +131,9 @@ def to_string(self) -> str: return result def to_dict(self) -> dict[str, list[dict[str, any]]]: - return { - "benchmark_details": [detail.to_dict() for detail in self.benchmark_details] - } + return {"benchmark_details": [detail.to_dict() for detail in self.benchmark_details]} + + class CodeString(BaseModel): code: Annotated[str, AfterValidator(validate_python_code)] file_path: Optional[Path] = None @@ -151,7 +158,8 @@ class CodeOptimizationContext(BaseModel): read_writable_code: str = Field(min_length=1) read_only_context_code: str = "" helper_functions: list[FunctionSource] - preexisting_objects: set[tuple[str, tuple[FunctionParent,...]]] + preexisting_objects: set[tuple[str, tuple[FunctionParent, ...]]] + class CodeContextType(str, Enum): READ_WRITABLE = "READ_WRITABLE" @@ -347,6 +355,7 @@ def create_empty(cls, file_path: Path, function_name: str, code_context: CodeOpt status=CoverageStatus.NOT_FOUND, ) + @dataclass class FunctionCoverage: """Represents the coverage data for a specific 
function in a source file.""" @@ -364,7 +373,8 @@ class TestingMode(enum.Enum): PERFORMANCE = "performance" LINE_PROFILE = "line_profile" -#TODO this class is duplicated in codeflash_capture + +# TODO this class is duplicated in codeflash_capture class VerificationType(str, Enum): FUNCTION_CALL = ( "function_call" # Correctness verification for a test function, checks input values and output values) @@ -473,14 +483,20 @@ def merge(self, other: TestResults) -> None: raise ValueError(msg) self.test_result_idx[k] = v + original_len - def group_by_benchmarks(self, benchmark_keys:list[BenchmarkKey], benchmark_replay_test_dir: Path, project_root: Path) -> dict[BenchmarkKey, TestResults]: + def group_by_benchmarks( + self, benchmark_keys: list[BenchmarkKey], benchmark_replay_test_dir: Path, project_root: Path + ) -> dict[BenchmarkKey, TestResults]: """Group TestResults by benchmark for calculating improvements for each benchmark.""" test_results_by_benchmark = defaultdict(TestResults) benchmark_module_path = {} for benchmark_key in benchmark_keys: - benchmark_module_path[benchmark_key] = module_name_from_file_path(benchmark_replay_test_dir.resolve() / f"test_{benchmark_key.module_path.replace('.', '_')}__replay_test_", project_root) + benchmark_module_path[benchmark_key] = module_name_from_file_path( + benchmark_replay_test_dir.resolve() + / f"test_{benchmark_key.module_path.replace('.', '_')}__replay_test_", + project_root, + ) for test_result in self.test_results: - if (test_result.test_type == TestType.REPLAY_TEST): + if test_result.test_type == TestType.REPLAY_TEST: for benchmark_key, module_path in benchmark_module_path.items(): if test_result.id.test_module_path.startswith(module_path): test_results_by_benchmark[benchmark_key].add(test_result) @@ -559,7 +575,7 @@ def total_passed_runtime(self) -> int: :return: The runtime in nanoseconds. 
""" - #TODO this doesn't look at the intersection of tests of baseline and original + # TODO this doesn't look at the intersection of tests of baseline and original return sum( [min(usable_runtime_data) for _, usable_runtime_data in self.usable_runtime_data_by_test_case().items()] ) @@ -589,7 +605,7 @@ def __eq__(self, other: object) -> bool: if len(self) != len(other): return False original_recursion_limit = sys.getrecursionlimit() - cast(TestResults, other) + cast("TestResults", other) for test_result in self: other_test_result = other.get_by_unique_invocation_loop_id(test_result.unique_invocation_loop_id) if other_test_result is None: diff --git a/codeflash/optimization/function_context.py b/codeflash/optimization/function_context.py index d55aa2dec..c7fbe461e 100644 --- a/codeflash/optimization/function_context.py +++ b/codeflash/optimization/function_context.py @@ -1,9 +1,12 @@ from __future__ import annotations -from jedi.api.classes import Name +from typing import TYPE_CHECKING from codeflash.code_utils.code_utils import get_qualified_name +if TYPE_CHECKING: + from jedi.api.classes import Name + def belongs_to_method(name: Name, class_name: str, method_name: str) -> bool: """Check if the given name belongs to the specified method.""" @@ -14,9 +17,8 @@ def belongs_to_function(name: Name, function_name: str) -> bool: """Check if the given jedi Name is a direct child of the specified function.""" if name.name == function_name: # Handles function definition and recursive function calls return False - if name := name.parent(): - if name.type == "function": - return name.name == function_name + if (name := name.parent()) and name.type == "function": + return name.name == function_name return False @@ -31,12 +33,14 @@ def belongs_to_class(name: Name, class_name: str) -> bool: def belongs_to_function_qualified(name: Name, qualified_function_name: str) -> bool: """Check if the given jedi Name is a direct child of the specified function, matched by qualified function name.""" try: - if name.full_name.startswith(name.module_name) and get_qualified_name(name.module_name, name.full_name) == qualified_function_name: + if ( + name.full_name.startswith(name.module_name) + and get_qualified_name(name.module_name, name.full_name) == qualified_function_name + ): # Handles function definition and recursive function calls return False - if name := name.parent(): - if name.type == "function": - return get_qualified_name(name.module_name, name.full_name) == qualified_function_name - return False + if (name := name.parent()) and name.type == "function": + return get_qualified_name(name.module_name, name.full_name) == qualified_function_name + return False # noqa: TRY300 except ValueError: return False diff --git a/codeflash/optimization/function_optimizer.py b/codeflash/optimization/function_optimizer.py index dc12b2aac..d74f59ecc 100644 --- a/codeflash/optimization/function_optimizer.py +++ b/codeflash/optimization/function_optimizer.py @@ -124,7 +124,7 @@ def __init__( self.total_benchmark_timings = total_benchmark_timings if total_benchmark_timings else {} self.replay_tests_dir = replay_tests_dir if replay_tests_dir else None - def optimize_function(self) -> Result[BestOptimization, str]: + def optimize_function(self) -> Result[BestOptimization, str]: # noqa: PLR0911 should_run_experiment = self.experiment_id is not None logger.debug(f"Function Trace ID: {self.function_trace_id}") ph("cli-optimize-function-start", {"function_trace_id": self.function_trace_id}) @@ -582,9 +582,9 @@ def 
log_successful_optimization( def write_code_and_helpers(original_code: str, original_helper_code: dict[Path, str], path: Path) -> None: with path.open("w", encoding="utf8") as f: f.write(original_code) - for module_abspath in original_helper_code: + for module_abspath, helper_code in original_helper_code.items(): with Path(module_abspath).open("w", encoding="utf8") as f: - f.write(original_helper_code[module_abspath]) + f.write(helper_code) def reformat_code_and_helpers( self, helper_functions: list[FunctionSource], path: Path, original_code: str @@ -701,10 +701,10 @@ def instrument_existing_tests(self, function_to_all_tests: dict[str, list[Functi continue # TODO: this naming logic should be moved to a function and made more standard new_behavioral_test_path = Path( - f"{os.path.splitext(test_file)[0]}__perfinstrumented{os.path.splitext(test_file)[1]}" + f"{os.path.splitext(test_file)[0]}__perfinstrumented{os.path.splitext(test_file)[1]}" # noqa: PTH122 ) new_perf_test_path = Path( - f"{os.path.splitext(test_file)[0]}__perfonlyinstrumented{os.path.splitext(test_file)[1]}" + f"{os.path.splitext(test_file)[0]}__perfonlyinstrumented{os.path.splitext(test_file)[1]}" # noqa: PTH122 ) if injected_behavior_test is not None: with new_behavioral_test_path.open("w", encoding="utf8") as _f: @@ -748,7 +748,7 @@ def generate_tests_and_optimizations( helper_functions: list[FunctionSource], generated_test_paths: list[Path], generated_perf_test_paths: list[Path], - run_experiment: bool = False, + run_experiment: bool = False, # noqa: FBT001, FBT002 ) -> Result[tuple[GeneratedTestsList, dict[str, list[FunctionCalledInTest]], OptimizationSet], str]: assert len(generated_test_paths) == N_TESTS_TO_GENERATE max_workers = N_TESTS_TO_GENERATE + 2 if not run_experiment else N_TESTS_TO_GENERATE + 3 @@ -849,7 +849,7 @@ def establish_original_code_baseline( line_profile_results = {"timings": {}, "unit": 0, "str_out": ""} # For the original function - run the tests and get the runtime, plus coverage with progress_bar(f"Establishing original code baseline for {self.function_to_optimize.function_name}"): - assert (test_framework := self.args.test_framework) in {"pytest", "unittest"} + assert (test_framework := self.args.test_framework) in {"pytest", "unittest"} # noqa: RUF018 success = True test_env = os.environ.copy() @@ -1004,7 +1004,7 @@ def run_optimized_candidate( original_helper_code: dict[Path, str], file_path_to_helper_classes: dict[Path, set[str]], ) -> Result[OptimizedCandidateResult, str]: - assert (test_framework := self.args.test_framework) in {"pytest", "unittest"} + assert (test_framework := self.args.test_framework) in {"pytest", "unittest"} # noqa: RUF018 with progress_bar("Testing optimization candidate"): test_env = os.environ.copy() diff --git a/codeflash/optimization/optimizer.py b/codeflash/optimization/optimizer.py index de2cc1740..ae8ece469 100644 --- a/codeflash/optimization/optimizer.py +++ b/codeflash/optimization/optimizer.py @@ -18,12 +18,12 @@ from codeflash.code_utils import env_utils from codeflash.code_utils.checkpoint import CodeflashRunCheckpoint from codeflash.code_utils.code_replacer import normalize_code, normalize_node -from codeflash.code_utils.code_utils import cleanup_paths, get_run_tmp_file +from codeflash.code_utils.code_utils import cleanup_paths from codeflash.code_utils.static_analysis import analyze_imported_modules, get_first_top_level_function_or_method_ast from codeflash.discovery.discover_unit_tests import discover_unit_tests from 
codeflash.discovery.functions_to_optimize import get_functions_to_optimize from codeflash.either import is_successful -from codeflash.models.models import BenchmarkKey, TestType, ValidCode +from codeflash.models.models import ValidCode from codeflash.optimization.function_optimizer import FunctionOptimizer from codeflash.telemetry.posthog_cf import ph from codeflash.verification.verification_utils import TestConfig @@ -32,7 +32,7 @@ from argparse import Namespace from codeflash.discovery.functions_to_optimize import FunctionToOptimize - from codeflash.models.models import FunctionCalledInTest + from codeflash.models.models import BenchmarkKey, FunctionCalledInTest class Optimizer: @@ -266,7 +266,14 @@ def run(self) -> None: if function_optimizer: function_optimizer.cleanup_generated_files() + if self.test_cfg.concolic_test_root_dir: + cleanup_paths([self.test_cfg.concolic_test_root_dir]) + def run_with_args(args: Namespace) -> None: - optimizer = Optimizer(args) - optimizer.run() + try: + optimizer = Optimizer(args) + optimizer.run() + except KeyboardInterrupt: + logger.warning("Keyboard interrupt received. Exiting, please wait…") + raise SystemExit from None diff --git a/codeflash/picklepatch/pickle_patcher.py b/codeflash/picklepatch/pickle_patcher.py index cfedd28fd..0e08756ab 100644 --- a/codeflash/picklepatch/pickle_patcher.py +++ b/codeflash/picklepatch/pickle_patcher.py @@ -4,8 +4,11 @@ components with placeholders that provide informative errors when accessed. """ +from __future__ import annotations + +import contextlib import pickle -import types +from typing import Any, ClassVar import dill @@ -20,10 +23,10 @@ class PicklePatcher: """ # Class-level cache of unpicklable types - _unpicklable_types = set() + _unpicklable_types: ClassVar[set[type]] = set() @staticmethod - def dumps(obj, protocol=None, max_depth=100, **kwargs): + def dumps(obj: object, protocol: int | None = None, max_depth: int = 100, **kwargs) -> bytes: # noqa: ANN003 """Safely pickle an object, replacing unpicklable parts with placeholders. Args: @@ -34,11 +37,12 @@ def dumps(obj, protocol=None, max_depth=100, **kwargs): Returns: bytes: Pickled data with placeholders for unpicklable objects + """ return PicklePatcher._recursive_pickle(obj, max_depth, path=[], protocol=protocol, **kwargs) @staticmethod - def loads(pickled_data): + def loads(pickled_data: bytes) -> object: """Unpickle data that may contain placeholders. Args: @@ -46,15 +50,12 @@ def loads(pickled_data): Returns: The unpickled object with placeholders for unpicklable parts + """ - try: - # We use dill for loading since it can handle everything pickle can - return dill.loads(pickled_data) - except Exception as e: - raise + return dill.loads(pickled_data) @staticmethod - def _create_placeholder(obj, error_msg, path): + def _create_placeholder(obj: object, error_msg: str, path: list[str]) -> PicklePlaceholder: """Create a placeholder for an unpicklable object. 
Args: @@ -64,28 +65,29 @@ def _create_placeholder(obj, error_msg, path): Returns: PicklePlaceholder: A placeholder object + """ obj_type = type(obj) try: obj_str = str(obj)[:100] if hasattr(obj, "__str__") else f"" - except: + except: # noqa: E722 obj_str = f"" print(f"Creating placeholder for {obj_type.__name__} at path {'->'.join(path) or 'root'}: {error_msg}") - placeholder = PicklePlaceholder( - obj_type.__name__, - obj_str, - error_msg, - path - ) + placeholder = PicklePlaceholder(obj_type.__name__, obj_str, error_msg, path) # Add this type to our known unpicklable types cache PicklePatcher._unpicklable_types.add(obj_type) return placeholder @staticmethod - def _pickle(obj, path=None, protocol=None, **kwargs): + def _pickle( + obj: object, + path: list[str] | None = None, # noqa: ARG004 + protocol: int | None = None, + **kwargs: Any, # noqa: ANN401 + ) -> tuple[bool, bytes | str]: """Try to pickle an object using pickle first, then dill. If both fail, create a placeholder. Args: @@ -98,11 +100,12 @@ def _pickle(obj, path=None, protocol=None, **kwargs): tuple: (success, result) where success is a boolean and result is either: - Pickled bytes if successful - Error message if not successful + """ # Try standard pickle first try: return True, pickle.dumps(obj, protocol=protocol, **kwargs) - except (pickle.PickleError, TypeError, AttributeError, ValueError) as e: + except (pickle.PickleError, TypeError, AttributeError, ValueError): # Then try dill (which is more powerful) try: return True, dill.dumps(obj, protocol=protocol, **kwargs) @@ -110,7 +113,13 @@ def _pickle(obj, path=None, protocol=None, **kwargs): return False, str(e) @staticmethod - def _recursive_pickle(obj, max_depth, path=None, protocol=None, **kwargs): + def _recursive_pickle( # noqa: PLR0911 + obj: object, + max_depth: int, + path: list[str] | None = None, + protocol: int | None = None, + **kwargs, # noqa: ANN003 + ) -> bytes: """Recursively try to pickle an object, replacing unpicklable parts with placeholders. 
Args: @@ -122,6 +131,7 @@ def _recursive_pickle(obj, max_depth, path=None, protocol=None, **kwargs): Returns: bytes: Pickled data with placeholders for unpicklable objects + """ if path is None: path = [] @@ -130,20 +140,12 @@ def _recursive_pickle(obj, max_depth, path=None, protocol=None, **kwargs): # Check if this type is known to be unpicklable if obj_type in PicklePatcher._unpicklable_types: - placeholder = PicklePatcher._create_placeholder( - obj, - "Known unpicklable type", - path - ) + placeholder = PicklePatcher._create_placeholder(obj, "Known unpicklable type", path) return dill.dumps(placeholder, protocol=protocol, **kwargs) # Check for max depth if max_depth <= 0: - placeholder = PicklePatcher._create_placeholder( - obj, - "Max recursion depth exceeded", - path - ) + placeholder = PicklePatcher._create_placeholder(obj, "Max recursion depth exceeded", path) return dill.dumps(placeholder, protocol=protocol, **kwargs) # Try standard pickling @@ -156,9 +158,9 @@ def _recursive_pickle(obj, max_depth, path=None, protocol=None, **kwargs): # Handle different container types if isinstance(obj, dict): return PicklePatcher._handle_dict(obj, max_depth, error_msg, path, protocol=protocol, **kwargs) - elif isinstance(obj, (list, tuple, set)): + if isinstance(obj, (list, tuple, set)): return PicklePatcher._handle_sequence(obj, max_depth, error_msg, path, protocol=protocol, **kwargs) - elif hasattr(obj, "__dict__"): + if hasattr(obj, "__dict__"): result = PicklePatcher._handle_object(obj, max_depth, error_msg, path, protocol=protocol, **kwargs) # If this was a failure, add the type to the cache @@ -172,7 +174,14 @@ def _recursive_pickle(obj, max_depth, path=None, protocol=None, **kwargs): return dill.dumps(placeholder, protocol=protocol, **kwargs) @staticmethod - def _handle_dict(obj_dict, max_depth, error_msg, path, protocol=None, **kwargs): + def _handle_dict( + obj_dict: dict[Any, Any], + max_depth: int, + error_msg: str, # noqa: ARG004 + path: list[str], + protocol: int | None = None, + **kwargs: Any, # noqa: ANN401 + ) -> bytes: """Handle pickling for dictionary objects. 
Args: @@ -185,12 +194,11 @@ def _handle_dict(obj_dict, max_depth, error_msg, path, protocol=None, **kwargs): Returns: bytes: Pickled data with placeholders for unpicklable objects + """ if not isinstance(obj_dict, dict): placeholder = PicklePatcher._create_placeholder( - obj_dict, - f"Expected a dictionary, got {type(obj_dict).__name__}", - path + obj_dict, f"Expected a dictionary, got {type(obj_dict).__name__}", path ) return dill.dumps(placeholder, protocol=protocol, **kwargs) @@ -205,12 +213,12 @@ def _handle_dict(obj_dict, max_depth, error_msg, path, protocol=None, **kwargs): # If the key can't be pickled, use a string representation try: key_str = str(key)[:50] - except: + except: # noqa: E722 key_str = f"" key_result = f"" # Process the value - value_path = path + [f"[{repr(key)[:20]}]"] + value_path = [*path, f"[{repr(key)[:20]}]"] value_success, value_bytes = PicklePatcher._pickle(value, value_path, protocol, **kwargs) if value_success: @@ -223,18 +231,21 @@ def _handle_dict(obj_dict, max_depth, error_msg, path, protocol=None, **kwargs): ) value_result = dill.loads(value_bytes) except Exception as inner_e: - value_result = PicklePatcher._create_placeholder( - value, - str(inner_e), - value_path - ) + value_result = PicklePatcher._create_placeholder(value, str(inner_e), value_path) result[key_result] = value_result return dill.dumps(result, protocol=protocol, **kwargs) @staticmethod - def _handle_sequence(obj_seq, max_depth, error_msg, path, protocol=None, **kwargs): + def _handle_sequence( + obj_seq: list[Any] | tuple[Any, ...] | set[Any], + max_depth: int, + error_msg: str, # noqa: ARG004 + path: list[str], + protocol: int | None = None, + **kwargs: Any, # noqa: ANN401 + ) -> bytes: """Handle pickling for sequence types (list, tuple, set). Args: @@ -247,11 +258,12 @@ def _handle_sequence(obj_seq, max_depth, error_msg, path, protocol=None, **kwarg Returns: bytes: Pickled data with placeholders for unpicklable objects + """ - result = [] + result: list[Any] = [] for i, item in enumerate(obj_seq): - item_path = path + [f"[{i}]"] + item_path = [*path, f"[{i}]"] # Try to pickle the item directly success, _ = PicklePatcher._pickle(item, item_path, protocol, **kwargs) @@ -267,11 +279,7 @@ def _handle_sequence(obj_seq, max_depth, error_msg, path, protocol=None, **kwarg result.append(dill.loads(item_bytes)) except Exception as inner_e: # If recursive pickling fails, use a placeholder - placeholder = PicklePatcher._create_placeholder( - item, - str(inner_e), - item_path - ) + placeholder = PicklePatcher._create_placeholder(item, str(inner_e), item_path) result.append(placeholder) # Convert back to the original type @@ -279,16 +287,21 @@ def _handle_sequence(obj_seq, max_depth, error_msg, path, protocol=None, **kwarg result = tuple(result) elif isinstance(obj_seq, set): # Try to create a set from the result - try: + + with contextlib.suppress(Exception): result = set(result) - except Exception: - # If we can't create a set (unhashable items), keep it as a list - pass return dill.dumps(result, protocol=protocol, **kwargs) @staticmethod - def _handle_object(obj, max_depth, error_msg, path, protocol=None, **kwargs): + def _handle_object( + obj: object, + max_depth: int, + error_msg: str, + path: list[str], + protocol: int | None = None, + **kwargs: Any, # noqa: ANN401 + ) -> bytes: """Handle pickling for custom objects with __dict__. 
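# A minimal sketch of the intended PicklePatcher round-trip, assuming the
# codeflash package is importable; the generator below is just an arbitrary
# value that typically cannot be pickled even by dill.
from codeflash.picklepatch.pickle_patcher import PicklePatcher
from codeflash.picklepatch.pickle_placeholder import PicklePlaceholderAccessError

payload = {"numbers": [1, 2, 3], "stream": (n * n for n in range(5))}

# dumps() keeps the picklable parts and swaps the generator for a placeholder.
blob = PicklePatcher.dumps(payload)

restored = PicklePatcher.loads(blob)
assert restored["numbers"] == [1, 2, 3]

# Touching the placeholder surfaces the original pickling failure lazily.
try:
    restored["stream"].send(None)
except PicklePlaceholderAccessError as err:
    print(err)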
Args: @@ -301,6 +314,7 @@ def _handle_object(obj, max_depth, error_msg, path, protocol=None, **kwargs): Returns: bytes: Pickled data with placeholders for unpicklable objects + """ # Try to create a new instance of the same class try: @@ -310,7 +324,7 @@ def _handle_object(obj, max_depth, error_msg, path, protocol=None, **kwargs): # Handle __dict__ attributes if they exist if hasattr(obj, "__dict__"): for attr_name, attr_value in obj.__dict__.items(): - attr_path = path + [attr_name] + attr_path = [*path, attr_name] # Try to pickle directly first success, _ = PicklePatcher._pickle(attr_value, attr_path, protocol, **kwargs) @@ -326,11 +340,7 @@ def _handle_object(obj, max_depth, error_msg, path, protocol=None, **kwargs): setattr(new_obj, attr_name, dill.loads(attr_bytes)) except Exception as inner_e: # Use placeholder for unpicklable attribute - placeholder = PicklePatcher._create_placeholder( - attr_value, - str(inner_e), - attr_path - ) + placeholder = PicklePatcher._create_placeholder(attr_value, str(inner_e), attr_path) setattr(new_obj, attr_name, placeholder) # Try to pickle the patched object @@ -338,9 +348,9 @@ def _handle_object(obj, max_depth, error_msg, path, protocol=None, **kwargs): if success: return result # Fall through to placeholder creation - except Exception: + except Exception: # noqa: S110 pass # Fall through to placeholder creation # If we get here, just use a placeholder placeholder = PicklePatcher._create_placeholder(obj, error_msg, path) - return dill.dumps(placeholder, protocol=protocol, **kwargs) \ No newline at end of file + return dill.dumps(placeholder, protocol=protocol, **kwargs) diff --git a/codeflash/picklepatch/pickle_placeholder.py b/codeflash/picklepatch/pickle_placeholder.py index 0d730dabb..50e9c5aa3 100644 --- a/codeflash/picklepatch/pickle_placeholder.py +++ b/codeflash/picklepatch/pickle_placeholder.py @@ -1,8 +1,12 @@ +from __future__ import annotations + +from typing import Any + + class PicklePlaceholderAccessError(Exception): """Custom exception raised when attempting to access an unpicklable object.""" - class PicklePlaceholder: """A placeholder for an object that couldn't be pickled. @@ -10,7 +14,7 @@ class PicklePlaceholder: placeholder will raise a PicklePlaceholderAccessError. """ - def __init__(self, obj_type, obj_str, error_msg, path=None): + def __init__(self, obj_type: str, obj_str: str, error_msg: str, path: list[str] | None = None) -> None: """Initialize a placeholder for an unpicklable object. Args: @@ -26,46 +30,43 @@ def __init__(self, obj_type, obj_str, error_msg, path=None): self.__dict__["error_msg"] = error_msg self.__dict__["path"] = path if path is not None else [] - def __getattr__(self, name): + def __getattr__(self, name) -> Any: # noqa: ANN001, ANN401 """Raise a custom error when any attribute is accessed.""" path_str = ".".join(self.__dict__["path"]) if self.__dict__["path"] else "root object" - raise PicklePlaceholderAccessError( + msg = ( f"Attempt to access unpickleable object: Cannot access attribute '{name}' on unpicklable object at {path_str}. " f"Original type: {self.__dict__['obj_type']}. 
Error: {self.__dict__['error_msg']}" ) + raise PicklePlaceholderAccessError(msg) - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: Any) -> None: # noqa: ANN401 """Prevent setting attributes.""" self.__getattr__(name) # This will raise our custom error - def __call__(self, *args, **kwargs): + def __call__(self, *args: Any, **kwargs: Any) -> Any: # noqa: ANN401, ARG002 """Raise a custom error when the object is called.""" path_str = ".".join(self.__dict__["path"]) if self.__dict__["path"] else "root object" - raise PicklePlaceholderAccessError( + msg = ( f"Attempt to access unpickleable object: Cannot call unpicklable object at {path_str}. " f"Original type: {self.__dict__['obj_type']}. Error: {self.__dict__['error_msg']}" ) + raise PicklePlaceholderAccessError(msg) - def __repr__(self): + def __repr__(self) -> str: """Return a string representation of the placeholder.""" try: path_str = ".".join(self.__dict__["path"]) if self.__dict__["path"] else "root" return f"" - except: + except: # noqa: E722 return "" - def __str__(self): + def __str__(self) -> str: """Return a string representation of the placeholder.""" return self.__repr__() - def __reduce__(self): + def __reduce__(self) -> tuple: """Make sure pickling of the placeholder itself works correctly.""" return ( PicklePlaceholder, - ( - self.__dict__["obj_type"], - self.__dict__["obj_str"], - self.__dict__["error_msg"], - self.__dict__["path"] - ) + (self.__dict__["obj_type"], self.__dict__["obj_str"], self.__dict__["error_msg"], self.__dict__["path"]), ) diff --git a/codeflash/result/create_pr.py b/codeflash/result/create_pr.py index 502c811eb..8524d397e 100644 --- a/codeflash/result/create_pr.py +++ b/codeflash/result/create_pr.py @@ -78,7 +78,7 @@ def check_create_pr( speedup_pct=explanation.speedup_pct, winning_behavioral_test_results=explanation.winning_behavioral_test_results, winning_benchmarking_test_results=explanation.winning_benchmarking_test_results, - benchmark_details=explanation.benchmark_details + benchmark_details=explanation.benchmark_details, ), existing_tests=existing_tests_source, generated_tests=generated_original_test_source, @@ -125,7 +125,7 @@ def check_create_pr( speedup_pct=explanation.speedup_pct, winning_behavioral_test_results=explanation.winning_behavioral_test_results, winning_benchmarking_test_results=explanation.winning_benchmarking_test_results, - benchmark_details=explanation.benchmark_details + benchmark_details=explanation.benchmark_details, ), existing_tests=existing_tests_source, generated_tests=generated_original_test_source, diff --git a/codeflash/result/explanation.py b/codeflash/result/explanation.py index c6e1fb9dc..bfb061cec 100644 --- a/codeflash/result/explanation.py +++ b/codeflash/result/explanation.py @@ -77,23 +77,23 @@ def to_console_string(self) -> str: test_function, f"{detail.original_timing}", f"{detail.expected_new_timing}", - f"{detail.speedup_percent:.2f}%" + f"{detail.speedup_percent:.2f}%", ) # Convert table to string string_buffer = StringIO() console = Console(file=string_buffer, width=terminal_width) console.print(table) - benchmark_info = cast(StringIO, console.file).getvalue() + "\n" # Cast for mypy + benchmark_info = cast("StringIO", console.file).getvalue() + "\n" # Cast for mypy return ( - f"Optimized {self.function_name} in {self.file_path}\n" - f"{self.perf_improvement_line}\n" - f"Runtime went down from {original_runtime_human} to {best_runtime_human} \n\n" - + (benchmark_info if benchmark_info else "") - + self.raw_explanation_message 
- + " \n\n" - + "The new optimized code was tested for correctness. The results are listed below.\n" - + f"{TestResults.report_to_string(self.winning_behavioral_test_results.get_test_pass_fail_report_by_type())}\n" + f"Optimized {self.function_name} in {self.file_path}\n" + f"{self.perf_improvement_line}\n" + f"Runtime went down from {original_runtime_human} to {best_runtime_human} \n\n" + + (benchmark_info if benchmark_info else "") + + self.raw_explanation_message + + " \n\n" + + "The new optimized code was tested for correctness. The results are listed below.\n" + + f"{TestResults.report_to_string(self.winning_behavioral_test_results.get_test_pass_fail_report_by_type())}\n" ) def explanation_message(self) -> str: diff --git a/codeflash/telemetry/posthog_cf.py b/codeflash/telemetry/posthog_cf.py index f6c10b8b0..a90ef16c9 100644 --- a/codeflash/telemetry/posthog_cf.py +++ b/codeflash/telemetry/posthog_cf.py @@ -12,7 +12,7 @@ _posthog = None -def initialize_posthog(enabled: bool = True) -> None: +def initialize_posthog(enabled: bool = True) -> None: # noqa: FBT001, FBT002 """Enable or disable PostHog. :param enabled: Whether to enable PostHog. @@ -20,8 +20,8 @@ def initialize_posthog(enabled: bool = True) -> None: if not enabled: return - global _posthog - _posthog = Posthog(project_api_key="phc_aUO790jHd7z1SXwsYCz8dRApxueplZlZWeDSpKc5hol", host="https://us.posthog.com") # type: ignore + global _posthog # noqa: PLW0603 + _posthog = Posthog(project_api_key="phc_aUO790jHd7z1SXwsYCz8dRApxueplZlZWeDSpKc5hol", host="https://us.posthog.com") # type: ignore # noqa: PGH003 _posthog.log.setLevel(logging.CRITICAL) # Suppress PostHog logging ph("cli-telemetry-enabled") @@ -41,6 +41,6 @@ def ph(event: str, properties: dict[str, Any] | None = None) -> None: user_id = get_user_id() if user_id: - _posthog.capture(distinct_id=user_id, event=event, properties=properties) # type: ignore + _posthog.capture(distinct_id=user_id, event=event, properties=properties) # type: ignore # noqa: PGH003 else: logger.debug("Failed to log event to PostHog: User ID could not be retrieved.") diff --git a/codeflash/telemetry/sentry.py b/codeflash/telemetry/sentry.py index 81dee2957..3c35d2fd8 100644 --- a/codeflash/telemetry/sentry.py +++ b/codeflash/telemetry/sentry.py @@ -4,7 +4,7 @@ from sentry_sdk.integrations.logging import LoggingIntegration -def init_sentry(enabled: bool = False, exclude_errors: bool = False) -> None: +def init_sentry(enabled: bool = False, exclude_errors: bool = False) -> None: # noqa: FBT001, FBT002 if enabled: sentry_logging = LoggingIntegration( level=logging.INFO, # Capture info and above as breadcrumbs diff --git a/codeflash/tracer.py b/codeflash/tracer.py index 5d1240868..c06cbe949 100644 --- a/codeflash/tracer.py +++ b/codeflash/tracer.py @@ -76,7 +76,7 @@ def __init__( self, output: str = "codeflash.trace", functions: list[str] | None = None, - disable: bool = False, + disable: bool = False, # noqa: FBT001, FBT002 config_file_path: Path | None = None, max_function_count: int = 256, timeout: int | None = None, # seconds @@ -242,7 +242,7 @@ def __exit__( overflow="ignore", ) - def tracer_logic(self, frame: FrameType, event: str) -> None: + def tracer_logic(self, frame: FrameType, event: str) -> None: # noqa: PLR0911 if event != "call": return if self.timeout is not None and (time.time() - self.start_time) > self.timeout: @@ -400,7 +400,7 @@ def trace_dispatch_call(self, frame: FrameType, t: int) -> int: class_name = arguments["self"].__class__.__name__ elif "cls" in arguments and 
hasattr(arguments["cls"], "__name__"): class_name = arguments["cls"].__name__ - except Exception: # noqa: BLE001, S110 + except Exception: # noqa: S110 pass fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name, class_name) @@ -412,7 +412,7 @@ def trace_dispatch_call(self, frame: FrameType, t: int) -> int: else: timings[fn] = 0, 0, 0, 0, {} return 1 # noqa: TRY300 - except Exception: # noqa: BLE001 + except Exception: # Handle any errors gracefully return 0 @@ -475,7 +475,7 @@ def trace_dispatch_return(self, frame: FrameType, t: int) -> int: cc = cc + 1 if pfn in callers: - callers[pfn] = callers[pfn] + 1 # hack: gather more + callers[pfn] = callers[pfn] + 1 # TODO: gather more # stats such as the amount of time added to ct courtesy # of this specific call, and the contribution to cc # courtesy of this call. @@ -566,7 +566,7 @@ def print_stats(self, sort: str | int | tuple = -1) -> None: # Store with new format new_stats[new_func] = (cc, nc, tt, ct, new_callers) - except Exception as e: # noqa: BLE001 + except Exception as e: console.print(f"Error converting stats for {func}: {e}") continue @@ -603,7 +603,7 @@ def print_stats(self, sort: str | int | tuple = -1) -> None: new_callers[new_caller_func] = count new_timings[new_func] = (cc, ns, tt, ct, new_callers) - except Exception as e: # noqa: BLE001 + except Exception as e: console.print(f"Error converting timings for {func}: {e}") continue @@ -673,7 +673,7 @@ def print_stats(self, sort: str | int | tuple = -1) -> None: console.print(Align.center(table)) - except Exception as e: # noqa: BLE001 + except Exception as e: console.print(f"[bold red]Error in stats processing:[/bold red] {e}") console.print(f"Traced {self.trace_count:,} function calls") self.total_tt = 0 diff --git a/codeflash/tracing/profile_stats.py b/codeflash/tracing/profile_stats.py index 6cf82f3e3..c2ed7cb49 100644 --- a/codeflash/tracing/profile_stats.py +++ b/codeflash/tracing/profile_stats.py @@ -43,7 +43,7 @@ def create_stats(self) -> None: unmapped_callers, ) - def print_stats(self, *amount): + def print_stats(self, *amount) -> pstats.Stats: # noqa: ANN002 # Copied from pstats.Stats.print_stats and modified to print the correct time unit for filename in self.files: print(filename, file=self.stream) @@ -55,7 +55,7 @@ def print_stats(self, *amount): print(indent, self.total_calls, "function calls", end=" ", file=self.stream) if self.total_calls != self.prim_calls: - print("(%d primitive calls)" % self.prim_calls, end=" ", file=self.stream) + print("(%d primitive calls)" % self.prim_calls, end=" ", file=self.stream) # noqa: UP031 time_unit = {"ns": "nanoseconds", "us": "microseconds", "ms": "milliseconds", "s": "seconds"}[self.time_unit] print(f"in {self.total_tt:.3f} {time_unit}", file=self.stream) print(file=self.stream) diff --git a/codeflash/tracing/replay_test.py b/codeflash/tracing/replay_test.py index d5a9559cd..d2b8c07b1 100644 --- a/codeflash/tracing/replay_test.py +++ b/codeflash/tracing/replay_test.py @@ -2,11 +2,15 @@ import sqlite3 import textwrap -from collections.abc import Generator -from typing import Any, Optional +from typing import TYPE_CHECKING, Any, Optional -from codeflash.discovery.functions_to_optimize import FunctionProperties, inspect_top_level_functions_or_methods -from codeflash.tracing.tracing_utils import FunctionModules +from codeflash.discovery.functions_to_optimize import inspect_top_level_functions_or_methods + +if TYPE_CHECKING: + from collections.abc import Generator + + from codeflash.discovery.functions_to_optimize import 
FunctionProperties + from codeflash.tracing.tracing_utils import FunctionModules def get_next_arg_and_return( @@ -40,7 +44,10 @@ def get_function_alias(module: str, function_name: str) -> str: def create_trace_replay_test( - trace_file: str, functions: list[FunctionModules], test_framework: str = "pytest", max_run_count=100 + trace_file: str, + functions: list[FunctionModules], + test_framework: str = "pytest", + max_run_count=100, # noqa: ANN001 ) -> str: assert test_framework in {"pytest", "unittest"} diff --git a/codeflash/tracing/tracing_utils.py b/codeflash/tracing/tracing_utils.py index 64019f550..2e7096963 100644 --- a/codeflash/tracing/tracing_utils.py +++ b/codeflash/tracing/tracing_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path from typing import Optional diff --git a/codeflash/update_license_version.py b/codeflash/update_license_version.py index 6aad189b4..3deff1e69 100644 --- a/codeflash/update_license_version.py +++ b/codeflash/update_license_version.py @@ -30,15 +30,15 @@ def main() -> None: # Check if the minor version has changed and update the date if necessary if current_major_minor_version and major_minor_version != current_major_minor_version: # Calculate the new date, which is the current year plus four years - new_year = datetime.now().year + 4 - new_date = f"{new_year}-{datetime.now().strftime('%m-%d')}" + new_year = datetime.now().year + 4 # noqa: DTZ005 + new_date = f"{new_year}-{datetime.now().strftime('%m-%d')}" # noqa: DTZ005 # Define the pattern to search for and the replacement string for the date date_pattern = re.compile(r"(Change Date:\s+)(\d{4}-\d{2}-\d{2})") date_replacement = r"\g<1>" + new_date updated_license_text = date_pattern.sub(date_replacement, updated_license_text) # Write the updated LICENSE file - with open(Path(__file__).parent / "LICENSE", "w", encoding="utf8") as file: + with (Path(__file__).parent / "LICENSE").open("w", encoding="utf8") as file: file.write(updated_license_text) diff --git a/codeflash/verification/codeflash_capture.py b/codeflash/verification/codeflash_capture.py index 4fd2cf079..3d28a027e 100644 --- a/codeflash/verification/codeflash_capture.py +++ b/codeflash/verification/codeflash_capture.py @@ -7,10 +7,13 @@ import os import sqlite3 import time -from pathlib import Path from enum import Enum +from pathlib import Path +from typing import Callable + import dill as pickle + class VerificationType(str, Enum): FUNCTION_CALL = ( "function_call" # Correctness verification for a test function, checks input values and output values) @@ -79,12 +82,12 @@ def get_test_info_from_stack(tests_root: str) -> tuple[str, str | None, str, str return test_module_name, test_class_name, test_name, line_id -def codeflash_capture(function_name: str, tmp_dir_path: str, tests_root: str, is_fto: bool = False): - """Defines decorator to be instrumented onto the init function in the code. 
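# Roughly how the codeflash_capture decorator ends up applied to a target
# class's __init__ (a sketch, assuming codeflash is importable; the paths and
# the Widget class are made-up placeholders).
from codeflash.verification.codeflash_capture import codeflash_capture

class Widget:
    @codeflash_capture(
        function_name="Widget.__init__",
        tmp_dir_path="/tmp/codeflash",
        tests_root="/repo/tests",
        is_fto=True,
    )
    def __init__(self, size: int) -> None:
        self.size = size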
Collects info of the test that called this, and captures the state of the instance.""" +def codeflash_capture(function_name: str, tmp_dir_path: str, tests_root: str, is_fto: bool = False) -> Callable: # noqa: FBT001, FBT002 + """Define a decorator to instrument the init function, collect test info, and capture the instance state.""" - def decorator(wrapped): + def decorator(wrapped: Callable) -> Callable: @functools.wraps(wrapped) - def wrapper(*args, **kwargs): + def wrapper(*args, **kwargs) -> None: # noqa: ANN002, ANN003 # Dynamic information retrieved from stack test_module_name, test_class_name, test_name, line_id = get_test_info_from_stack(tests_root) diff --git a/codeflash/verification/comparator.py b/codeflash/verification/comparator.py index 79ae7776c..d86b9ef62 100644 --- a/codeflash/verification/comparator.py +++ b/codeflash/verification/comparator.py @@ -1,3 +1,4 @@ +# ruff: noqa: PGH003 import array import ast import datetime @@ -20,40 +21,40 @@ except ImportError: HAS_NUMPY = False try: - import sqlalchemy + import sqlalchemy # type: ignore HAS_SQLALCHEMY = True except ImportError: HAS_SQLALCHEMY = False try: - import scipy + import scipy # type: ignore HAS_SCIPY = True except ImportError: HAS_SCIPY = False try: - import pandas + import pandas # type: ignore # noqa: ICN001 HAS_PANDAS = True except ImportError: HAS_PANDAS = False try: - import pyrsistent + import pyrsistent # type: ignore HAS_PYRSISTENT = True except ImportError: HAS_PYRSISTENT = False try: - import torch + import torch # type: ignore HAS_TORCH = True except ImportError: HAS_TORCH = False -def comparator(orig: Any, new: Any, superset_obj=False) -> bool: +def comparator(orig: Any, new: Any, superset_obj=False) -> bool: # noqa: ANN001, ANN401, FBT002, PLR0911 """Compare two objects for equality recursively. If superset_obj is True, the new object is allowed to have more keys than the original object. 
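# A small sketch of comparator() semantics, assuming the codeflash package is
# importable; the values are arbitrary.
from codeflash.verification.comparator import comparator

# Strict recursive comparison: the types must match exactly.
assert comparator({"a": [1, 2]}, {"a": [1, 2]})
assert not comparator(1, 1.0)  # equal under ==, but int vs. float

# With superset_obj=True the new object may carry extra keys, as long as every
# original key still compares equal.
assert comparator({"a": 1}, {"a": 1, "b": 2}, superset_obj=True)
assert not comparator({"a": 1}, {"a": 2}, superset_obj=True)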
However, the existing keys/values must be equivalent.""" try: if type(orig) is not type(new): @@ -84,7 +85,7 @@ def comparator(orig: Any, new: Any, superset_obj=False) -> bool: frozenset, enum.Enum, type, - range + range, ), ): return orig == new @@ -108,7 +109,7 @@ def comparator(orig: Any, new: Any, superset_obj=False) -> bool: if HAS_SQLALCHEMY: try: insp = sqlalchemy.inspection.inspect(orig) - insp = sqlalchemy.inspection.inspect(new) + insp = sqlalchemy.inspection.inspect(new) # noqa: F841 orig_keys = orig.__dict__ new_keys = new.__dict__ for key in list(orig_keys.keys()): @@ -116,14 +117,14 @@ def comparator(orig: Any, new: Any, superset_obj=False) -> bool: continue if key not in new_keys or not comparator(orig_keys[key], new_keys[key], superset_obj): return False - return True + return True # noqa: TRY300 except sqlalchemy.exc.NoInspectionAvailable: pass # scipy condition because dok_matrix type is also a instance of dict, but dict comparison doesn't work for it if isinstance(orig, dict) and not (HAS_SCIPY and isinstance(orig, scipy.sparse.spmatrix)): if superset_obj: - return all(k in new.keys() and comparator(v, new[k], superset_obj) for k, v in orig.items()) + return all(k in new and comparator(v, new[k], superset_obj) for k, v in orig.items()) if len(orig) != len(new): return False for key in orig: @@ -183,12 +184,12 @@ def comparator(orig: Any, new: Any, superset_obj=False) -> bool: try: if HAS_NUMPY and np.isnan(orig): return np.isnan(new) - except Exception: + except Exception: # noqa: S110 pass try: if HAS_NUMPY and np.isinf(orig): return np.isinf(new) - except Exception: + except Exception: # noqa: S110 pass if HAS_TORCH and isinstance(orig, torch.Tensor): @@ -228,14 +229,14 @@ def comparator(orig: Any, new: Any, superset_obj=False) -> bool: try: if hasattr(orig, "__eq__") and str(type(orig.__eq__)) == "": return orig == new - except Exception: + except Exception: # noqa: S110 pass # For class objects if hasattr(orig, "__dict__") and hasattr(new, "__dict__"): orig_keys = orig.__dict__ new_keys = new.__dict__ - if type(orig_keys) == types.MappingProxyType and type(new_keys) == types.MappingProxyType: + if type(orig_keys) == types.MappingProxyType and type(new_keys) == types.MappingProxyType: # noqa: E721 # meta class objects if orig != new: return False @@ -259,7 +260,7 @@ def comparator(orig: Any, new: Any, superset_obj=False) -> bool: return True # TODO : Add other types here logger.warning(f"Unknown comparator input type: {type(orig)}") - return False + return False # noqa: TRY300 except RecursionError as e: logger.error(f"RecursionError while comparing objects: {e}") sentry_sdk.capture_exception(e) diff --git a/codeflash/verification/concolic_testing.py b/codeflash/verification/concolic_testing.py index c8b6053a0..5792a289d 100644 --- a/codeflash/verification/concolic_testing.py +++ b/codeflash/verification/concolic_testing.py @@ -1,23 +1,26 @@ from __future__ import annotations -import time - import ast import subprocess import tempfile -from argparse import Namespace +import time from pathlib import Path +from typing import TYPE_CHECKING from codeflash.cli_cmds.console import console, logger from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE from codeflash.code_utils.concolic_utils import clean_concolic_tests from codeflash.code_utils.static_analysis import has_typed_parameters from codeflash.discovery.discover_unit_tests import discover_unit_tests -from codeflash.discovery.functions_to_optimize import FunctionToOptimize -from codeflash.models.models import 
FunctionCalledInTest from codeflash.telemetry.posthog_cf import ph from codeflash.verification.verification_utils import TestConfig +if TYPE_CHECKING: + from argparse import Namespace + + from codeflash.discovery.functions_to_optimize import FunctionToOptimize + from codeflash.models.models import FunctionCalledInTest + def generate_concolic_tests( test_cfg: TestConfig, args: Namespace, function_to_optimize: FunctionToOptimize, function_to_optimize_ast: ast.AST diff --git a/codeflash/verification/equivalence.py b/codeflash/verification/equivalence.py index b7ce6978a..9d7f5ba2c 100644 --- a/codeflash/verification/equivalence.py +++ b/codeflash/verification/equivalence.py @@ -70,9 +70,12 @@ def compare_test_results(original_results: TestResults, candidate_results: TestR are_equal = False break - if original_test_result.test_type in {TestType.EXISTING_UNIT_TEST, TestType.CONCOLIC_COVERAGE_TEST, TestType.GENERATED_REGRESSION, TestType.REPLAY_TEST} and ( - cdd_test_result.did_pass != original_test_result.did_pass - ): + if original_test_result.test_type in { + TestType.EXISTING_UNIT_TEST, + TestType.CONCOLIC_COVERAGE_TEST, + TestType.GENERATED_REGRESSION, + TestType.REPLAY_TEST, + } and (cdd_test_result.did_pass != original_test_result.did_pass): are_equal = False break sys.setrecursionlimit(original_recursion_limit) diff --git a/codeflash/verification/instrument_codeflash_capture.py b/codeflash/verification/instrument_codeflash_capture.py index c54f3e5d7..d4db6d26e 100644 --- a/codeflash/verification/instrument_codeflash_capture.py +++ b/codeflash/verification/instrument_codeflash_capture.py @@ -2,11 +2,14 @@ import ast from pathlib import Path +from typing import TYPE_CHECKING import isort from codeflash.code_utils.code_utils import get_run_tmp_file -from codeflash.discovery.functions_to_optimize import FunctionToOptimize + +if TYPE_CHECKING: + from codeflash.discovery.functions_to_optimize import FunctionToOptimize def instrument_codeflash_capture( @@ -52,7 +55,12 @@ def instrument_codeflash_capture( def add_codeflash_capture_to_init( - target_classes: set[str], fto_name: str, tmp_dir_path: str, code: str, tests_root: Path, is_fto: bool = False + target_classes: set[str], + fto_name: str, + tmp_dir_path: str, + code: str, + tests_root: Path, + is_fto: bool = False, # noqa: FBT001, FBT002 ) -> str: """Add codeflash_capture decorator to __init__ function in the specified class.""" tree = ast.parse(code) @@ -69,7 +77,12 @@ class InitDecorator(ast.NodeTransformer): """AST transformer that adds codeflash_capture decorator to specific class's __init__.""" def __init__( - self, target_classes: set[str], fto_name: str, tmp_dir_path: str, tests_root: Path, is_fto=False + self, + target_classes: set[str], + fto_name: str, + tmp_dir_path: str, + tests_root: Path, + is_fto=False, # noqa: ANN001, FBT002 ) -> None: self.target_classes = target_classes self.fto_name = fto_name @@ -109,7 +122,7 @@ def visit_ClassDef(self, node: ast.ClassDef) -> ast.ClassDef: func=ast.Name(id="codeflash_capture", ctx=ast.Load()), args=[], keywords=[ - ast.keyword(arg="function_name", value=ast.Constant(value=".".join([node.name, "__init__"]))), + ast.keyword(arg="function_name", value=ast.Constant(value=f"{node.name}.__init__")), ast.keyword(arg="tmp_dir_path", value=ast.Constant(value=self.tmp_dir_path)), ast.keyword(arg="tests_root", value=ast.Constant(value=str(self.tests_root))), ast.keyword(arg="is_fto", value=ast.Constant(value=self.is_fto)), diff --git a/codeflash/verification/parse_line_profile_test_output.py 
b/codeflash/verification/parse_line_profile_test_output.py index 5e753b932..1877c0654 100644 --- a/codeflash/verification/parse_line_profile_test_output.py +++ b/codeflash/verification/parse_line_profile_test_output.py @@ -1,88 +1,91 @@ -"""Adapted from line_profiler (https://github.com/pyutils/line_profiler) written by Enthought, Inc. (BSD License)""" -import linecache +"""Adapted from line_profiler (https://github.com/pyutils/line_profiler) written by Enthought, Inc. (BSD License).""" + +from __future__ import annotations + import inspect -from codeflash.code_utils.tabulate import tabulate +import linecache import os +from typing import TYPE_CHECKING, Optional + import dill as pickle -from pathlib import Path -from typing import Optional -def show_func(filename, start_lineno, func_name, timings, unit): +from codeflash.code_utils.tabulate import tabulate + +if TYPE_CHECKING: + from pathlib import Path + + +def show_func( + filename: str, start_lineno: int, func_name: str, timings: list[tuple[int, int, float]], unit: float +) -> str: total_hits = sum(t[1] for t in timings) total_time = sum(t[2] for t in timings) out_table = "" table_rows = [] if total_hits == 0: - return '' + return "" scalar = 1 - if os.path.exists(filename): - out_table += f'## Function: {func_name}\n' + if os.path.exists(filename): # noqa: PTH110 + out_table += f"## Function: {func_name}\n" # Clear the cache to ensure that we get up-to-date results. linecache.clearcache() all_lines = linecache.getlines(filename) - sublines = inspect.getblock(all_lines[start_lineno - 1:]) - out_table += '## Total time: %g s\n' % (total_time * unit) + sublines = inspect.getblock(all_lines[start_lineno - 1 :]) + out_table += "## Total time: %g s\n" % (total_time * unit) # Define minimum column sizes so text fits and usually looks consistent - default_column_sizes = { - 'hits': 9, - 'time': 12, - 'perhit': 8, - 'percent': 8, - } + default_column_sizes = {"hits": 9, "time": 12, "perhit": 8, "percent": 8} display = {} # Loop over each line to determine better column formatting. # Fallback to scientific notation if columns are larger than a threshold. 
for lineno, nhits, time in timings: - if total_time == 0: # Happens rarely on empty function - percent = '' - else: - percent = '%5.1f' % (100 * time / total_time) + percent = "" if total_time == 0 else "%5.1f" % (100 * time / total_time) - time_disp = '%5.1f' % (time * scalar) - if len(time_disp) > default_column_sizes['time']: - time_disp = '%5.1g' % (time * scalar) - perhit_disp = '%5.1f' % (float(time) * scalar / nhits) - if len(perhit_disp) > default_column_sizes['perhit']: - perhit_disp = '%5.1g' % (float(time) * scalar / nhits) - nhits_disp = "%d" % nhits - if len(nhits_disp) > default_column_sizes['hits']: - nhits_disp = '%g' % nhits + time_disp = "%5.1f" % (time * scalar) + if len(time_disp) > default_column_sizes["time"]: + time_disp = "%5.1g" % (time * scalar) + perhit_disp = "%5.1f" % (float(time) * scalar / nhits) + if len(perhit_disp) > default_column_sizes["perhit"]: + perhit_disp = "%5.1g" % (float(time) * scalar / nhits) + nhits_disp = "%d" % nhits # noqa: UP031 + if len(nhits_disp) > default_column_sizes["hits"]: + nhits_disp = f"{nhits:g}" display[lineno] = (nhits_disp, time_disp, perhit_disp, percent) linenos = range(start_lineno, start_lineno + len(sublines)) - empty = ('', '', '', '') - table_cols = ('Hits', 'Time', 'Per Hit', '% Time', 'Line Contents') + empty = ("", "", "", "") + table_cols = ("Hits", "Time", "Per Hit", "% Time", "Line Contents") for lineno, line in zip(linenos, sublines): nhits, time, per_hit, percent = display.get(lineno, empty) - line_ = line.rstrip('\n').rstrip('\r') - if 'def' in line_ or nhits!='': + line_ = line.rstrip("\n").rstrip("\r") + if "def" in line_ or nhits != "": table_rows.append((nhits, time, per_hit, percent, line_)) - pass - out_table += tabulate(headers=table_cols,tabular_data=table_rows,tablefmt="pipe",colglobalalign=None, preserve_whitespace=True) - out_table+='\n' + out_table += tabulate( + headers=table_cols, tabular_data=table_rows, tablefmt="pipe", colglobalalign=None, preserve_whitespace=True + ) + out_table += "\n" return out_table + def show_text(stats: dict) -> str: - """ Show text for the given timings. - """ + """Show text for the given timings.""" out_table = "" - out_table += '# Timer unit: %g s\n' % stats['unit'] - stats_order = sorted(stats['timings'].items()) + out_table += "# Timer unit: {:g} s\n".format(stats["unit"]) + stats_order = sorted(stats["timings"].items()) # Show detailed per-line information for each function. 
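# Sketch of the stats mapping that show_text()/show_func() consume, matching
# the shape built by parse_line_profile_results(); the path and numbers here
# are made up for illustration.
stats = {
    "unit": 1e-06,  # timer unit in seconds
    "timings": {
        # (filename, first line number, function name) -> per-line samples
        ("/path/to/bubble_sort.py", 3, "bubble_sort"): [
            # (line number, hits, time in timer units)
            (4, 1, 120.0),
            (5, 500, 98000.0),
        ],
    },
}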
- for (fn, lineno, name), timings in stats_order: - table_md = show_func(fn, lineno, name, stats['timings'][fn, lineno, name], stats['unit']) + for (fn, lineno, name), _timings in stats_order: + table_md = show_func(fn, lineno, name, stats["timings"][fn, lineno, name], stats["unit"]) out_table += table_md return out_table + def parse_line_profile_results(line_profiler_output_file: Optional[Path]) -> dict: line_profiler_output_file = line_profiler_output_file.with_suffix(".lprof") stats_dict = {} if not line_profiler_output_file.exists(): - return {'timings':{},'unit':0, 'str_out':''}, None - else: - with open(line_profiler_output_file,'rb') as f: - stats = pickle.load(f) - stats_dict['timings'] = stats.timings - stats_dict['unit'] = stats.unit - str_out = show_text(stats_dict) - stats_dict['str_out'] = str_out - return stats_dict, None + return {"timings": {}, "unit": 0, "str_out": ""}, None + with line_profiler_output_file.open("rb") as f: + stats = pickle.load(f) + stats_dict["timings"] = stats.timings + stats_dict["unit"] = stats.unit + str_out = show_text(stats_dict) + stats_dict["str_out"] = str_out + return stats_dict, None diff --git a/codeflash/verification/parse_test_output.py b/codeflash/verification/parse_test_output.py index 7f8c34841..b9060f4bc 100644 --- a/codeflash/verification/parse_test_output.py +++ b/codeflash/verification/parse_test_output.py @@ -140,7 +140,7 @@ def parse_sqlite_test_results(sqlite_file_path: Path, test_files: TestFiles, tes test_type = test_files.get_test_type_by_original_file_path(test_file_path) try: ret_val = (pickle.loads(val[7]) if loop_index == 1 else None,) - except Exception: + except Exception: # noqa: S112 continue test_results.add( function_test_invocation=FunctionTestInvocation( @@ -193,7 +193,7 @@ def parse_test_xml( for suite in xml: for testcase in suite: class_name = testcase.classname - test_file_name = suite._elem.attrib.get("file") + test_file_name = suite._elem.attrib.get("file") # noqa: SLF001 if ( test_file_name == f"unittest{os.sep}loader.py" and class_name == "unittest.loader._FailedTest" @@ -278,7 +278,7 @@ def parse_test_xml( test_module_path=test_module_path, test_class_name=test_class, test_function_name=test_function, - function_getting_tested="", # FIXME + function_getting_tested="", # TODO: Fix this iteration_id="", ), file_name=test_file_path, diff --git a/codeflash/verification/pytest_plugin.py b/codeflash/verification/pytest_plugin.py index d9259088d..cb5309af1 100644 --- a/codeflash/verification/pytest_plugin.py +++ b/codeflash/verification/pytest_plugin.py @@ -57,10 +57,8 @@ class UnexpectedError(Exception): parts = line.split() if len(parts) >= 3: # Swap size is in KB in the 3rd column - try: + with contextlib.suppress(ValueError, IndexError): swap_size += int(parts[2]) * 1024 # Convert KB to bytes - except (ValueError, IndexError): - pass # Get total system memory total_memory = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES") @@ -216,7 +214,7 @@ def _clear_cache_for_object(obj: Any) -> None: # noqa: ANN401 try: obj_module = inspect.getmodule(obj) module_name = obj_module.__name__.split(".")[0] if obj_module is not None else None - except Exception: # noqa: BLE001 + except Exception: module_name = None if module_name in protected_modules: @@ -237,9 +235,9 @@ def _clear_cache_for_object(obj: Any) -> None: # noqa: ANN401 for _, obj in inspect.getmembers(module): if callable(obj): _clear_cache_for_object(obj) - except Exception: # noqa: BLE001, S110 + except Exception: # noqa: S110 pass - except Exception: # 
noqa: BLE001, S110 + except Exception: # noqa: S110 pass def _set_nodeid(self, nodeid: str, count: int) -> str: diff --git a/codeflash/verification/test_runner.py b/codeflash/verification/test_runner.py index d4990ef56..58834d1c4 100644 --- a/codeflash/verification/test_runner.py +++ b/codeflash/verification/test_runner.py @@ -65,7 +65,7 @@ def run_behavioral_tests( "--codeflash_loops_scope=session", "--codeflash_min_loops=1", "--codeflash_max_loops=1", - f"--codeflash_seconds={pytest_target_runtime_seconds}", # TODO : This is unnecessary, update the plugin to not ask for this # noqa: E501 + f"--codeflash_seconds={pytest_target_runtime_seconds}", # TODO : This is unnecessary, update the plugin to not ask for this ] result_file_path = get_run_tmp_file(Path("pytest_results.xml")) @@ -150,10 +150,10 @@ def run_line_profile_tests( test_framework: str, *, pytest_target_runtime_seconds: float = TOTAL_LOOPING_TIME, - verbose: bool = False, + verbose: bool = False, # noqa: ARG001 pytest_timeout: int | None = None, - pytest_min_loops: int = 5, - pytest_max_loops: int = 100_000, + pytest_min_loops: int = 5, # noqa: ARG001 + pytest_max_loops: int = 100_000, # noqa: ARG001 line_profiler_output_file: Path | None = None, ) -> tuple[Path, subprocess.CompletedProcess]: if test_framework == "pytest": diff --git a/codeflash/verification/verifier.py b/codeflash/verification/verifier.py index 9f78083a9..8d187f2b1 100644 --- a/codeflash/verification/verifier.py +++ b/codeflash/verification/verifier.py @@ -1,8 +1,7 @@ from __future__ import annotations -import time - import ast +import time from pathlib import Path from typing import TYPE_CHECKING diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..04cfeae09 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2689 @@ +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "ansicon" +version = "1.89.0" +description = "Python wrapper for loading Jason Hood's ANSICON" +optional = false +python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] + +[[package]] +name = "asttokens" +version = "3.0.0" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, +] + +[package.extras] +astroid = ["astroid (>=2,<4)"] +test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "attrs" +version = "25.3.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[package.extras] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; 
platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + +[[package]] +name = "backoff" +version = "1.11.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "backoff-1.11.1-py2.py3-none-any.whl", hash = "sha256:61928f8fa48d52e4faa81875eecf308eccfb1016b018bb6bd21e05b5d90a96c5"}, + {file = "backoff-1.11.1.tar.gz", hash = "sha256:ccb962a2378418c667b3c979b504fdeb7d9e0d29c0579e3b13b86467177728cb"}, +] + +[[package]] +name = "blessed" +version = "1.21.0" +description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." +optional = false +python-versions = ">=2.7" +groups = ["main"] +files = [ + {file = "blessed-1.21.0-py2.py3-none-any.whl", hash = "sha256:f831e847396f5a2eac6c106f4dfadedf46c4f804733574b15fe86d2ed45a9588"}, + {file = "blessed-1.21.0.tar.gz", hash = "sha256:ece8bbc4758ab9176452f4e3a719d70088eb5739798cd5582c9e05f2a28337ec"}, +] + +[package.dependencies] +jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} +wcwidth = ">=0.1.4" + +[[package]] +name = "cattrs" +version = "24.1.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cattrs-24.1.3-py3-none-any.whl", hash = "sha256:adf957dddd26840f27ffbd060a6c4dd3b2192c5b7c2c0525ef1bd8131d8a83f5"}, + {file = "cattrs-24.1.3.tar.gz", hash = "sha256:981a6ef05875b5bb0c7fb68885546186d306f10f0f6718fe9b96c226e68821ff"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +msgspec = ["msgspec (>=0.18.5) ; implementation_name == \"cpython\""] +orjson = ["orjson (>=3.9.2) ; implementation_name == \"cpython\""] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2025.4.26" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = 
"charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = 
"sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, +] + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "sys_platform == \"win32\""} + +[[package]] +name = "coverage" +version = "7.8.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, + {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, + {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, + {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, + {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, + {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, + {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, + {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, + {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, + {file = 
"coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, + {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, + {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, + {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, + {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, + {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, + {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, + {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, +] + 
+[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "crosshair-tool" +version = "0.0.86" +description = "Analyze Python code for correctness using symbolic execution." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "crosshair_tool-0.0.86-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:beb62a9b24ad1c2d82a0118d6291b1f870fe46455151d7131975559082f30ef2"}, + {file = "crosshair_tool-0.0.86-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41d07403dd7d5cd3dddef9bf21c86d358eeb261466551d53c7818c387eec601c"}, + {file = "crosshair_tool-0.0.86-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:654f6cefe2a3b38679b167a487b0b4cc0f721685de08548d9e1fead9817f2367"}, + {file = "crosshair_tool-0.0.86-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:463e57e1dcc79ec618674d0da484411e97f971e3df5ea503ed7d1470e2be2f6d"}, + {file = "crosshair_tool-0.0.86-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb4f8a68a729b1abfd59e1f947401da1ba196c715693763822a1506013694a09"}, + {file = "crosshair_tool-0.0.86-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:932abf48eadb64d458bda7a1dca26c6fb7e0dc29a1ed72d0f6f90d9ae4487315"}, + {file = "crosshair_tool-0.0.86-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ffaad86ad753aed86b3e4c453cca2db7c511fadf5b852797cfeca98ab448550"}, + {file = "crosshair_tool-0.0.86-cp310-cp310-win32.whl", hash = "sha256:e9505280b9a2c7109373797896b124bcef60c7a0d3dfdb6f398dbb3949e6ecd6"}, + {file = "crosshair_tool-0.0.86-cp310-cp310-win_amd64.whl", hash = "sha256:f8311c31c343c5574dcad0f80057d03710cb3530a65df0b6f694a95d3dac36f6"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:360297b059925f9626509f9eb75ee907adfc993f54370ff58e94e18270767eb0"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de6b9bca0175f701088699fa4f568b54d8401c4dd2cbec471c5b14a785d1815c"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:372c7f62b9af36da178dd878cdf3c03bc330e3ffffa03e6fadda4a1edd0956ee"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:737fa92e04426545f825ddbf1de68504b58978c4daea6d27755b9ea163502354"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54dfd933456ed4251d6365f2a57614a7a4a6452e2abf35881c9e77d369f13e14"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cb0e34d620ff69fa28587edb2f140a4b77a2b9dcd68ad11295624cdf646710b2"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4796b0cfda5f8b81c0f06635640c51faf4e91dcce343b32ccc8d9f6075adaf63"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-win32.whl", hash = "sha256:63338585fa8c3ead2204529f58dadde2b2289ef887b7764549773bd87dcb014e"}, + {file = "crosshair_tool-0.0.86-cp311-cp311-win_amd64.whl", hash = "sha256:b91da9e244a7f22da1bace586a9b50fe84f54b3837145a94184b156d0742e897"}, + {file = "crosshair_tool-0.0.86-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e810038191d53231b6529dd0b2302aef595b18b45bd0921453068770bed99f3f"}, + {file = 
"crosshair_tool-0.0.86-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1afe1cf96a8fbe6461a4226b2a49681c067289652ea8591a101e785f84719829"}, + {file = "crosshair_tool-0.0.86-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:63b05b609f483f335f86da77c239c2f6252b77938b914d172bfabd89f2abee19"}, + {file = "crosshair_tool-0.0.86-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7a5f2d1704cb3020404fa7e9362a5a749a7ea069f6d6fe2f8940418eccedd26"}, + {file = "crosshair_tool-0.0.86-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b97e4bc405e71c306cc725296ddeb7879d41515e1c5bc7e2c8186cdc258e9cb"}, + {file = "crosshair_tool-0.0.86-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:119d2ce5cebcf22e19031a8c90a0b1c49982c9a9fb061cea166ce0738b269677"}, + {file = "crosshair_tool-0.0.86-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:180ff02fd451e4340654e372a317bc24ee82222f7b2d53bd496729b64c076410"}, + {file = "crosshair_tool-0.0.86-cp312-cp312-win32.whl", hash = "sha256:8e2ba3c0027a9fc4481b1bdc3e8e6942f6c3497c5e340812f902a787bdb918bd"}, + {file = "crosshair_tool-0.0.86-cp312-cp312-win_amd64.whl", hash = "sha256:8d43c4d90f867e28e9795dbc73fd6cf87d67adbf2845b8ab682a3b68418c35c3"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c3f4614bb7af5bc91b5b8d292fa4e0828822aabd6f87fc047f027fddccf37638"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ce41044f349c35d5db0eef9d1f6d5d59d30c3c2686b71a287a271c9f38007e9"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a1cbf944d053d8948d244127a2e89a8a9f2e2b2767aecf9c29572f1c26cedab"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2fa3d74d49cf8cb45e2372e4799e6cd34391fa7a56ba696e5d547fb283f8589"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4345112fbf971e2bc37fe0b5c473dcb2cd473a6566a29980ca7e1df9c6e1a6c"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cfa42ead7c64e0777778c53ab6f1f5fe2a721fd7938a01f8028a484fec4e138d"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7c15ca03155063d1cd2af2d947d258e5d448c19c14fc3d0a7a3779be7ee4c5fc"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-win32.whl", hash = "sha256:9730b52cee1724e4f9d52360c1f4fc3c2ee63e454f29b13ebec35233a312488f"}, + {file = "crosshair_tool-0.0.86-cp313-cp313-win_amd64.whl", hash = "sha256:4912a41486e9cff8383436d450c0b3476d28ccfd0f348213c4b7e79bb4295ed7"}, + {file = "crosshair_tool-0.0.86-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:74b65d406ad90d65bd209820b6a466cd32a1448182467d0f80a8f5a54546364d"}, + {file = "crosshair_tool-0.0.86-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dda5f72ebedfdaf578faac34574438304168f937c43bf1810f0607ae9fa9e547"}, + {file = "crosshair_tool-0.0.86-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:08d97eeb30711f1e65ba2fc113424d987be8e96ccb1d828b12911943cabc6d3e"}, + {file = "crosshair_tool-0.0.86-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:726828907c23523fa1c44696df2275b789cb70990f8b0f3974324a91a5406763"}, + {file = 
"crosshair_tool-0.0.86-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb975dbe3aa7e943fd03cd56b9d066d14bad9beead94ef42e491f7be031bc2ab"}, + {file = "crosshair_tool-0.0.86-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:36b6f10564226cc7e552779ee0685665777ce184036fbb90800d184a264a3881"}, + {file = "crosshair_tool-0.0.86-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c6ab85f9a4f107f74ed25e3cfa531e723b09d75c1b9e8702220a243211364586"}, + {file = "crosshair_tool-0.0.86-cp38-cp38-win32.whl", hash = "sha256:3a648ba34227b9cc792c6cbe7e91abe5525c0646ccfe184a00d0e64210de944e"}, + {file = "crosshair_tool-0.0.86-cp38-cp38-win_amd64.whl", hash = "sha256:f6168f8aad08e3229bbba78eadb0512a1f52c520ce7cebcf3f7a62875749759f"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f67f13ea3a201152005c90860dc5b357fa64e68cff469aeb7067c6dc233f567f"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9c6dbb13c1b2e630711d6fb74fc19bda240fad71060e1cea97cd9739568454f7"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e436e6ebda9b7cf1a95c45f50e6bc33aba78b4d8deb0f8568559c9c0ebc6343c"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e909e29e1b558332a7a832b6afffdb940fff0e4bc3c23975b50326486fb4066a"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50bf372072f7ecc54e7f48eebf752f82bcab3c9d1795d24f04b619e38d935036"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:555b277833daac96ccfbe7cb1ae4b92448003d4e6bf24765400998631b700a81"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a79c81c8ec7a20c8cf38dcec89ba3815c51f7790a745ec518258d338d5115831"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-win32.whl", hash = "sha256:0aba97fec3ae5ba190d8b5523cb0224b952be13a1f4286d9d20e52509b62109a"}, + {file = "crosshair_tool-0.0.86-cp39-cp39-win_amd64.whl", hash = "sha256:5e4975978697cb9a3d0adc02e5e262462136289bc779472ee7378dd1446084f7"}, + {file = "crosshair_tool-0.0.86.tar.gz", hash = "sha256:e2d0477d1b87c12c2b8d5a468cb43f783ef0a4e23ce89c89e5185ca59fd9f9a7"}, +] + +[package.dependencies] +importlib_metadata = ">=4.0.0" +packaging = "*" +pygls = ">=1.0.0" +typeshed-client = ">=2.0.5" +typing_extensions = ">=3.10.0" +typing-inspect = ">=0.7.1" +z3-solver = ">=4.13.0.0" + +[package.extras] +dev = ["autodocsumm (>=0.2.2,<1)", "black (==22.3.0)", "deal (>=4.13.0)", "icontract (>=2.4.0)", "isort (==5.11.5)", "mypy (==0.990)", "numpy (==1.23.4) ; python_version < \"3.12\"", "numpy (==1.26.0) ; python_version == \"3.12\"", "numpy (==2.0.1) ; python_version >= \"3.13\"", "pre-commit (>=2.20,<3.0)", "pytest", "pytest-xdist", "setuptools", "sphinx (>=3.4.3)", "sphinx-rtd-theme (>=0.5.1)", "wheel"] + +[[package]] +name = "decorator" +version = "5.2.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, +] + +[[package]] +name = "dill" +version = "0.4.0" +description = "serialize all of Python" +optional = false 
+python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049"}, + {file = "dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "distlib" +version = "0.3.9" +description = "Distribution utilities" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "editor" +version = "1.6.6" +description = "🖋 Open the default text editor 🖋" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "editor-1.6.6-py3-none-any.whl", hash = "sha256:e818e6913f26c2a81eadef503a2741d7cca7f235d20e217274a009ecd5a74abf"}, + {file = "editor-1.6.6.tar.gz", hash = "sha256:bb6989e872638cd119db9a4fce284cd8e13c553886a1c044c6b8d8a160c871f8"}, +] + +[package.dependencies] +runs = "*" +xmod = "*" + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "2.2.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, + {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] + +[[package]] +name = "filelock" +version = "3.18.0" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] + +[[package]] +name = "gitdb" +version = "4.0.12" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"}, + {file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.44" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, + {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock ; python_version < \"3.8\"", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions ; python_version < \"3.11\""] + +[[package]] +name = "humanize" +version = "4.12.3" +description = "Python humanize utilities" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "humanize-4.12.3-py3-none-any.whl", hash = "sha256:2cbf6370af06568fa6d2da77c86edb7886f3160ecd19ee1ffef07979efc597f6"}, + {file = "humanize-4.12.3.tar.gz", hash = "sha256:8430be3a615106fdfceb0b2c1b41c4c98c6b0fc5cc59663a5539b111dd325fb0"}, +] + +[package.extras] +tests = ["freezegun", "pytest", "pytest-cov"] + +[[package]] +name = "identify" +version = "2.6.10" +description = "File identification library for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "identify-2.6.10-py2.py3-none-any.whl", hash = "sha256:5f34248f54136beed1a7ba6a6b5c4b6cf21ff495aac7c359e1ef831ae3b8ab25"}, + {file = "identify-2.6.10.tar.gz", hash = "sha256:45e92fd704f3da71cc3880036633f48b4b7265fd4de2b57627cb157216eb7eb8"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff 
(>=0.6.2)"] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "importlib-resources" +version = "6.5.2" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec"}, + {file = "importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] +type = ["pytest-mypy"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "inquirer" +version = "3.4.0" +description = "Collection of common interactive command line user interfaces, based on Inquirer.js" +optional = false +python-versions = ">=3.8.1" +groups = ["main"] +files = [ + {file = "inquirer-3.4.0-py3-none-any.whl", hash = "sha256:bb0ec93c833e4ce7b51b98b1644b0a4d2bb39755c39787f6a504e4fee7a11b60"}, + {file = "inquirer-3.4.0.tar.gz", hash = "sha256:8edc99c076386ee2d2204e5e3653c2488244e82cb197b2d498b3c1b5ffb25d0b"}, +] + +[package.dependencies] +blessed = ">=1.19.0" +editor = ">=1.6.0" +readchar = ">=4.2.0" + +[[package]] +name = "ipython" +version = "8.18.1" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, + {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" 
+exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] + +[[package]] +name = "isort" +version = "6.0.1" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.9.0" +groups = ["main"] +files = [ + {file = "isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615"}, + {file = "isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450"}, +] + +[package.extras] +colors = ["colorama"] +plugins = ["setuptools"] + +[[package]] +name = "jedi" +version = "0.19.2" +description = "An autocompletion tool for Python that can be used for text editors." 
+optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, +] + +[package.dependencies] +parso = ">=0.8.4,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] + +[[package]] +name = "jinxed" +version = "1.3.0" +description = "Jinxed Terminal Library" +optional = false +python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5"}, + {file = "jinxed-1.3.0.tar.gz", hash = "sha256:1593124b18a41b7a3da3b078471442e51dbad3d77b4d4f2b0c26ab6f7d660dbf"}, +] + +[package.dependencies] +ansicon = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "junitparser" +version = "3.2.0" +description = "Manipulates JUnit/xUnit Result XML files" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "junitparser-3.2.0-py2.py3-none-any.whl", hash = "sha256:e14fdc0a999edfc15889b637390e8ef6ca09a49532416d3bd562857d42d4b96d"}, + {file = "junitparser-3.2.0.tar.gz", hash = "sha256:b05e89c27e7b74b3c563a078d6e055d95cf397444f8f689b0ca616ebda0b3c65"}, +] + +[[package]] +name = "libcst" +version = "1.7.0" +description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.13 programs." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "libcst-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:340054c57abcd42953248af18ed278be651a03b1c2a1616f7e1f1ef90b6018ce"}, + {file = "libcst-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdae6e632d222d8db7cb98d7cecb45597c21b8e3841d0c98d4fca79c49dad04b"}, + {file = "libcst-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8f59f3472fe8c0f6e2fad457825ea2ccad8c4c713cca55a91ff2cbfa9bc03"}, + {file = "libcst-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1560598f5c56681adbd32f4b08e9cffcd45a021921d1d784370a7d4d9a2fac11"}, + {file = "libcst-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cd5ab15b12a37f0e9994d8847d5670da936a93d98672c442a956fab34ea0c15"}, + {file = "libcst-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5d5ba9314569865effd5baff3a58ceb2cced52228e181824759c68486a7ec8f4"}, + {file = "libcst-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:3d2ec10015e86a4402c3d2084ede6c7c9268faea1ecb99592fe9e291c515aaa2"}, + {file = "libcst-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f6e693281d6e9a62414205fb300ec228ddc902ca9cb965a09f11561dc10aa94"}, + {file = "libcst-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e635eadb6043d5f967450af27125811c6ccc7eeb4d8c5fd4f1bece9d96418781"}, + {file = "libcst-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c568e14d29489f09faf4915af18235f805d5aa60fa194023b4fadf3209f0c94"}, + {file = "libcst-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9add619a825d6f176774110d79dc3137f353a236c1e3bcd6e063ca6d93d6e0ae"}, + {file = "libcst-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:57a6bcfc8ca8a0bb9e89a2dbf63ee8f0c7e8353a130528dcb47c9e59c2dc8c94"}, + {file = "libcst-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5e22738ec2855803f8242e6bf78057389d10f8954db34bf7079c82abab1b8b95"}, + {file = "libcst-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:fa519d4391326329f37860c2f2aaf80cb11a6122d14afa2f4f00dde6fcfa7ae4"}, + {file = "libcst-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b52692a28d0d958ebfabcf8bfce5fcf2c8582967310d35e6111a6e2d4db96659"}, + {file = "libcst-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61bfc90c8a4594296f8b68702f494dfdfec6e745a4abc0cfa8069d7f22061424"}, + {file = "libcst-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9370c23a3f609280c3f2296d61d34dd32afd7a1c9b19e4e29cc35cb2e2544363"}, + {file = "libcst-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e50e6960ecc3ed67f39fec63aa329e772d5d27f8e2334e30f19a94aa14489f1"}, + {file = "libcst-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ca4e91aa854758040fa6fe7036fbe7f90a36a7d283fa1df8587b6f73084fc997"}, + {file = "libcst-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d894c48f682b0061fdb2c983d5e64c30334db6ce0783560dbbb9df0163179c0c"}, + {file = "libcst-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:14e5c1d427c33d50df75be6bc999a7b2d7c6b7840e2361a18a6f354db50cb18e"}, + {file = "libcst-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93417d36c2a1b70d651d0e970ff73339e8dcd64d341672b68823fa0039665022"}, + {file = 
"libcst-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6523731bfbdbc045ff8649130fe14a46b31ad6925f67acdc0e0d80a0c61719fd"}, + {file = "libcst-1.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a252fa03ea00986f03100379f11e15d381103a09667900fb0fa2076cec19081a"}, + {file = "libcst-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09a5530b40a15dbe6fac842ef2ad87ad561760779380ccf3ade6850854d81406"}, + {file = "libcst-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0456381c939169c4f11caecdb30f7aca6f234640731f8f965849c1631930536b"}, + {file = "libcst-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c8d6176a667d2db0132d133dad6bbf965f915f3071559342ca2cdbbec537ed12"}, + {file = "libcst-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:6137fe549bfbb017283c3cf85419eb0dfaa20a211ad6d525538a2494e248a84b"}, + {file = "libcst-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3923a341a787c1f454909e726a6213dd59c3db26c6e56d0a1fc4f2f7e96b45d7"}, + {file = "libcst-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7d9a796c2f3d5b71dd06b7578e8d1fb1c031d2eb8d59e7b40e288752ae1b210"}, + {file = "libcst-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:932a4c4508bd4cf5248c99b7218bb86af97d87fefa2bdab7ea8a0c28c270724a"}, + {file = "libcst-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d12ffe199ff677a37abfb6b21aba1407eb02246dc7e6bcaf4f8e24a195ec4ad6"}, + {file = "libcst-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:81036e820249937608db7e72d0799180122d40d76d0c0414c454f8aa2ffa9c51"}, + {file = "libcst-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:94acd51ea1206460c20dea764c59222e62c45ae8a486f22024f063d11a7bca88"}, + {file = "libcst-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:c3445dce908fd4971ce9bb5fef5742e26c984027676e3dcf24875fbed1ff7e4c"}, + {file = "libcst-1.7.0.tar.gz", hash = "sha256:a63f44ffa81292f183656234c7f2848653ff45c17d867db83c9335119e28aafa"}, +] + +[package.dependencies] +pyyaml = ">=5.2" + +[package.extras] +dev = ["jupyter (>=1.0.0)", "libcst[dev-without-jupyter]", "nbsphinx (>=0.4.2)"] +dev-without-jupyter = ["Sphinx (>=5.1.1)", "black (==24.8.0)", "build (>=0.10.0)", "coverage[toml] (>=4.5.4)", "fixit (==2.1.0)", "flake8 (==7.1.2)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.5)", "maturin (>=1.7.0,<1.8)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18) ; platform_system != \"Windows\"", "setuptools-rust (>=1.5.2)", "setuptools_scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.8.0)", "usort (==1.0.8.post1)"] + +[[package]] +name = "line-profiler" +version = "4.2.0" +description = "Line-by-line profiler" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "line_profiler-4.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:70e2503f52ee6464ac908b578d73ad6dae21d689c95f2252fee97d7aa8426693"}, + {file = "line_profiler-4.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b6047c8748d7a2453522eaea3edc8d9febc658b57f2ea189c03fe3d5e34595b5"}, + {file = "line_profiler-4.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0048360a2afbd92c0b423f8207af1f6581d85c064c0340b0d02c63c8e0c8292c"}, + {file = "line_profiler-4.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0e71fa1c85f21e3de575c7c617fd4eb607b052cc7b4354035fecc18f3f2a4317"}, + {file = "line_profiler-4.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5ec99d48cffdf36efbcd7297e81cc12bf2c0a7e0627a567f3ab0347e607b242"}, + {file = "line_profiler-4.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bfc9582f19a64283434fc6a3fd41a3a51d59e3cce2dc7adc5fe859fcae67e746"}, + {file = "line_profiler-4.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2b5dcfb3205e18c98c94388065f1604dc9d709df4dd62300ff8c5bbbd9bd163f"}, + {file = "line_profiler-4.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:4999eb1db5d52cb34a5293941986eea4357fb9fe3305a160694e5f13c9ec4008"}, + {file = "line_profiler-4.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:402406f200401a496fb93e1788387bf2d87c921d7f8f7e5f88324ac9efb672ac"}, + {file = "line_profiler-4.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d9a0b5696f1ad42bb31e90706e5d57845833483d1d07f092b66b4799847a2f76"}, + {file = "line_profiler-4.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2f950fa19f797a9ab55c8d7b33a7cdd95c396cf124c3adbc1cf93a1978d2767"}, + {file = "line_profiler-4.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d09fd8f580716da5a0b9a7f544a306b468f38eee28ba2465c56e0aa5d7d1822"}, + {file = "line_profiler-4.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:628f585960c6538873a9760d112db20b76b6035d3eaad7711a8bd80fa909d7ea"}, + {file = "line_profiler-4.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:63ed929c7d41e230cc1c4838c25bbee165d7f2fa974ca28d730ea69e501fc44d"}, + {file = "line_profiler-4.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6bda74fc206ba375396068526e9e7b5466a24c7e54cbd6ee1c98c1e0d1f0fd99"}, + {file = "line_profiler-4.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:eaf6eb827c202c07b8b8d82363bb039a6747fbf84ca04279495a91b7da3b773f"}, + {file = "line_profiler-4.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:82d29887f1226938a86db30ca3a125b1bde89913768a2a486fa14d0d3f8c0d91"}, + {file = "line_profiler-4.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bf60706467203db0a872b93775a5e5902a02b11d79f8f75a8f8ef381b75789e1"}, + {file = "line_profiler-4.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:934fd964eed9bed87e3c01e8871ee6bdc54d10edf7bf14d20e72f7be03567ae3"}, + {file = "line_profiler-4.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d623e5b37fa48c7ad0c29b4353244346a5dcb1bf75e117e19400b8ffd3393d1b"}, + {file = "line_profiler-4.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efcdbed9ba9003792d8bfd56c11bb3d4e29ad7e0d2f583e1c774de73bbf02933"}, + {file = "line_profiler-4.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:df0149c191a95f2dbc93155b2f9faaee563362d61e78b8986cdb67babe017cdc"}, + {file = "line_profiler-4.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e3a1ca491a8606ed674882b59354087f6e9ab6b94aa6d5fa5d565c6f2acc7a8"}, + {file = "line_profiler-4.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a85ff57d4ef9d899ca12d6b0883c3cab1786388b29d2fb5f30f909e70bb9a691"}, + {file = "line_profiler-4.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:49db0804e9e330076f0b048d63fd3206331ca0104dd549f61b2466df0f10ecda"}, + {file = "line_profiler-4.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2e983ed4fb2cd68bb8896f6bad7f29ddf9112b978f700448510477bc9fde18db"}, + {file = 
"line_profiler-4.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d6b27c5880b29369e6bebfe434a16c60cbcd290aa4c384ac612e5777737893f8"}, + {file = "line_profiler-4.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2584dc0af3107efa60bd2ccaa7233dca98e3dff4b11138c0ac30355bc87f1a"}, + {file = "line_profiler-4.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6767d8b922a7368b6917a47c164c3d96d48b82109ad961ef518e78800947cef4"}, + {file = "line_profiler-4.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3137672a769717be4da3a6e006c3bd7b66ad4a341ba89ee749ef96c158a15b22"}, + {file = "line_profiler-4.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:727e970d358616a1a33d51d696efec932a5ef7730785df62658bd7e74aa58951"}, + {file = "line_profiler-4.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8dd674be39b27920aeaaacb12df1f7e789cd60238972bf7caf0f352ce97bb502"}, + {file = "line_profiler-4.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9a0cbb5385021a793acb25bed1bcc1fe3f522092566e4f8dee71e5acde699deb"}, + {file = "line_profiler-4.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e1240667d49d147b1f4b6e966fc9a0223fd58b126f0ee58c8b7a82dfee39ec07"}, + {file = "line_profiler-4.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0a52df987b8d3a9b5ffb51f93171d2f4ba82cf8c384256bc8d13cbdbb3d3172"}, + {file = "line_profiler-4.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4691399961e64646a1293831da4dcaa5908588a41d845f55ac708f7da600a4f"}, + {file = "line_profiler-4.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c030eaf3f44c3dadf4c8d92bc9994afac2ca4d3ae90acd46272910de9f62a89"}, + {file = "line_profiler-4.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:91c8078bd7d6c86ca148074bc1583ff4028165153ad8f5f84c6d5ed33d4a150e"}, + {file = "line_profiler-4.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:9f63aa68533710dcdad665e641feff7392609299d54c399599768bcbbd3435eb"}, + {file = "line_profiler-4.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:80dd7e7990e346ed8ef32702f8fe3c60abdb0de95980d422c02f1ef30a6a828d"}, + {file = "line_profiler-4.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:31e1057448cfdb2678756163135b43bbbf698b2a1f7c88eb807f3fb2cdc2e3e7"}, + {file = "line_profiler-4.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ea02ccd7dc97b5777c032297991b5637130fbd07fa2c6a1f89f248aa12ef71b"}, + {file = "line_profiler-4.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4bbbc4e8545f0c187cfed7c323b8cc1121d28001b222b26f6bc3bc554ba82d4f"}, + {file = "line_profiler-4.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d76d37c1084210363261d08eaabd30310eefb707ba8ab736a61e43930afaf47"}, + {file = "line_profiler-4.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:22f84c3dbb807a26c115626bee19cb5f93683fa08c8d3836ec30af06fa9eb5c3"}, + {file = "line_profiler-4.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e6131bcd5888371b61e05631555592feba12e73c96596b8d26ffe03cea0fc088"}, + {file = "line_profiler-4.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fb58aa12cf64f0176d84bc4033bb0701fe8075d5da57149839ef895d961bbdad"}, + {file = "line_profiler-4.2.0.tar.gz", hash = "sha256:09e10f25f876514380b3faee6de93fb0c228abba85820ba1a591ddb3eb451a96"}, +] + +[package.extras] +all = ["Cython (>=3.0.3)", "IPython (>=7.14.0) ; python_version < \"3.7.0\" and python_version >= \"3.6.0\"", "IPython (>=7.18.0) ; python_version < \"3.8.0\" and 
python_version >= \"3.7.0\"", "IPython (>=8.12.2) ; python_version < \"3.9.0\" and python_version >= \"3.8.0\"", "IPython (>=8.14.0) ; python_version < \"4.0.0\" and python_version >= \"3.9.0\"", "cibuildwheel (>=2.11.2) ; python_version < \"4.0\" and python_version >= \"3.11\"", "cibuildwheel (>=2.11.2) ; python_version == \"3.10\"", "cibuildwheel (>=2.11.2) ; python_version == \"3.7\"", "cibuildwheel (>=2.11.2) ; python_version == \"3.8\"", "cibuildwheel (>=2.11.2) ; python_version == \"3.9\"", "cibuildwheel (>=2.8.1) ; python_version == \"3.6\"", "cmake (>=3.21.2)", "coverage[toml] (>=6.1.1) ; python_version == \"3.6\"", "coverage[toml] (>=6.5.0) ; python_version < \"3.12\" and python_version >= \"3.10\"", "coverage[toml] (>=6.5.0) ; python_version == \"3.7\"", "coverage[toml] (>=6.5.0) ; python_version == \"3.8\"", "coverage[toml] (>=6.5.0) ; python_version == \"3.9\"", "coverage[toml] (>=7.3.0) ; python_version < \"4.0\" and python_version >= \"3.12\"", "ninja (>=1.10.2)", "pytest (>=6.2.5) ; python_version == \"3.6\"", "pytest (>=7.4.4) ; python_version < \"4.0\" and python_version >= \"3.13\"", "pytest (>=7.4.4) ; python_version == \"3.10\"", "pytest (>=7.4.4) ; python_version == \"3.11\"", "pytest (>=7.4.4) ; python_version == \"3.12\"", "pytest (>=7.4.4) ; python_version == \"3.7\"", "pytest (>=7.4.4) ; python_version == \"3.8\"", "pytest (>=7.4.4) ; python_version == \"3.9\"", "pytest-cov (>=3.0.0) ; python_version >= \"3.6.0\"", "rich (>=12.3.0)", "scikit-build (>=0.11.1)", "setuptools (>=41.0.1) ; python_version < \"3.8\" and python_version >= \"3.6\"", "setuptools (>=68.2.2) ; python_version < \"4.0\" and python_version >= \"3.8\"", "ubelt (>=1.3.4)", "xdoctest (>=1.1.3)"] +all-strict = ["Cython (==3.0.3)", "IPython (==7.14.0) ; python_version < \"3.7.0\" and python_version >= \"3.6.0\"", "IPython (==7.18.0) ; python_version < \"3.8.0\" and python_version >= \"3.7.0\"", "IPython (==8.12.2) ; python_version < \"3.9.0\" and python_version >= \"3.8.0\"", "IPython (==8.14.0) ; python_version < \"4.0.0\" and python_version >= \"3.9.0\"", "cibuildwheel (==2.11.2) ; python_version < \"4.0\" and python_version >= \"3.11\"", "cibuildwheel (==2.11.2) ; python_version == \"3.10\"", "cibuildwheel (==2.11.2) ; python_version == \"3.7\"", "cibuildwheel (==2.11.2) ; python_version == \"3.8\"", "cibuildwheel (==2.11.2) ; python_version == \"3.9\"", "cibuildwheel (==2.8.1) ; python_version == \"3.6\"", "cmake (==3.21.2)", "coverage[toml] (==6.1.1) ; python_version == \"3.6\"", "coverage[toml] (==6.5.0) ; python_version < \"3.12\" and python_version >= \"3.10\"", "coverage[toml] (==6.5.0) ; python_version == \"3.7\"", "coverage[toml] (==6.5.0) ; python_version == \"3.8\"", "coverage[toml] (==6.5.0) ; python_version == \"3.9\"", "coverage[toml] (==7.3.0) ; python_version < \"4.0\" and python_version >= \"3.12\"", "ninja (==1.10.2)", "pytest (==6.2.5) ; python_version == \"3.6\"", "pytest (==7.4.4) ; python_version < \"4.0\" and python_version >= \"3.13\"", "pytest (==7.4.4) ; python_version == \"3.10\"", "pytest (==7.4.4) ; python_version == \"3.11\"", "pytest (==7.4.4) ; python_version == \"3.12\"", "pytest (==7.4.4) ; python_version == \"3.7\"", "pytest (==7.4.4) ; python_version == \"3.8\"", "pytest (==7.4.4) ; python_version == \"3.9\"", "pytest-cov (==3.0.0) ; python_version >= \"3.6.0\"", "rich (==12.3.0)", "scikit-build (==0.11.1)", "setuptools (==41.0.1) ; python_version < \"3.8\" and python_version >= \"3.6\"", "setuptools (==68.2.2) ; python_version < \"4.0\" and python_version >= 
\"3.8\"", "ubelt (==1.3.4)", "xdoctest (==1.1.3)"] +ipython = ["IPython (>=7.14.0) ; python_version < \"3.7.0\" and python_version >= \"3.6.0\"", "IPython (>=7.18.0) ; python_version < \"3.8.0\" and python_version >= \"3.7.0\"", "IPython (>=8.12.2) ; python_version < \"3.9.0\" and python_version >= \"3.8.0\"", "IPython (>=8.14.0) ; python_version < \"4.0.0\" and python_version >= \"3.9.0\""] +ipython-strict = ["IPython (==7.14.0) ; python_version < \"3.7.0\" and python_version >= \"3.6.0\"", "IPython (==7.18.0) ; python_version < \"3.8.0\" and python_version >= \"3.7.0\"", "IPython (==8.12.2) ; python_version < \"3.9.0\" and python_version >= \"3.8.0\"", "IPython (==8.14.0) ; python_version < \"4.0.0\" and python_version >= \"3.9.0\""] +optional = ["IPython (>=7.14.0) ; python_version < \"3.7.0\" and python_version >= \"3.6.0\"", "IPython (>=7.18.0) ; python_version < \"3.8.0\" and python_version >= \"3.7.0\"", "IPython (>=8.12.2) ; python_version < \"3.9.0\" and python_version >= \"3.8.0\"", "IPython (>=8.14.0) ; python_version < \"4.0.0\" and python_version >= \"3.9.0\"", "rich (>=12.3.0)"] +optional-strict = ["IPython (==7.14.0) ; python_version < \"3.7.0\" and python_version >= \"3.6.0\"", "IPython (==7.18.0) ; python_version < \"3.8.0\" and python_version >= \"3.7.0\"", "IPython (==8.12.2) ; python_version < \"3.9.0\" and python_version >= \"3.8.0\"", "IPython (==8.14.0) ; python_version < \"4.0.0\" and python_version >= \"3.9.0\"", "rich (==12.3.0)"] +tests = ["coverage[toml] (>=6.1.1) ; python_version == \"3.6\"", "coverage[toml] (>=6.5.0) ; python_version < \"3.12\" and python_version >= \"3.10\"", "coverage[toml] (>=6.5.0) ; python_version == \"3.7\"", "coverage[toml] (>=6.5.0) ; python_version == \"3.8\"", "coverage[toml] (>=6.5.0) ; python_version == \"3.9\"", "coverage[toml] (>=7.3.0) ; python_version < \"4.0\" and python_version >= \"3.12\"", "pytest (>=6.2.5) ; python_version == \"3.6\"", "pytest (>=7.4.4) ; python_version < \"4.0\" and python_version >= \"3.13\"", "pytest (>=7.4.4) ; python_version == \"3.10\"", "pytest (>=7.4.4) ; python_version == \"3.11\"", "pytest (>=7.4.4) ; python_version == \"3.12\"", "pytest (>=7.4.4) ; python_version == \"3.7\"", "pytest (>=7.4.4) ; python_version == \"3.8\"", "pytest (>=7.4.4) ; python_version == \"3.9\"", "pytest-cov (>=3.0.0) ; python_version >= \"3.6.0\"", "ubelt (>=1.3.4)", "xdoctest (>=1.1.3)"] +tests-strict = ["coverage[toml] (==6.1.1) ; python_version == \"3.6\"", "coverage[toml] (==6.5.0) ; python_version < \"3.12\" and python_version >= \"3.10\"", "coverage[toml] (==6.5.0) ; python_version == \"3.7\"", "coverage[toml] (==6.5.0) ; python_version == \"3.8\"", "coverage[toml] (==6.5.0) ; python_version == \"3.9\"", "coverage[toml] (==7.3.0) ; python_version < \"4.0\" and python_version >= \"3.12\"", "pytest (==6.2.5) ; python_version == \"3.6\"", "pytest (==7.4.4) ; python_version < \"4.0\" and python_version >= \"3.13\"", "pytest (==7.4.4) ; python_version == \"3.10\"", "pytest (==7.4.4) ; python_version == \"3.11\"", "pytest (==7.4.4) ; python_version == \"3.12\"", "pytest (==7.4.4) ; python_version == \"3.7\"", "pytest (==7.4.4) ; python_version == \"3.8\"", "pytest (==7.4.4) ; python_version == \"3.9\"", "pytest-cov (==3.0.0) ; python_version >= \"3.6.0\"", "ubelt (==1.3.4)", "xdoctest (==1.1.3)"] + +[[package]] +name = "lsprotocol" +version = "2023.0.1" +description = "Python implementation of the Language Server Protocol." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "lsprotocol-2023.0.1-py3-none-any.whl", hash = "sha256:c75223c9e4af2f24272b14c6375787438279369236cd568f596d4951052a60f2"}, + {file = "lsprotocol-2023.0.1.tar.gz", hash = "sha256:cc5c15130d2403c18b734304339e51242d3018a05c4f7d0f198ad6e0cd21861d"}, +] + +[package.dependencies] +attrs = ">=21.3.0" +cattrs = "!=23.2.1" + +[[package]] +name = "lxml" +version = "5.4.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c"}, + {file = "lxml-5.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c"}, + {file = "lxml-5.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b"}, + {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b"}, + {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563"}, + {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5"}, + {file = "lxml-5.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776"}, + {file = "lxml-5.4.0-cp310-cp310-win32.whl", hash = "sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7"}, + {file = "lxml-5.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250"}, + {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9"}, + {file = "lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7"}, + {file = 
"lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8"}, + {file = "lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86"}, + {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056"}, + {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7"}, + {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd"}, + {file = "lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751"}, + {file = "lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4"}, + {file = "lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539"}, + {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4"}, + {file = "lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7"}, + {file = "lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079"}, + {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20"}, + {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8"}, + {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f"}, + {file = "lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc"}, + {file = "lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f"}, + {file = "lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2"}, + {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0"}, + {file = "lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8"}, + {file = "lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982"}, + {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61"}, + {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54"}, + {file = "lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b"}, + {file = 
"lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a"}, + {file = "lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82"}, + {file = "lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f"}, + {file = "lxml-5.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7be701c24e7f843e6788353c055d806e8bd8466b52907bafe5d13ec6a6dbaecd"}, + {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb54f7c6bafaa808f27166569b1511fc42701a7713858dddc08afdde9746849e"}, + {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97dac543661e84a284502e0cf8a67b5c711b0ad5fb661d1bd505c02f8cf716d7"}, + {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:c70e93fba207106cb16bf852e421c37bbded92acd5964390aad07cb50d60f5cf"}, + {file = "lxml-5.4.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9c886b481aefdf818ad44846145f6eaf373a20d200b5ce1a5c8e1bc2d8745410"}, + {file = "lxml-5.4.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:fa0e294046de09acd6146be0ed6727d1f42ded4ce3ea1e9a19c11b6774eea27c"}, + {file = "lxml-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:61c7bbf432f09ee44b1ccaa24896d21075e533cd01477966a5ff5a71d88b2f56"}, + {file = "lxml-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121"}, + {file = "lxml-5.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92"}, + {file = "lxml-5.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5"}, + {file = "lxml-5.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6"}, + {file = "lxml-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063"}, + {file = "lxml-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49"}, + {file = "lxml-5.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eaf24066ad0b30917186420d51e2e3edf4b0e2ea68d8cd885b14dc8afdcf6556"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b31a3a77501d86d8ade128abb01082724c0dfd9524f542f2f07d693c9f1175f"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e108352e203c7afd0eb91d782582f00a0b16a948d204d4dec8565024fafeea5"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11a96c3b3f7551c8a8109aa65e8594e551d5a84c76bf950da33d0fb6dfafab7"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:ca755eebf0d9e62d6cb013f1261e510317a41bf4650f22963474a663fdfe02aa"}, + {file = "lxml-5.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = 
"sha256:4cd915c0fb1bed47b5e6d6edd424ac25856252f09120e3e8ba5154b6b921860e"}, + {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:226046e386556a45ebc787871d6d2467b32c37ce76c2680f5c608e25823ffc84"}, + {file = "lxml-5.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b108134b9667bcd71236c5a02aad5ddd073e372fb5d48ea74853e009fe38acb6"}, + {file = "lxml-5.4.0-cp38-cp38-win32.whl", hash = "sha256:1320091caa89805df7dcb9e908add28166113dcd062590668514dbd510798c88"}, + {file = "lxml-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:073eb6dcdf1f587d9b88c8c93528b57eccda40209cf9be549d469b942b41d70b"}, + {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bda3ea44c39eb74e2488297bb39d47186ed01342f0022c8ff407c250ac3f498e"}, + {file = "lxml-5.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9ceaf423b50ecfc23ca00b7f50b64baba85fb3fb91c53e2c9d00bc86150c7e40"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:664cdc733bc87449fe781dbb1f309090966c11cc0c0cd7b84af956a02a8a4729"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67ed8a40665b84d161bae3181aa2763beea3747f748bca5874b4af4d75998f87"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4a3bd174cc9cdaa1afbc4620c049038b441d6ba07629d89a83b408e54c35cd"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b0989737a3ba6cf2a16efb857fb0dfa20bc5c542737fddb6d893fde48be45433"}, + {file = "lxml-5.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:dc0af80267edc68adf85f2a5d9be1cdf062f973db6790c1d065e45025fa26140"}, + {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:639978bccb04c42677db43c79bdaa23785dc7f9b83bfd87570da8207872f1ce5"}, + {file = "lxml-5.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a99d86351f9c15e4a901fc56404b485b1462039db59288b203f8c629260a142"}, + {file = "lxml-5.4.0-cp39-cp39-win32.whl", hash = "sha256:3e6d5557989cdc3ebb5302bbdc42b439733a841891762ded9514e74f60319ad6"}, + {file = "lxml-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8c9b7f16b63e65bbba889acb436a1034a82d34fa09752d754f88d708eca80e1"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571"}, + {file = "lxml-5.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5f11a1526ebd0dee85e7b1e39e39a0cc0d9d03fb527f56d8457f6df48a10dc0c"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b4afaf38bf79109bb060d9016fad014a9a48fb244e11b94f74ae366a64d252"}, + {file = 
"lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de6f6bb8a7840c7bf216fb83eec4e2f79f7325eca8858167b68708b929ab2172"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5cca36a194a4eb4e2ed6be36923d3cffd03dcdf477515dea687185506583d4c9"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b7c86884ad23d61b025989d99bfdd92a7351de956e01c61307cb87035960bcb1"}, + {file = "lxml-5.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:53d9469ab5460402c19553b56c3648746774ecd0681b1b27ea74d5d8a3ef5590"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56dbdbab0551532bb26c19c914848d7251d73edb507c3079d6805fa8bba5b706"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14479c2ad1cb08b62bb941ba8e0e05938524ee3c3114644df905d2331c76cd57"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32697d2ea994e0db19c1df9e40275ffe84973e4232b5c274f47e7c1ec9763cdd"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24f6df5f24fc3385f622c0c9d63fe34604893bc1a5bdbb2dbf5870f85f9a404a"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:151d6c40bc9db11e960619d2bf2ec5829f0aaffb10b41dcf6ad2ce0f3c0b2325"}, + {file = "lxml-5.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4025bf2884ac4370a3243c5aa8d66d3cb9e15d3ddd0af2d796eccc5f0244390e"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9459e6892f59ecea2e2584ee1058f5d8f629446eab52ba2305ae13a32a059530"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47fb24cc0f052f0576ea382872b3fc7e1f7e3028e53299ea751839418ade92a6"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50441c9de951a153c698b9b99992e806b71c1f36d14b154592580ff4a9d0d877"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ab339536aa798b1e17750733663d272038bf28069761d5be57cb4a9b0137b4f8"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9776af1aad5a4b4a1317242ee2bea51da54b2a7b7b48674be736d463c999f37d"}, + {file = "lxml-5.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:63e7968ff83da2eb6fdda967483a7a023aa497d85ad8f05c3ad9b1f2e8c84987"}, + {file = "lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml_html_clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.11,<3.1.0)"] + +[[package]] +name = "lxml-stubs" +version = "0.5.1" +description = "Type annotations for the lxml package" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d"}, + {file = "lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272"}, +] + +[package.extras] +test = ["coverage[toml] (>=7.2.5)", "mypy (>=1.2.0)", "pytest (>=7.3.0)", "pytest-mypy-plugins (>=1.10.1)"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.15.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = 
"mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, +] + +[package.dependencies] 
+mypy_extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "numpy" +version = "2.0.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, + {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = 
"numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = 
"numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pandas-stubs" +version = "2.2.2.240807" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pandas_stubs-2.2.2.240807-py3-none-any.whl", hash = "sha256:893919ad82be4275f0d07bb47a95d08bae580d3fdea308a7acfcb3f02e76186e"}, + {file = "pandas_stubs-2.2.2.240807.tar.gz", hash = "sha256:64a559725a57a449f46225fbafc422520b7410bff9252b661a225b5559192a93"}, +] + +[package.dependencies] +numpy = ">=1.23.5" +types-pytz = ">=2022.1.1" + +[[package]] +name = "parameterized" +version = "0.9.0" +description = "Parameterized testing with any Python test framework" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"}, + {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"}, +] + +[package.extras] +dev = ["jinja2"] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\"" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "platformdirs" +version = "4.3.8" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "posthog" +version = "4.0.1" +description = "Integrate PostHog into any python application." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "posthog-4.0.1-py2.py3-none-any.whl", hash = "sha256:0c76cbab3e5ab0096c4f591c0b536465478357270f926d11ff833c97984659d8"}, + {file = "posthog-4.0.1.tar.gz", hash = "sha256:77e7ebfc6086972db421d3e05c91d5431b2b964865d33a9a32e55dd88da4bff8"}, +] + +[package.dependencies] +backoff = ">=1.10.0" +distro = ">=1.5.0" +python-dateutil = ">=2.2" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +dev = ["black", "django-stubs", "flake8", "flake8-print", "isort", "lxml", "mypy", "mypy-baseline", "pre-commit", "pydantic", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six"] +langchain = ["langchain (>=0.2.0)"] +sentry = ["django", "sentry-sdk"] +test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"] + +[[package]] +name = "pre-commit" +version = "4.2.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"}, + {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prompt-toolkit" +version = "3.0.51" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07"}, + {file = "prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\"" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "py-cpuinfo" +version = "9.0.0" +description = "Get CPU info with pure Python" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, + {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, +] + +[[package]] +name = "pydantic" +version = "2.11.4" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb"}, + {file = "pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = 
"pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = 
"pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pygls" +version = "1.3.1" +description = "A pythonic generic language server (pronounced like 'pie glass')" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pygls-1.3.1-py3-none-any.whl", hash = "sha256:6e00f11efc56321bdeb6eac04f6d86131f654c7d49124344a9ebb968da3dd91e"}, + {file = "pygls-1.3.1.tar.gz", hash = "sha256:140edceefa0da0e9b3c533547c892a42a7d2fd9217ae848c330c53d266a55018"}, +] + +[package.dependencies] +cattrs = ">=23.1.2" +lsprotocol = "2023.0.1" + +[package.extras] +ws = ["websockets (>=11.0.3)"] + +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.3.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-benchmark" +version = "5.1.0" +description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest-benchmark-5.1.0.tar.gz", hash = "sha256:9ea661cdc292e8231f7cd4c10b0319e56a2118e2c09d9f50e1b3d150d2aca105"}, + {file = "pytest_benchmark-5.1.0-py3-none-any.whl", hash = "sha256:922de2dfa3033c227c96da942d1878191afa135a29485fb942e85dff1c592c89"}, +] + +[package.dependencies] +py-cpuinfo = "*" +pytest = ">=8.1" + +[package.extras] +aspect = ["aspectlib"] +elasticsearch = ["elasticsearch"] +histogram = ["pygal", "pygaljs", "setuptools"] + +[[package]] +name = "pytest-cov" +version = "6.1.1" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-timeout" +version = "2.4.0" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2"}, + {file = "pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "readchar" +version = "4.2.1" +description = "Library to easily read single chars and key strokes" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77"}, + {file = "readchar-4.2.1.tar.gz", hash = "sha256:91ce3faf07688de14d800592951e5575e9c7a3213738ed01d394dcc949b79adb"}, +] + +[[package]] +name = "referencing" +version = "0.36.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, + {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py 
= ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "14.0.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, + {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rpds-py" +version = "0.25.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "rpds_py-0.25.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c146a24a8f0dc4a7846fb4640b88b3a68986585b8ce8397af15e66b7c5817439"}, + {file = "rpds_py-0.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:77814c7a4e1dc43fba73aeb4c1ef0fe37d901f3aa869a4823de5ea843a283fd0"}, + {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5afbff2822016db3c696cb0c1432e6b1f0e34aa9280bc5184dc216812a24e70d"}, + {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ffae52cd76837a5c16409359d236b1fced79e42e0792e8adf375095a5e855368"}, + {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddf9426b740a7047b2b0dddcba775211542e8053ce1e509a1759b665fe573508"}, + {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cad834f1a8f51eb037c3c4dc72c884c9e1e0644d900e2d45aa76450e4aa6282"}, + {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c46bd76986e05689376d28fdc2b97d899576ce3e3aaa5a5f80f67a8300b26eb3"}, + {file = "rpds_py-0.25.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f3353a2d7eb7d5e0af8a7ca9fc85a34ba12619119bcdee6b8a28a6373cda65ce"}, + {file = "rpds_py-0.25.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdc648d4e81eef5ac4bb35d731562dffc28358948410f3274d123320e125d613"}, + {file = "rpds_py-0.25.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:098d446d76d26e394b440d73921b49c1c90274d46ccbaadf346b1b78f9fdd4b1"}, + {file = "rpds_py-0.25.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c624c82e645f6b5465d08cdc802fb0cd53aa1478782fb2992b9e09f2c9426865"}, + {file = "rpds_py-0.25.0-cp310-cp310-win32.whl", hash = "sha256:9d0041bd9e2d2ef803b32d84a0c8115d178132da5691346465953a2a966ba8ca"}, + {file 
= "rpds_py-0.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:d8b41195a6b03280ab00749a438fbce761e7acfd5381051a570239d752376f27"}, + {file = "rpds_py-0.25.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6587ece9f205097c62d0e3d3cb7c06991eb0083ab6a9cf48951ec49c2ab7183c"}, + {file = "rpds_py-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b0a5651e350997cebcdc23016dca26c4d1993d29015a535284da3159796e30b6"}, + {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3752a015db89ea3e9c04d5e185549be4aa29c1882150e094c614c0de8e788feb"}, + {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a05b199c11d2f39c72de8c30668734b5d20974ad44b65324ea3e647a211f135d"}, + {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2f91902fc0c95dd1fa6b30ebd2af83ace91e592f7fd6340a375588a9d4b9341b"}, + {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98c729193e7abe498565266933c125780fb646e977e94289cadbb36e4eeeb370"}, + {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36a7564deaac3f372e8b8b701eb982ea3113516e8e08cd87e3dc6ccf29bad14b"}, + {file = "rpds_py-0.25.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b0c0f671a53c129ea48f9481e95532579cc489ab5a0ffe750c9020787181c48"}, + {file = "rpds_py-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d21408eaa157063f56e58ca50da27cad67c4395a85fb44cc7a31253ea4e58918"}, + {file = "rpds_py-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a413674eb2bd2ecb2b93fcc928871b19f7220ee04bca4af3375c50a2b32b5a50"}, + {file = "rpds_py-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:94f89161a3e358db33310a8a064852a6eb119ed1aa1a3dba927b4e5140e65d00"}, + {file = "rpds_py-0.25.0-cp311-cp311-win32.whl", hash = "sha256:540cd89d256119845b7f8f56c4bb80cad280cab92d9ca473be49ea13e678fd44"}, + {file = "rpds_py-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:2649ff19291928243f90c86e4dc9cd86c8c4c6a73c3693ba2e23bc2fbcd8338c"}, + {file = "rpds_py-0.25.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:89260601d497fa5957c3e46f10b16cfa2a4808ad4dd46cddc0b997461923a7d9"}, + {file = "rpds_py-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:637ec39f97e342a3f76af739eda96800549d92f3aa27a2170b6dcbdffd49f480"}, + {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd08c82336412a39a598e5baccab2ee2d7bd54e9115c8b64f2febb45da5c368"}, + {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:837fd066f974e5b98c69ac83ec594b79a2724a39a92a157b8651615e5032e530"}, + {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:653a066d2a4a332d4f8a11813e8124b643fa7b835b78468087a9898140469eee"}, + {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91a51499be506022b9f09facfc42f0c3a1c45969c0fc8f0bbebc8ff23ab9e531"}, + {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb91471640390a82744b164f8a0be4d7c89d173b1170713f9639c6bad61e9e64"}, + {file = "rpds_py-0.25.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28bd2969445acc2d6801a22f97a43134ae3cb18e7495d668bfaa8d82b8526cdc"}, + {file = "rpds_py-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:f933b35fa563f047896a70b69414dfb3952831817e4c4b3a6faa96737627f363"}, + {file = "rpds_py-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:80b37b37525492250adc7cbca20ae7084f86eb3eb62414b624d2a400370853b1"}, + {file = "rpds_py-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:864573b6440b770db5a8693547a8728d7fd32580d4903010a8eee0bb5b03b130"}, + {file = "rpds_py-0.25.0-cp312-cp312-win32.whl", hash = "sha256:ad4a896896346adab86d52b31163c39d49e4e94c829494b96cc064bff82c5851"}, + {file = "rpds_py-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:4fbec54cc42fa90ca69158d75f125febc4116b2d934e71c78f97de1388a8feb2"}, + {file = "rpds_py-0.25.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4e5fe366fa53bd6777cf5440245366705338587b2cf8d61348ddaad744eb591a"}, + {file = "rpds_py-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54f925ff8d4443b7cae23a5215954abbf4736a3404188bde53c4d744ac001d89"}, + {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58258a66255b2500ddaa4f33191ada5ec983a429c09eb151daf81efbb9aa115"}, + {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f3a57f08c558d0983a708bfe6d1265f47b5debff9b366b2f2091690fada055c"}, + {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7d60d42f1b9571341ad2322e748f7a60f9847546cd801a3a0eb72a1b54c6519"}, + {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a54b94b0e4de95aa92618906fb631779d9fde29b4bf659f482c354a3a79fd025"}, + {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af1c2241919304cc2f90e7dcb3eb1c1df6fb4172dd338e629dd6410e48b3d1a0"}, + {file = "rpds_py-0.25.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7d34547810bfd61acf8a441e8a3651e7a919e8e8aed29850be14a1b05cfc6f41"}, + {file = "rpds_py-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66568caacf18542f0cf213db7adf3de2da6ad58c7bf2c4fafec0d81ae557443b"}, + {file = "rpds_py-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e49e4c3e899c32884d7828c91d6c3aff08d2f18857f50f86cc91187c31a4ca58"}, + {file = "rpds_py-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:20af08b0b2d5b196a2bcb70becf0b97ec5af579cee0ae6750b08a2eea3b6c77d"}, + {file = "rpds_py-0.25.0-cp313-cp313-win32.whl", hash = "sha256:d3dc8d6ce8f001c80919bdb49d8b0b815185933a0b8e9cdeaea42b0b6f27eeb0"}, + {file = "rpds_py-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:113d134dc5a8d2503630ca2707b58a1bf5b1b3c69b35c7dab8690ee650c111b8"}, + {file = "rpds_py-0.25.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:6c72a4a8fab10bc96720ad40941bb471e3b1150fb8d62dab205d495511206cf1"}, + {file = "rpds_py-0.25.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:bb979162323f3534dce84b59f86e689a0761a2a300e0212bfaedfa80d4eb8100"}, + {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35c8cb5dcf7d36d3adf2ae0730b60fb550a8feb6e432bee7ef84162a0d15714b"}, + {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:673ba018df5ae5e7b6c9a021d51ffe39c0ae1daa0041611ed27a0bca634b2d2e"}, + {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16fb28d3a653f67c871a47c5ca0be17bce9fab8adb8bcf7bd09f3771b8c4d860"}, + {file = 
"rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12a84c3851f9e68633d883c01347db3cb87e6160120a489f9c47162cd276b0a5"}, + {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b5f457afffb45d3804728a54083e31fbaf460e902e3f7d063e56d0d0814301e"}, + {file = "rpds_py-0.25.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9442cbff21122e9a529b942811007d65eabe4182e7342d102caf119b229322c6"}, + {file = "rpds_py-0.25.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:383cf0d4288baf5a16812ed70d54ecb7f2064e255eb7fe42c38e926adeae4534"}, + {file = "rpds_py-0.25.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0dcdee07ebf76223092666c72a9552db276fbe46b98830ecd1bb836cc98adc81"}, + {file = "rpds_py-0.25.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5bbfbd9c74c4dd74815bd532bf29bedea6d27d38f35ef46f9754172a14e4c655"}, + {file = "rpds_py-0.25.0-cp313-cp313t-win32.whl", hash = "sha256:90dbd2c42cb6463c07020695800ae8f347e7dbeff09da2975a988e467b624539"}, + {file = "rpds_py-0.25.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8c2ad59c4342a176cb3e0d5753e1c911eabc95c210fc6d0e913c32bf560bf012"}, + {file = "rpds_py-0.25.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9f9a1b15b875160186177f659cde2b0f899182b0aca49457d6396afc4bbda7b9"}, + {file = "rpds_py-0.25.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e849315963eb08c26167d0f2c0f9319c9bd379daea75092b3c595d70be6209d"}, + {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad37c29adc435e6d8b24be86b03596183ee8d4bb8580cc4c676879b0b896a99"}, + {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:587cad3959d3d85127cf5df1624cdce569bb3796372e00420baad46af7c56b9b"}, + {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce0518667855a1598d9b1f4fcf0fed1182c67c5ba4fe6a2c6bce93440a65cead"}, + {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c18cb2f6805861dcdf11fb0b3c111a0335f6475411687db2f6636f32bed66b0"}, + {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a21f4584f69547ae03aaa21be98753e85599f3437b84039da5dc20b53abe987"}, + {file = "rpds_py-0.25.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d7d65aa934899849628137ab391562cdb487c6ffb9b9781319a64a9c66afbce"}, + {file = "rpds_py-0.25.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fd9167e9604cb5a218a2e847aa8cdc5f98b379a673371978ee7b0c11b4d2e140"}, + {file = "rpds_py-0.25.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6c27156c8d836e7ff760767e93245b286ae028bfd81d305db676662d1f642637"}, + {file = "rpds_py-0.25.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:66087711faf29cb3ac8ab05341939aec29968626aff8ef18e483e229055dd9a7"}, + {file = "rpds_py-0.25.0-cp39-cp39-win32.whl", hash = "sha256:f2e69415e4e33cdeee50ebc2c4d8fcbef12c3181d9274e512ccd2a905a76aad1"}, + {file = "rpds_py-0.25.0-cp39-cp39-win_amd64.whl", hash = "sha256:58cfaa54752d6d2b4f10e87571688dbb7792327a69eca5417373d77d42787058"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57e9616a2a9da08fe0994e37a0c6f578fbaf6d35911bcba31e99660542d60c45"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6d95521901896a90a858993bfa3ec0f9160d3d97e8c8fefc279b3306cdadfee0"}, + {file = 
"rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33aef3914a5b49db12ed3f24d214ffa50caefc8f4b0c7c7b9485bd4b231a898"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4acbe2349a3baac9cc212005b6cb4bbb7e5b34538886cde4f55dfc29173da1d6"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b75b5d3416b00d064a5e6f4814fdfb18a964a7cf38dc00b5c2c02fa30a7dd0b"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:542a6f1d0f400b9ce1facb3e30dd3dc84e4affc60353509b00a7bdcd064be91e"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60ba9d104f4e8496107b1cb86e45a68a16d13511dc3986e0780e9f85c2136f9"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6065a489b7b284efb29d57adffae2b9b5e9403d3c8d95cfa04e04e024e6b4e77"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6bcca4d0d24d8c37bfe0cafdaaf4346b6c516db21ccaad5c7fba0a0df818dfc9"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:8155e21203161e5c78791fc049b99f0bbbf14d1d1839c8c93c8344957f9e8e1e"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a1eda14db1ac7a2ab4536dfe69e4d37fdd765e8e784ae4451e61582ebb76012"}, + {file = "rpds_py-0.25.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:de34a7d1893be76cb015929690dce3bde29f4de08143da2e9ad1cedb11dbf80e"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0d63a86b457069d669c423f093db4900aa102f0e5a626973eff4db8355c0fd96"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89bb2b20829270aca28b1e5481be8ee24cb9aa86e6c0c81cb4ada2112c9588c5"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e103b48e63fd2b8a8e2b21ab5b5299a7146045626c2ed4011511ea8122d217"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fccd24c080850715c58a80200d367bc62b4bff6c9fb84e9564da1ebcafea6418"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12b42790c91e0041a98f0ec04244fb334696938793e785a5d4c7e56ca534d7da"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc907ea12216cfc5560148fc42459d86740fc739981c6feb94230dab09362679"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e11065b759c38c4945f8c9765ed2910e31fa5b2f7733401eb7d966f468367a2"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8abc1a3e29b599bf8bb5ad455256a757e8b0ed5621e7e48abe8209932dc6d11e"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:cd36b71f9f3bf195b2dd9be5eafbfc9409e6c8007aebc38a4dc051f522008033"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:805a0dff0674baa3f360c21dcbc622ae544f2bb4753d87a4a56a1881252a477e"}, + {file = "rpds_py-0.25.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:96742796f499ac23b59856db734e65b286d1214a0d9b57bcd7bece92d9201fa4"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash 
= "sha256:7715597186a7277be12729c896019226321bad1f047da381ab707b177aa5017c"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b049dd0792d51f07193cd934acec89abe84d2607109e6ca223b2f0ff24f0c7d"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87c6ff87b38f46d712418d78b34db1198408a3d9a42eddc640644aea561216b1"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:240251fd95b057c26f8538d0e673bf983eba4f38da95fbaf502bfc1a768b3984"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85587479f210350e9d9d25e505f422dd636e561658382ee8947357a4bac491ad"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:551897221bbc9de17bce4574810347db8ec1ba4ec2f50f35421790d34bdb6ef9"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d50ac3b772c10e0b918a5ce2e871138896bfb5f35050ff1ff87ddca45961fc"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8029c19c8a32ef3093c417dd16a5f806e7f529fcceea7c627b2635e9da5104da"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:fe7439d9c5b402af2c9911c7facda1808d0c8dbfa9cf085e6aeac511a23f7d87"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:77910d6bec321c9fccfe9cf5e407fed9d2c48a5e510473b4f070d5cf2413c003"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0ee0cc81f875e853ccdf3badb44b67f771fb9149baa9e752777ccdcaf052ad26"}, + {file = "rpds_py-0.25.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:469054e6b2f8e41f1fe62b95f617082019d343eddeec3219ff3909067e672fb9"}, + {file = "rpds_py-0.25.0.tar.gz", hash = "sha256:4d97661bf5848dd9e5eb7ded480deccf9d32ce2cd500b88a26acbf7bd2864985"}, +] + +[[package]] +name = "ruff" +version = "0.11.10" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58"}, + {file = "ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed"}, + {file = "ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b"}, + {file = "ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2"}, + {file = "ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523"}, + {file = "ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125"}, + {file = "ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad"}, + {file = "ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19"}, + {file = "ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224"}, + {file = "ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1"}, + {file = "ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6"}, +] + +[[package]] +name = "runs" +version = "1.2.2" +description = "🏃 Run a block of text as a subprocess 🏃" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "runs-1.2.2-py3-none-any.whl", hash = "sha256:0980dcbc25aba1505f307ac4f0e9e92cbd0be2a15a1e983ee86c24c87b839dfd"}, + {file = "runs-1.2.2.tar.gz", hash = "sha256:9dc1815e2895cfb3a48317b173b9f1eac9ba5549b36a847b5cc60c3bf82ecef1"}, +] + +[package.dependencies] +xmod = "*" + +[[package]] +name = "sentry-sdk" +version = "2.29.1" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "sentry_sdk-2.29.1-py2.py3-none-any.whl", hash = "sha256:90862fe0616ded4572da6c9dadb363121a1ae49a49e21c418f0634e9d10b4c19"}, + {file = "sentry_sdk-2.29.1.tar.gz", hash 
= "sha256:8d4a0206b95fa5fe85e5e7517ed662e3888374bdc342c00e435e10e6d831aa6d"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.26.11" + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +anthropic = ["anthropic (>=0.16)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] +http2 = ["httpcore[http2] (==1.*)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] +langchain = ["langchain (>=0.0.210)"] +launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] +litestar = ["litestar (>=2.0.0)"] +loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +openfeature = ["openfeature-sdk (>=0.7.1)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro"] +pure-eval = ["asttokens", "executing", "pure_eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +statsig = ["statsig (>=0.55.3)"] +tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "smmap" +version = "5.0.2" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e"}, + {file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "timeout-decorator" +version = "0.5.0" +description = "Timeout decorator" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "timeout-decorator-0.5.0.tar.gz", hash = "sha256:6a2f2f58db1c5b24a2cc79de6345760377ad8bdc13813f5265f6c3e63d16b3d7"}, +] + +[[package]] +name = "tomli" +version = "2.2.1" 
+description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] +markers = {main = "python_version < \"3.11\"", dev = "python_full_version <= \"3.11.0a6\""} + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "types-cffi" +version = "1.17.0.20250516" +description = "Typing stubs for cffi" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_cffi-1.17.0.20250516-py3-none-any.whl", hash = "sha256:b5a7b61fa60114072900a1f25094d0ea3d4f398d060128583ef644bb686d027d"}, + {file = "types_cffi-1.17.0.20250516.tar.gz", hash = "sha256:f63c42ab07fd71f4ed218e2dea64f8714e71c585db5c6bdef9ea8f57cf99979b"}, +] + +[package.dependencies] +types-setuptools = "*" + +[[package]] +name = "types-colorama" +version = "0.4.15.20240311" +description = "Typing stubs for colorama" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-colorama-0.4.15.20240311.tar.gz", hash = "sha256:a28e7f98d17d2b14fb9565d32388e419f4108f557a7d939a66319969b2b99c7a"}, + {file = "types_colorama-0.4.15.20240311-py3-none-any.whl", hash = "sha256:6391de60ddc0db3f147e31ecb230006a6823e81e380862ffca1e4695c13a0b8e"}, +] + +[[package]] +name = "types-decorator" 
+version = "5.2.0.20250324" +description = "Typing stubs for decorator" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_decorator-5.2.0.20250324-py3-none-any.whl", hash = "sha256:0740cee7ce57cf9cf2b306114a1588984255f706efa0f35b54b2cff290a110e2"}, + {file = "types_decorator-5.2.0.20250324.tar.gz", hash = "sha256:8fbd72b0dadc56176e48e5187de744e76fe45bcc91a25874baa75662412155d3"}, +] + +[[package]] +name = "types-docutils" +version = "0.21.0.20250516" +description = "Typing stubs for docutils" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_docutils-0.21.0.20250516-py3-none-any.whl", hash = "sha256:390e125385f9a40c9f46ec43c2f76c5719091afa44147592d7c1b8541928efca"}, + {file = "types_docutils-0.21.0.20250516.tar.gz", hash = "sha256:975e5ade9ef0b1f45b6b075f017a0f470ee4e53b0c2dd50ccbf3a04d9c53816f"}, +] + +[[package]] +name = "types-gevent" +version = "24.11.0.20250401" +description = "Typing stubs for gevent" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_gevent-24.11.0.20250401-py3-none-any.whl", hash = "sha256:6764faf861ea99250c38179c58076392c44019ac3393029f71b06c4a15e8c1d1"}, + {file = "types_gevent-24.11.0.20250401.tar.gz", hash = "sha256:1443f796a442062698e67d818fca50aa88067dee4021d457a7c0c6bedd6f46ca"}, +] + +[package.dependencies] +types-greenlet = "*" +types-psutil = "*" + +[[package]] +name = "types-greenlet" +version = "3.2.0.20250417" +description = "Typing stubs for greenlet" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_greenlet-3.2.0.20250417-py3-none-any.whl", hash = "sha256:7798b9fdf19d718a62e2d63351e112e7bee622898c6e6cec539296c3dec27808"}, + {file = "types_greenlet-3.2.0.20250417.tar.gz", hash = "sha256:eb006afcf281ec5756a75c1fd4a6c8a7be5d0cc09b2e82c4856c764760cfa0e3"}, +] + +[[package]] +name = "types-jsonschema" +version = "4.23.0.20250516" +description = "Typing stubs for jsonschema" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_jsonschema-4.23.0.20250516-py3-none-any.whl", hash = "sha256:e7d0dd7db7e59e63c26e3230e26ffc64c4704cc5170dc21270b366a35ead1618"}, + {file = "types_jsonschema-4.23.0.20250516.tar.gz", hash = "sha256:9ace09d9d35c4390a7251ccd7d833b92ccc189d24d1b347f26212afce361117e"}, +] + +[package.dependencies] +referencing = "*" + +[[package]] +name = "types-openpyxl" +version = "3.1.5.20250516" +description = "Typing stubs for openpyxl" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_openpyxl-3.1.5.20250516-py3-none-any.whl", hash = "sha256:87c6b04b30fd1cbab85dc93cbe4f57ce1eb3df5d2911f742a1a0e3bf94314dfc"}, + {file = "types_openpyxl-3.1.5.20250516.tar.gz", hash = "sha256:691339abe141a5713f115558cc39023ebdda6298bfa875d575cc5a961a3c5523"}, +] + +[[package]] +name = "types-pexpect" +version = "4.9.0.20250516" +description = "Typing stubs for pexpect" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_pexpect-4.9.0.20250516-py3-none-any.whl", hash = "sha256:84cbd7ae9da577c0d2629d4e4fd53cf074cd012296e01fd4fa1031e01973c28a"}, + {file = "types_pexpect-4.9.0.20250516.tar.gz", hash = "sha256:7baed9ee566fa24034a567cbec56a5cff189a021344e84383b14937b35d83881"}, +] + +[[package]] +name = "types-psutil" +version = "7.0.0.20250516" +description = "Typing stubs for psutil" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = 
"types_psutil-7.0.0.20250516-py3-none-any.whl", hash = "sha256:6a8a761c5161e4c9762ff4fb160af4491d08b67f3b1aebe85e1f2890e8378f23"}, + {file = "types_psutil-7.0.0.20250516.tar.gz", hash = "sha256:3d8aa8392ac4d1e5e9237d111cf9e59d8fce8f2bde3115e687623f5e1039a1ab"}, +] + +[[package]] +name = "types-pygments" +version = "2.19.0.20250516" +description = "Typing stubs for Pygments" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_pygments-2.19.0.20250516-py3-none-any.whl", hash = "sha256:db27de8b59591389cd7d14792483892c021c73b8389ef55fef40a48aa371fbcc"}, + {file = "types_pygments-2.19.0.20250516.tar.gz", hash = "sha256:b53fd07e197f0e7be38ee19598bd99c78be5ca5f9940849c843be74a2f81ab58"}, +] + +[package.dependencies] +types-docutils = "*" + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20250516" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93"}, + {file = "types_python_dateutil-2.9.0.20250516.tar.gz", hash = "sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5"}, +] + +[[package]] +name = "types-pytz" +version = "2025.2.0.20250516" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_pytz-2025.2.0.20250516-py3-none-any.whl", hash = "sha256:e0e0c8a57e2791c19f718ed99ab2ba623856b11620cb6b637e5f62ce285a7451"}, + {file = "types_pytz-2025.2.0.20250516.tar.gz", hash = "sha256:e1216306f8c0d5da6dafd6492e72eb080c9a166171fa80dd7a1990fd8be7a7b3"}, +] + +[[package]] +name = "types-regex" +version = "2024.11.6.20250403" +description = "Typing stubs for regex" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_regex-2024.11.6.20250403-py3-none-any.whl", hash = "sha256:e22c0f67d73f4b4af6086a340f387b6f7d03bed8a0bb306224b75c51a29b0001"}, + {file = "types_regex-2024.11.6.20250403.tar.gz", hash = "sha256:3fdf2a70bbf830de4b3a28e9649a52d43dabb57cdb18fbfe2252eefb53666665"}, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20250515" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_requests-2.32.0.20250515-py3-none-any.whl", hash = "sha256:f8eba93b3a892beee32643ff836993f15a785816acca21ea0ffa006f05ef0fb2"}, + {file = "types_requests-2.32.0.20250515.tar.gz", hash = "sha256:09c8b63c11318cb2460813871aaa48b671002e59fda67ca909e9883777787581"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "types-setuptools" +version = "80.7.0.20250516" +description = "Typing stubs for setuptools" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_setuptools-80.7.0.20250516-py3-none-any.whl", hash = "sha256:c1da6c11698139c8307c6df5987592df940e956912c204e42d844ba821dd2741"}, + {file = "types_setuptools-80.7.0.20250516.tar.gz", hash = "sha256:57274b58e05434de42088a86074c9e630e5786f759cf9cc1e3015e886297ca21"}, +] + +[[package]] +name = "types-six" +version = "1.17.0.20250515" +description = "Typing stubs for six" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_six-1.17.0.20250515-py3-none-any.whl", hash = "sha256:adfaa9568caf35e03d80ffa4ed765c33b282579c869b40bf4b6009c7d8db3fb1"}, + {file = "types_six-1.17.0.20250515.tar.gz", hash = 
"sha256:f4f7f0398cb79304e88397336e642b15e96fbeacf5b96d7625da366b069d2d18"}, +] + +[[package]] +name = "types-unidiff" +version = "0.7.0.20240505" +description = "Typing stubs for unidiff" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-unidiff-0.7.0.20240505.tar.gz", hash = "sha256:cf15201c7af233f8380f1a289d169e1f740a67369ff60ab7bd0a131402b33194"}, + {file = "types_unidiff-0.7.0.20240505-py3-none-any.whl", hash = "sha256:f01ac7bf7fe4094b3c97c8f08125278ea71455c18d984a980569aef2ab6a768b"}, +] + +[[package]] +name = "typeshed-client" +version = "2.7.0" +description = "A library for accessing stubs in typeshed." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typeshed_client-2.7.0-py3-none-any.whl", hash = "sha256:97084e5abc58a76ace2c4618ecaebd625f2d19bbd85aa1b3fb86216bf174bbea"}, + {file = "typeshed_client-2.7.0.tar.gz", hash = "sha256:e63df1e738588ad39f1226de042f4407ab6a99c456f0837063afd83b1415447c"}, +] + +[package.dependencies] +importlib-resources = ">=1.4.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "unidiff" +version = "0.7.5" +description = "Unified diff parsing/metadata extraction library." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "unidiff-0.7.5-py2.py3-none-any.whl", hash = "sha256:c93bf2265cc1ba2a520e415ab05da587370bc2a3ae9e0414329f54f0c2fc09e8"}, + {file = "unidiff-0.7.5.tar.gz", hash = "sha256:2e5f0162052248946b9f0970a40e9e124236bf86c82b70821143a6fc1dea2574"}, +] + +[[package]] +name = "unittest-xml-reporting" +version = "3.2.0" +description = "unittest-based test runner with Ant/JUnit like XML reporting." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "unittest-xml-reporting-3.2.0.tar.gz", hash = "sha256:edd8d3170b40c3a81b8cf910f46c6a304ae2847ec01036d02e9c0f9b85762d28"}, + {file = "unittest_xml_reporting-3.2.0-py2.py3-none-any.whl", hash = "sha256:f3d7402e5b3ac72a5ee3149278339db1a8f932ee405f48bcb9c681372f2717d5"}, +] + +[package.dependencies] +lxml = "*" + +[[package]] +name = "urllib3" +version = "2.4.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uv" +version = "0.7.6" +description = "An extremely fast Python package and project manager, written in Rust." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "uv-0.7.6-py3-none-linux_armv6l.whl", hash = "sha256:434f1820a8fbf54494c53d8ebb2b6509d98a2792876a2d990f90ac70afc9a11a"}, + {file = "uv-0.7.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:0bad870f797971423d7f654423cf3ccd3bbd3688f88aee3f84e79af008c6abae"}, + {file = "uv-0.7.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8a86cfefd0b9cd3b8a8577e79a0e61d52ade23a7876ed5b5312cc1f05baa140b"}, + {file = "uv-0.7.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:4cd32743d2c0c0b40ffbde48163ae2835353d319472aadabd71e9dcf98152e8b"}, + {file = "uv-0.7.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:32aecfd27bd724d8ca8bafa811a69d436fcd403d589b025fbbd2e967eb154b46"}, + {file = "uv-0.7.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e15ac957e0a319dba40c897b9408c93e603d2317807384ec8f7d47a9e17c0d85"}, + {file = "uv-0.7.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:832d7741117c41455ff43569b88892ec0a81938750a8bc4307e1160b70c91f3c"}, + {file = "uv-0.7.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17c79eec35c65bbd25180203be7266dd7d43381e02e28a8f2cb6ee809d008837"}, + {file = "uv-0.7.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c18b2437e254906b1f48710e1fc1b313052e2ee7261ff104d58b25ef2d347d98"}, + {file = "uv-0.7.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f46cfd2de04dd261cc75158c293de64f99cc907ab0d395f3a0f97c94e7f076a"}, + {file = "uv-0.7.6-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:c44311ed1a32e397d81e346e7b868e4ae22f2df2e5ba601e055683fa4cc68323"}, + {file = "uv-0.7.6-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:5e283166816f129f29023a4bfdf49fdb33e1e2bcb4e555e9d6996122867a44af"}, + {file = "uv-0.7.6-py3-none-musllinux_1_1_i686.whl", hash = "sha256:72e9337db681a16a7203abe112fedc249f01fe4cadd6d65d23c85031183dcf23"}, + {file = "uv-0.7.6-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:310e488493d03a843b838e9301af1731b02bc93b14bcaa38c62d448cebbdca3c"}, + {file = "uv-0.7.6-py3-none-win32.whl", hash = 
"sha256:e3fb41bd4bf88ab21df773b642465fffc469e173645eb986d000db38d7bb8e3c"}, + {file = "uv-0.7.6-py3-none-win_amd64.whl", hash = "sha256:4026513441dc01326f8bc04517956385442523ed1d40400e14723d8fb3d9c321"}, + {file = "uv-0.7.6-py3-none-win_arm64.whl", hash = "sha256:ad79d71d2bb4cc1cb22d09771a23f70190e3b5fa41668da208e694b50b900178"}, + {file = "uv-0.7.6.tar.gz", hash = "sha256:bd188ac9d9902f1652130837ede39768d7c8f72b0a68fd484ba884d88e963b66"}, +] + +[[package]] +name = "virtualenv" +version = "20.31.2" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"}, + {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +groups = ["main", "dev"] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "xmod" +version = "1.8.1" +description = "🌱 Turn any object into a module 🌱" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "xmod-1.8.1-py3-none-any.whl", hash = "sha256:a24e9458a4853489042522bdca9e50ee2eac5ab75c809a91150a8a7f40670d48"}, + {file = "xmod-1.8.1.tar.gz", hash = "sha256:38c76486b9d672c546d57d8035df0beb7f4a9b088bc3fb2de5431ae821444377"}, +] + +[[package]] +name = "z3-solver" +version = "4.15.0.0" +description = "an efficient SMT solver library" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "z3_solver-4.15.0.0-py3-none-macosx_13_0_arm64.whl", hash = "sha256:f39d07f6df4c8c09df594f1c9d2bb8b424805033a94725bfd535e7eca13eacfc"}, + {file = "z3_solver-4.15.0.0-py3-none-macosx_13_0_x86_64.whl", hash = "sha256:83e82ed14073e72ca62bd3833866a9457df124448054a0c9d36db55e2f5baac6"}, + {file = "z3_solver-4.15.0.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:214d833c2c0959aed3387e35ca36a1bfdfc443f11403d6aaf61ba346b5e21ed3"}, + {file = "z3_solver-4.15.0.0-py3-none-manylinux_2_34_aarch64.whl", hash = "sha256:f90adbb6987493828c9b8381481eaa3eb3efcdc69d326d4dd341da20deaefd31"}, + {file = "z3_solver-4.15.0.0-py3-none-win32.whl", hash = 
"sha256:cf119f1b95f0c0c6a82fce365336c8887e2f557c5b5b9f370a184aa80d164e1c"}, + {file = "z3_solver-4.15.0.0-py3-none-win_amd64.whl", hash = "sha256:f1d923506654f39834196908ff579c5555b65e2d6140e515dc2d0e43c9218b2f"}, + {file = "z3_solver-4.15.0.0.tar.gz", hash = "sha256:31012fdbaceb92667fd7e338de8b06b41d60c99bf6a3b8ec197de352372f05f1"}, +] + +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.9" +content-hash = "1a73e9db33e3884cf1cc6e3371816aebd20831845ef9bf671be315e659480e86" diff --git a/pyproject.toml b/pyproject.toml index ee6fa9d6d..7a78b26ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -122,6 +122,7 @@ types-greenlet = "^3.1.0.20241221" types-pexpect = "^4.9.0.20241208" types-unidiff = "^0.7.0.20240505" uv = ">=0.6.2" +pre-commit = "^4.2.0" [tool.poetry.build] script = "codeflash/update_license_version.py" @@ -158,6 +159,7 @@ exclude = ["code_to_optimize/", "pie_test_set/", "tests/"] [tool.ruff.lint] select = ["ALL"] ignore = [ + "N802", "C901", "D100", "D101", @@ -179,7 +181,16 @@ ignore = [ "TD003", "TD004", "PLR2004", - "UP007" # remove once we drop 3.9 support. + "UP007", # remove once we drop 3.9 support. + "E501", + "BLE001", + "ERA001", + "TRY003", + "EM101", + "T201", + "PGH004", + "S301", + "D104" ] [tool.ruff.lint.flake8-type-checking]