diff --git a/.coveragerc b/.coveragerc index de9a745..918149d 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,6 +1,6 @@ [run] omit = - src/hario_core/interfaces.py + src/hario_core/**/interfaces.py [report] exclude_lines = diff --git a/.flake8 b/.flake8 index f3f099f..e0ea542 100644 --- a/.flake8 +++ b/.flake8 @@ -1,5 +1,3 @@ [flake8] max-line-length = 88 -extend-ignore = E203 -exclude = - tests/samples.py \ No newline at end of file +extend-ignore = E203 \ No newline at end of file diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 2d5b341..a42584b 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -22,11 +22,10 @@ jobs: python -m pip install --upgrade pip pip install .[dev] - - name: Lint with flake8, black, mypy + - name: Lint with pre-commit run: | - flake8 . - black --check . - mypy -p hario_core + pre-commit run --all-files + - name: Test with pytest run: | pytest --cov --cov-branch --cov-report=xml diff --git a/.gitignore b/.gitignore index c9322ab..5053f6f 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,11 @@ __pycache__/ *.py[cod] *$py.class +# Benchmarks +benchmarks/*.har +benchmarks/*.stats +benchmarks/*.csv + # C extensions *.so diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 87b56f8..3116ea1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,20 +3,27 @@ repos: rev: 24.3.0 hooks: - id: black + files: ^src/|^tests/ + exclude: ^tests/samples.py$ - repo: https://github.com/PyCQA/isort rev: 5.12.0 hooks: - id: isort + files: ^src/|^tests/ + exclude: ^tests/samples.py$ - repo: https://github.com/pycqa/flake8 rev: 6.0.0 hooks: - id: flake8 + files: ^src/|^tests/ + exclude: ^tests/samples.py$ - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.7.1 hooks: - id: mypy + files: ^src/|^tests/ + exclude: ^tests/samples.py$ args: ["--config-file=pyproject.toml"] - exclude: ^(docs|examples)/ additional_dependencies: - pydantic 
\ No newline at end of file diff --git a/README.md b/README.md index 2f6127b..c6e8cac 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ A modern, extensible, and type-safe Python library for parsing, transforming, an ## Features - **Type-Safe Parsing**: Validates HAR files using Pydantic models, catching errors early. -- **Transformers**: Apply built-in or custom transformations to each HAR entry (e.g., flattening, normalization). +- **Transformers**: Apply built-in or custom transformations to each HAR entry (e.g., flatten, normalizations). - **Normalization**: Ensures all numeric fields (sizes, timings) are non-negative, so you can safely sum, aggregate, and analyze data without errors from negative values. This is crucial for analytics and reporting. - **Deterministic & Random IDs**: Generate unique or deterministic IDs for each entry. Deterministic IDs ensure that the same request always gets the same ID—useful for deduplication, comparison, and building analytics pipelines. - **Extensible**: Register your own entry models to support browser-specific or proprietary HAR extensions (e.g., Chrome DevTools, Safari). @@ -24,32 +24,65 @@ pip install hario-core ## Quickstart +### 1. Parse and validate a HAR file + +```python +from hario_core import parse + +har_log = parse("example.har") +entries = har_log.model_dump()["entries"] # list of dicts +``` + +### 2. Transform entries with a pipeline + +```python +from hario_core.transform import Pipeline, flatten, set_id, by_field + +pipeline = Pipeline([ + set_id(by_field(["request.url", "startedDateTime"])) +]) +results = pipeline.process(entries) +``` + +### 3. 
Custom entry models (extensions) + ```python -from hario_core import parse, Pipeline, by_field, normalize_sizes, flatten +from hario_core.parse import register_entry_model +from hario_core.models import Entry -# Build a processing pipeline: deterministic ID, normalization, flattening -pipeline = Pipeline( - id_fn=by_field(["request.url", "startedDateTime"]), - transformers=[normalize_sizes(), flatten()], -) +def is_custom_entry(entry: dict) -> bool: + return "x-custom" in entry -# Parse your HAR file (from path, bytes, or file-like object) -model = parse("example.har") -result_dict = pipeline.process(model) +class CustomEntry(Entry): + x_custom: str -for entry in result_dict: - print(entry["id"], entry["request"]["url"]) +register_entry_model(is_custom_entry, CustomEntry) ``` +## Public API + +### Parsing and validation +- `parse(path_or_bytes_or_filelike) -> HarLog` +- `validate(har_dict: dict) -> HarLog` +- `register_entry_model(detector: Callable, model: Type[Entry])` +- `entry_selector(entry_dict: dict) -> Type[Entry]` + +### Models +- `Entry`, `HarLog`, `DevToolsEntry` (and all standard HAR 1.2 models) + +### Transform +- `Pipeline`, `flatten`, `normalize_sizes`, `normalize_timings`, `set_id`, `by_field`, `uuid`, `json_array_handler` + ## Documentation -- [API Reference](docs/api.md) -- [Changelog](docs/changelog.md) -- [Contributing](CONTRIBUTING.md) +- [API Reference](https://github.com/pikulev/hario-core/blob/main/docs/api.md) +- [Changelog](https://github.com/pikulev/hario-core/blob/main/docs/changelog.md) +- [Contributing](https://github.com/pikulev/hario-core/blob/main/CONTRIBUTING.md) + ## License -MIT License. See [LICENSE](LICENSE). +MIT License. See [LICENSE](https://github.com/pikulev/hario-core/blob/main/LICENSE). 
## Supported Python Versions diff --git a/benchmarks/bench.py b/benchmarks/bench.py new file mode 100644 index 0000000..72da6d3 --- /dev/null +++ b/benchmarks/bench.py @@ -0,0 +1,111 @@ +from bench_core import ( + STRATEGIES, HAR_PATH, + bench_flatten, bench_full, bench_normalize_sizes, bench_normalize_timings, bench_cpu_heavy, + create_results_table, create_results_csv, average_run, get_entries +) +from rich.console import Console +import argparse +import cProfile +import pstats +import sys + + + +def main() -> None: + parser = argparse.ArgumentParser( + description=""" + Microbenchmark for HAR Pipeline with different strategies, averaging, profiling and CSV output. + + Example usage: + python bench.py flatten -f my.har --no-gc --csv results.csv + python bench.py --csv all_results.csv + python bench.py full --profile process + """, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument( + "mode", + nargs="?", + default="all", + choices=["flatten", "normalize", "full", "cpu_heavy", "all"], + help="Benchmark mode: flatten, normalize, full, cpu_heavy, all (default: all)" + ) + parser.add_argument( + "-f", "--file", + default=HAR_PATH, + help="Path to HAR file (default: benchmarks/test_lg.har)" + ) + parser.add_argument( + "--profile", + choices=STRATEGIES, + help="Enable cProfile profiling for the given strategy (e.g. 
--profile process)" + ) + parser.add_argument( + "--no-gc", + action="store_true", + help="Disable GC during measurement (default: GC enabled)" + ) + parser.add_argument( + "--csv", + nargs="?", + const="-", + help="Save results to CSV file (or stdout if no file is specified)" + ) + args = parser.parse_args() + + mode = args.mode + har_path = args.file + profile_strategy = args.profile + use_gc = not args.no_gc + + console = Console() + console.print(f"Loading HAR file: {har_path} ...") + entries = get_entries(har_path) + console.print(f"Loaded {len(entries)} entries.") + + bench_map = { + "flatten": bench_flatten, + "normalize_sizes": bench_normalize_sizes, + "normalize_timings": bench_normalize_timings, + "full": bench_full, + "cpu_heavy": bench_cpu_heavy, + } + + if profile_strategy: + if mode == "all": + console.print("[red]Profiling is only available for a single mode, not for 'all'.[/red]") + sys.exit(1) + bench_func = bench_map[mode] + profile_file = f"benchmarks/{mode}.stats" + console.print(f"Profiling... 
profile saved in {profile_file} (strategy: {profile_strategy})") + def prof(): + bench_func(entries, profile_strategy, use_gc=use_gc) + cProfile.runctx("prof()", globals(), locals(), profile_file) + p = pstats.Stats(profile_file) + console.print("\n=== TOP-20 functions by cumtime ===\n") + p.strip_dirs().sort_stats("cumtime").print_stats(20) + console.print(f"\n=== For visualization, run: snakeviz {profile_file} ===") + else: + results = {strategy: {} for strategy in STRATEGIES} + if mode == "all": + for test_name, bench_func in bench_map.items(): + for strategy in STRATEGIES: + console.print(f"\n[bold]Running {test_name} with {strategy} strategy...[/bold]") + elapsed, current, peak, rss = average_run(bench_func, entries, strategy, use_gc=use_gc) + results[strategy][test_name] = (elapsed, current, peak, rss) + else: + bench_func = bench_map[mode] + for strategy in STRATEGIES: + console.print(f"\n[bold]Running {mode} with {strategy} strategy...[/bold]") + elapsed, current, peak, rss = average_run(bench_func, entries, strategy, use_gc=use_gc) + results[strategy][mode] = (elapsed, current, peak, rss) + # Display results table + table = create_results_table(results) + console.print(table) + # CSV output + if args.csv: + filename = None if args.csv == "-" else args.csv + create_results_csv(results, filename) + +if __name__ == "__main__": + main() diff --git a/benchmarks/bench_core.py b/benchmarks/bench_core.py new file mode 100644 index 0000000..900facb --- /dev/null +++ b/benchmarks/bench_core.py @@ -0,0 +1,193 @@ +import sys +import time +import tracemalloc +import hashlib +import orjson +from typing import Dict, Tuple, Any +import gc +import psutil +import os +import csv +from rich.table import Table + +from hario_core.transform import ( + Pipeline, + PipelineConfig, + by_field, + flatten, + normalize_sizes, + normalize_timings, + set_id, +) +from hario_core.parse import parse + +REPEAT = 5 +BATCH_SIZE = 12 +MAX_WORKERS = 6 + +HAR_PATH = 
"benchmarks/test_lg.har" +STRATEGIES = ["process", "thread", "sequential", "async"] + +def get_entries(har_path: str) -> dict: + """ + Get entries from HAR file. + """ + har_log = parse(har_path) + return har_log.model_dump()['entries'] + + +def bench_flatten(entries: dict, strategy: str, use_gc: bool = True) -> Tuple[float, int, int, int]: + config = PipelineConfig( + batch_size=BATCH_SIZE, + processing_strategy=strategy, + max_workers=MAX_WORKERS if strategy in ["process", "thread"] else None, + ) + pipeline = Pipeline( + transformers=[set_id(by_field(["request.url", "startedDateTime"])), flatten()], + config=config, + ) + return run_pipeline(pipeline, entries, f"flatten ({strategy})", use_gc=use_gc) + + +def bench_normalize_sizes(entries: dict, strategy: str, use_gc: bool = True) -> Tuple[float, int, int, int]: + config = PipelineConfig( + batch_size=BATCH_SIZE, + processing_strategy=strategy, + max_workers=MAX_WORKERS if strategy in ["process", "thread"] else None, + ) + pipeline = Pipeline( + transformers=[set_id(by_field(["request.url", "startedDateTime"])), normalize_sizes()], + config=config, + ) + return run_pipeline(pipeline, entries, f"normalize_sizes ({strategy})", use_gc=use_gc) + + +def bench_normalize_timings(entries: dict, strategy: str, use_gc: bool = True) -> Tuple[float, int, int, int]: + config = PipelineConfig( + batch_size=BATCH_SIZE, + processing_strategy=strategy, + max_workers=MAX_WORKERS if strategy in ["process", "thread"] else None, + ) + pipeline = Pipeline( + transformers=[set_id(by_field(["request.url", "startedDateTime"])), normalize_timings()], + config=config, + ) + return run_pipeline(pipeline, entries, f"normalize_timings ({strategy})", use_gc=use_gc) + + +def bench_full(entries: dict, strategy: str, use_gc: bool = True) -> Tuple[float, int, int, int]: + config = PipelineConfig( + batch_size=BATCH_SIZE, + processing_strategy=strategy, + max_workers=MAX_WORKERS if strategy in ["process", "thread"] else None, + ) + pipeline = 
Pipeline( + transformers=[ + set_id(by_field(["request.url", "startedDateTime"])), + normalize_sizes(), + normalize_timings(), + flatten(), + ], + config=config, + ) + return run_pipeline(pipeline, entries, f"full pipeline ({strategy})", use_gc=use_gc) + + +class CpuHeavy: + def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]: + payload = orjson.dumps(data) + for _ in range(1000): + payload = hashlib.sha256(payload).digest() + data["cpu_hash"] = payload.hex() + return data + + +def cpu_heavy_transformer() -> CpuHeavy: + return CpuHeavy() + + +def bench_cpu_heavy(entries: dict, strategy: str, use_gc: bool = True) -> Tuple[float, int, int, int]: + config = PipelineConfig( + batch_size=BATCH_SIZE, + processing_strategy=strategy, + max_workers=MAX_WORKERS if strategy in ["process", "thread"] else None, + ) + pipeline = Pipeline( + transformers=[set_id(by_field(["request.url", "startedDateTime"])), cpu_heavy_transformer(), flatten()], + config=config, + ) + return run_pipeline(pipeline, entries, f"cpu_heavy_transformer ({strategy})", use_gc=use_gc) + + +def run_pipeline(pipeline: Pipeline, entries: dict, label: str, use_gc: bool = True) -> Tuple[float, int, int, int]: + if use_gc: + gc.collect() + else: + gc.disable() + tracemalloc.start() + start = time.perf_counter() + result = pipeline.process(entries) + elapsed = time.perf_counter() - start + current, peak = tracemalloc.get_traced_memory() + tracemalloc.stop() + rss = psutil.Process(os.getpid()).memory_info().rss + if not use_gc: + gc.enable() + return elapsed, current, peak, rss + + +def average_run(bench_func, entries: dict, strategy: str, use_gc: bool = True): + times = [] + currents = [] + peaks = [] + rss_list = [] + for _ in range(REPEAT): + elapsed, current, peak, rss = bench_func(entries, strategy, use_gc=use_gc) + times.append(elapsed) + currents.append(current) + peaks.append(peak) + rss_list.append(rss) + n = REPEAT + return ( + sum(times) / n, + sum(currents) / n, + sum(peaks) / n, + sum(rss_list) 
/ n, + ) + + +def create_results_table(results: Dict[str, Dict[str, Tuple[float, int, int, int]]]) -> Table: + table = Table(title="Benchmark Results") + table.add_column("Test", style="cyan") + for strategy in STRATEGIES: + table.add_column(strategy, justify="right", style="green") + for test_name in results[STRATEGIES[0]].keys(): + row = [test_name] + for strategy in STRATEGIES: + elapsed, current, peak, rss = results[strategy][test_name] + row.append(f"{elapsed:.3f}s\nPyHeap: {current/1024/1024:.1f}MB\nPyPeak: {peak/1024/1024:.1f}MB\nRSS: {rss/1024/1024:.1f}MB") + table.add_row(*row) + return table + + +def create_results_csv(results: Dict[str, Dict[str, tuple]], filename: str = None): + fieldnames = ["Test"] + [f"{strategy}_elapsed" for strategy in STRATEGIES] + [f"{strategy}_pyheap" for strategy in STRATEGIES] + [f"{strategy}_pypeak" for strategy in STRATEGIES] + [f"{strategy}_rss" for strategy in STRATEGIES] + rows = [] + for test_name in results[STRATEGIES[0]].keys(): + row = {"Test": test_name} + for strategy in STRATEGIES: + elapsed, current, peak, rss = results[strategy][test_name] + row[f"{strategy}_elapsed"] = f"{elapsed:.6f}" + row[f"{strategy}_pyheap"] = f"{current/1024/1024:.2f}" + row[f"{strategy}_pypeak"] = f"{peak/1024/1024:.2f}" + row[f"{strategy}_rss"] = f"{rss/1024/1024:.2f}" + rows.append(row) + if filename: + with open(filename, "w", newline="") as f: + writer = csv.DictWriter(f, fieldnames=fieldnames) + writer.writeheader() + writer.writerows(rows) + else: + writer = csv.DictWriter(sys.stdout, fieldnames=fieldnames) + writer.writeheader() + writer.writerows(rows) \ No newline at end of file diff --git a/docs/api.md b/docs/api.md index 2e67461..f52ce03 100644 --- a/docs/api.md +++ b/docs/api.md @@ -9,36 +9,39 @@ This page provides a detailed guide to the main functions, classes, and extensib ### `parse` ```python -from hario_core import parse +from hario_core.parse import parse ``` Parses a HAR file from a path, bytes, or file-like object and 
returns a validated `HarLog` model. Automatically selects the correct Pydantic model for each entry (including extensions). **Signature:** ```python -def parse( - src: str | Path | bytes | bytearray | IO[Any], - *, - entry_model_selector: Callable[[dict[str, Any]], type[Entry]] = entry_selector, -) -> HarLog +def parse(src: str | Path | bytes | bytearray | IO[Any]) -> HarLog ``` - - `src`: Path, bytes, or file-like object containing HAR JSON. -- `entry_model_selector`: Optional. Function to select the Pydantic model for each entry (default: registry-based selector). **Returns:** - `HarLog` — a validated Pydantic model with `.entries` (list of `Entry` or extension models). **Example:** ```python -model = parse("example.har") +har_log = parse("example.har") for entry in har_log.entries: print(entry.request.url) ``` --- -## Entry Model Registration +### `validate` + +Validates a HAR dict (already loaded from JSON) and returns a `HarLog` model. + +**Signature:** +```python +def validate(har_dict: dict) -> HarLog +``` + +--- ### `register_entry_model` @@ -46,41 +49,48 @@ Register a custom Pydantic model and detector function for new HAR entry formats **Signature:** ```python -def register_entry_model( - detector: Callable[[dict[str, Any]], bool], - model: type[Entry], -) -> None +def register_entry_model(detector: Callable[[dict], bool], model: type[Entry]) -> None ``` - - `detector`: Function that takes an entry dict and returns True if the model should be used. - `model`: Pydantic model class to use for matching entries. 
**Example:** ```python -from hario_core.models.har_1_2 import Entry -from pydantic import Field +from hario_core.models import Entry -class SafariEntry(Entry): - webkit_trace: dict = Field(alias="_webkitTrace") +class CustomEntry(Entry): + x_custom: str -def is_safari_entry(entry_json): - return "_webkitTrace" in entry_json +def is_custom_entry(entry): + return "x-custom" in entry -register_entry_model(is_safari_entry, SafariEntry) +register_entry_model(is_custom_entry, CustomEntry) +``` + +--- + +### `entry_selector` + +Selects the appropriate Entry model for a given entry dict (based on registered detectors). + +**Signature:** +```python +def entry_selector(entry_dict: dict) -> type[Entry] ``` --- ## Data Models -All core data structures are implemented as Pydantic models in `hario_core.models.har_1_2`. +All core data structures are implemented as Pydantic models in `hario_core.models`. - `Entry`: Pydantic model for a HAR entry (fields: request, response, timings, cache, etc.). - `HarLog`: Pydantic model for the HAR log (fields: version, creator, entries, etc.). +- `DevToolsEntry`: Chrome DevTools extension entry model. **Example:** ```python -from hario_core.models.har_1_2 import HarLog, Entry +from hario_core.models import HarLog, Entry har_log = HarLog.model_validate(har_json["log"]) for entry in har_log.entries: @@ -88,80 +98,56 @@ for entry in har_log.entries: print(entry.request.url) ``` -### `Transformer` -A transformer is a function that takes an `Entry` (or its extension) and returns a dict (possibly mutated/transformed). - -```python -def my_transformer(entry: Entry) -> dict: - data = entry.model_dump() - # mutate data - return data -``` - -### `EntryIdFn` -A function that takes an `Entry` and returns a string ID. - --- -## ID Generation +## Transformers & ID Generators -### `by_field` +### `Transformer` +A transformer is a callable that takes a dict (parsed HAR entry) and returns a dict (possibly mutated/transformed). 
-Returns a deterministic ID function based on specified fields of a HAR entry. +### `set_id` +Sets an ID field in each entry using a provided function. **Signature:** ```python -def by_field(fields: list[str]) -> EntryIdFn +def set_id(id_fn: Callable[[dict], str], id_field: str = "id") -> Transformer ``` -**Example:** +### `by_field` +Returns a deterministic ID function based on specified fields of a HAR entry. + +**Signature:** ```python -from hario_core.utils import by_field -id_fn = by_field(["request.url", "startedDateTime"]) +def by_field(fields: list[str]) -> Callable[[dict], str] ``` ### `uuid` - Returns a function that generates a random UUID for each entry. **Signature:** ```python -def uuid() -> EntryIdFn +def uuid() -> Callable[[dict], str] ``` -**Example:** -```python -from hario_core.utils import uuid -id_fn = uuid() -``` - ---- - -## Transformers - -Transformers are functions that mutate or normalize HAR entry data for storage or analysis. - ### `flatten` - -Flattens nested structures in a HAR entry to a single level, stringifying deep or large fields (useful for DB storage). +Flattens nested structures in a HAR entry to a flat dict with keys joined by separator. If a list is encountered, array_handler is called (default: str). Useful for exporting to CSV, analytics, or custom DB schemas. **Signature:** ```python -def flatten( - max_depth: int = 3, - size_limit: int = 32_000, -) -> Transformer +def flatten(separator: str = ".", array_handler: Callable[[list, str], Any] = None) -> Transformer ``` -- `max_depth`: Maximum depth to keep as dicts/lists (default: 3). -- `size_limit`: Maximum size (in bytes) for nested data before stringifying (default: 32,000). +- `separator`: Separator for keys (default: '.') +- `array_handler`: Function (lambda arr, path) -> value. 
Default is str(arr) **Example:** ```python -flat_entry = flatten()(entry) +def header_handler(arr, path): + return {f"{path}.{item['name']}": item["value"] for item in arr if isinstance(item, dict) and "name" in item and "value" in item} + +flat_entry = flatten(array_handler=header_handler)(entry) ``` ### `normalize_sizes` - Normalizes negative size fields in request/response to zero. **Signature:** @@ -170,7 +156,6 @@ def normalize_sizes() -> Transformer ``` ### `normalize_timings` - Normalizes negative timing fields in entry.timings to zero. **Signature:** @@ -182,45 +167,88 @@ def normalize_timings() -> Transformer ## Pipeline -### `Pipeline` - -A high-level class for processing HAR data: transforming and assigning IDs. You must pass a parsed `HarLog` object (see `parse`). +### `PipelineConfig` +Configuration for the Pipeline processor. **Signature:** ```python -class Pipeline: - def __init__( - self, - id_fn: EntryIdFn, - id_field: str = "id", - transformers: Sequence[Transformer] = (), - ) -> None - def process(self, har_log: HarLog) -> list[dict[str, Any]] +from hario_core.transform import PipelineConfig + +config = PipelineConfig( + batch_size=1000, # entries per batch + processing_strategy="process", # "sequential", "thread", "process", "async" + max_workers=4 # number of parallel workers (if applicable) +) ``` -- `id_fn`: Function to generate an ID for each entry. -- `id_field`: Field name for the generated ID (default: "id"). -- `transformers`: List of transformer functions to apply to each entry. +- `batch_size`: int, default 20000 +- `processing_strategy`: str, one of "sequential", "thread", "process", "async" +- `max_workers`: int | None, number of parallel workers (for thread/process) --- -## Example: Full Pipeline +### `Pipeline` +A high-level class for processing HAR entry dicts: transforming and assigning IDs. 
+**Signature:** ```python -from hario_core import Pipeline, by_field, flatten, normalize_sizes, parse +from hario_core.transform import Pipeline, PipelineConfig pipeline = Pipeline( - id_fn=by_field(["request.url", "startedDateTime"]), - transformers=[flatten(), normalize_sizes()], + transformers=[...], + config=PipelineConfig(...) ) -model = parse("example.har") -result_dict = pipeline.process(model) +results = pipeline.process(entries) # entries: list[dict] +``` +- `transformers`: List of transformer functions to apply to each entry. +- `config`: PipelineConfig instance (optional, default: sequential, batch_size=20000) +- `process(entries)`: entries must be a list of dicts (e.g., from HarLog.model_dump()["entries"]) + +--- + +### Example: Full Pipeline + +```python +from hario_core.parse import parse +from hario_core.transform import Pipeline, PipelineConfig, by_field, flatten, normalize_sizes, set_id + +har_log = parse("example.har") +entries = har_log.model_dump()["entries"] + +pipeline = Pipeline([ + set_id(by_field(["request.url", "startedDateTime"])), + flatten(), + normalize_sizes(), +]) +results = pipeline.process(entries) +``` -for entry in result_dict: - print(entry["id"], entry["request"]["url"]) +--- + +### Example: Parallel Processing with Custom Batch Size and Workers + +```python +from hario_core.transform import Pipeline, PipelineConfig, flatten + +config = PipelineConfig( + processing_strategy="process", # or "thread" + batch_size=20, # process 20 entries per batch + max_workers=6 # use 6 parallel workers +) + +pipeline = Pipeline([ + flatten(), +], config=config) +results = pipeline.process(entries) ``` +#### Available Processing Strategies +- `sequential` (default): Process entries one by one in a single thread. Best for small datasets or debugging. +- `thread`: Parallel processing using threads. Useful for I/O-bound tasks or when GIL is not a bottleneck. +- `process`: Parallel processing using multiple processes. 
Recommended for CPU-bound tasks and large datasets. +- `async`: Asynchronous processing (if your transformers support async). For advanced use cases with async I/O. + --- ## Chrome DevTools Extension Example @@ -229,8 +257,8 @@ You can use the Chrome DevTools HAR extension models to validate and work with H **Example:** ```python -from hario_core.models.extensions.chrome_devtools import DevToolsEntry -from hario_core.models.har_1_2 import HarLog +from hario_core.models import DevToolsEntry +from hario_core.models import HarLog # Suppose har_json is a dict loaded from a Chrome DevTools HAR file har_log = HarLog.model_validate(har_json["log"]) diff --git a/docs/changelog.md b/docs/changelog.md index 1978fec..f71d82f 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,18 @@ # Changelog +### v0.4.0 +- BREAKING: The old `flatten` (which stringifies nested structures) is now called `stringify`. +- BREAKING: The new `flatten` has different behavior—please update your code if you relied on the old flatten logic. +- BREAKING: Pipeline now requires a list of transformers and a PipelineConfig instance (no more id_fn/id_field in constructor). +- BREAKING: Pipeline.process now expects a list of dicts (e.g., from HarLog.model_dump()["entries"]). +- New: Introduced a new `flatten` transformer that fully flattens nested HAR entries into a flat dict, with customizable key separator and flexible array handling via `array_handler`. Designed for advanced analytics and BI. +- New: PipelineConfig class for configuring batch size, processing strategy (sequential/thread/process/async), and max_workers. +- New: Parallel and batch processing strategies for large HAR files (process, thread, async). +- New: Benchmarks and benchmarking scripts for pipeline performance (see `benchmarks/`). +- New: All transformers (`flatten`, `normalize_sizes`, `normalize_timings`, `set_id`) are now implemented as picklable callable classes, fully compatible with multiprocessing. 
+- New: `set_id` transformer for assigning IDs to entries using any function (e.g., by_field, uuid). +- Internal: Test suite and samples updated for new API and real-world HAR compatibility. + ### v0.3.1 - FIX real-world HAR compatibility: made nested fields like `postData.params` optional in models, so parsing DevTools and other real HAR files is more robust. - All test samples are now based on real HAR data with valid `pages` and `pageref` links. diff --git a/docs/index.md b/docs/index.md index 97b959a..237825d 100644 --- a/docs/index.md +++ b/docs/index.md @@ -2,75 +2,136 @@ Hario Core is a modern, extensible, and type-safe Python library for parsing, transforming, and analyzing HAR (HTTP Archive) files. Built on Pydantic, it provides robust validation, flexible transformation, and easy extension for custom HAR formats. -## Main Concepts +--- -- **Parser**: Use `parse()` to load and validate HAR files into Pydantic models (`HarLog`, `Entry`). -- **Pipeline**: The `Pipeline` class lets you process HAR logs, assign IDs, and apply transformations in a composable way. -- **Transformers**: Built-in and custom functions (like `flatten`, `normalize_sizes`, `normalize_timings`) to mutate or normalize HAR entries for storage or analytics. -- **Utils**: Utilities for ID generation (`by_field`, `uuid`), model registration (`register_entry_model`), and more. +## API Overview -See the [API Reference](api.md) for detailed usage, signatures, and extension patterns. +Hario Core exposes three main namespaces: -## Key Features +### 1. Models (`hario_core.models`) + +- **Purpose:** Type-safe data structures for HAR 1.2 and browser extensions. +- **Key classes:** `Entry`, `HarLog`, `DevToolsEntry` +- **Usage:** Use these models for type checking, validation, and extension. + +```python +from hario_core.models import Entry, HarLog, DevToolsEntry +``` -- **Type-Safe Parsing**: Validates HAR files using Pydantic models, catching errors early. 
-- **Transformers**: Apply built-in or custom transformations to each HAR entry (e.g., flattening, normalization). -- **Normalization**: Ensures all numeric fields (sizes, timings) are non-negative, so you can safely sum, aggregate, and analyze data without errors from negative values. This is crucial for analytics and reporting. -- **Deterministic & Random IDs**: Generate unique or deterministic IDs for each entry. Deterministic IDs ensure that the same request always gets the same ID—useful for deduplication, comparison, and building analytics pipelines. -- **Extensible**: Register your own entry models to support browser-specific or proprietary HAR extensions (e.g., Chrome DevTools, Safari). -- **Composable Pipelines**: Chain any number of transformers and ID strategies for flexible data processing. +--- -## Why Normalize HAR Data? +### 2. Parse (`hario_core.parse`) -HAR files from browsers or proxies sometimes contain negative values for sizes or timings (e.g., -1 for unknown). Normalization transforms these to zero, so you can safely compute totals, averages, and other metrics without skewing your analytics. This is especially important for dashboards, BI, and automated reporting. +- **Purpose:** Loading, validating, and extending HAR files. +- **Key functions:** + - `parse(path_or_bytes_or_filelike) -> HarLog` + - `validate(har_dict: dict) -> HarLog` + - `register_entry_model(detector, model)` + - `entry_selector(entry_dict) -> Type[Entry]` +- **Usage:** Always start with parsing or validating your HAR data. -## Why Deterministic IDs? +```python +from hario_core.parse import parse, validate, register_entry_model, entry_selector -A deterministic ID is generated from key fields (like URL and timestamp), so the same logical request always gets the same ID—even if the HAR is re-exported or merged. This is essential for deduplication, change tracking, and building reliable analytics or data warehouses. 
+har_log = parse("example.har") +# or, if you already have a dict: +har_log = validate(har_dict) +``` -## Extensibility and DevTools Support +--- -Modern HAR files exported from browsers (e.g., Chrome DevTools) often contain additional, non-standard fields such as `_initiator`, `_resourceType`, `_transferSize`, and more. These fields are used for advanced diagnostics and analysis. +### 3. Transform (`hario_core.transform`) +- **Purpose:** Transforming and processing HAR entries with pipelines and utilities. +- **Key classes/functions:** `Pipeline`, `flatten`, `normalize_sizes`, `normalize_timings`, `set_id`, `by_field`, `uuid`, `json_array_handler` +- **Usage:** Build flexible pipelines for cleaning, normalizing, and analyzing HAR data. -### Where do extensions appear? +```python +from hario_core.transform import Pipeline, flatten, set_id, by_field + +entries = har_log.model_dump()["entries"] +pipeline = Pipeline([ + set_id(by_field(["request.url", "startedDateTime"])), + flatten() +]) +results = pipeline.process(entries) +``` -- **Almost all extensions are found only in `entries`** (and their submodels: request, response, timings, etc.). -- Extensions at the root `log` level, or in `pages`, `creator`, `browser`, etc., are extremely rare and not typically used for traffic analysis. +--- -### Why are extensions implemented only at the entry level? +## Key Features -- In hario-core, extensions are supported via a separate `DevToolsEntry` model, which inherits from the standard `Entry` and adds all known DevTools-specific fields. -- The `entry_selector` function inspects each entry and chooses the appropriate model (`Entry` or `DevToolsEntry`). -- This approach covers 99% of real-world cases and allows you to work with any HAR file exported from browsers. +- **Type-Safe Parsing:** Validates HAR files using Pydantic models, catching errors early. +- **Composable Transformations:** Build pipelines from built-in or custom transformers. 
+- **Extensible:** Register your own entry models for browser-specific or proprietary HAR extensions. +- **Deterministic & Random IDs:** Generate unique or deterministic IDs for each entry. +- **Batch & Parallel Processing:** Built-in strategies for processing large HAR files. -### How it works +--- -```python -from hario_core.har_parser import parse +## Example: Full Workflow -har_log = parse(devtools_har_bytes) -entry = har_log.entries[0] -assert isinstance(entry, Entry) -assert isinstance(entry, DevToolsEntry) # True for DevTools entries +```python +from hario_core.parse import parse +from hario_core.transform import Pipeline, by_field, flatten, normalize_sizes, set_id + +har_log = parse("example.har") +entries = har_log.model_dump()["entries"] + +pipeline = Pipeline([ + set_id(by_field(["request.url", "startedDateTime"])), + flatten(), + normalize_sizes(), +]) +results = pipeline.process(entries) ``` -## Example: Full Pipeline +--- + +### Parallel Processing with Custom Batch Size and Workers ```python -from hario_core import parse, Pipeline, by_field, flatten, normalize_sizes +from hario_core.transform import Pipeline, PipelineConfig, flatten + +config = PipelineConfig( + strategy="process", # or "thread" for multithreading + batch_size=20, # process 20 entries per batch + num_workers=6 # use 6 parallel workers +) -pipeline = Pipeline( - id_fn=by_field(["request.url", "startedDateTime"]), - transformers=[flatten(), normalize_sizes()], +pipeline = Pipeline([ + flatten(), +], + config=config ) +results = pipeline.process(entries) +``` + +#### Available Processing Strategies + +- `sequential` (default): Process entries one by one in a single thread. Best for small datasets or debugging. +- `thread`: Parallel processing using threads. Useful for I/O-bound tasks or when GIL is not a bottleneck. +- `process`: Parallel processing using multiple processes. Recommended for CPU-bound tasks and large datasets. 
+- `async`: Asynchronous processing (if your transformers support async). For advanced use cases with async I/O. -model = parse("example.har") -result_dict = pipeline.process(model) +--- + +## Extending with Custom Entry Models + +```python +from hario_core.parse import register_entry_model +from hario_core.models import Entry -for entry in result_dict: - print(entry["id"], entry["request"]["url"]) +def is_custom_entry(entry: dict) -> bool: + return "x-custom" in entry + +class CustomEntry(Entry): + x_custom: str + +register_entry_model(is_custom_entry, CustomEntry) ``` +--- + ## Installation ```bash diff --git a/environment.yml b/environment.yml index b982078..5fba4f4 100644 --- a/environment.yml +++ b/environment.yml @@ -6,6 +6,7 @@ dependencies: - python=3.10 - pip - pydantic>=2.5.0 + - orjson=3.10.18 # dev-dependencies - pytest=8.2.2 - pytest-cov @@ -21,6 +22,10 @@ dependencies: - mkdocstrings=0.25.1 - mkdocs-autorefs=0.5.0 - poetry + - rich=13.9.4 - pip: + - -e . - mkdocstrings[python]==0.25.1 - - pydocstyle[toml]==6.3.0 \ No newline at end of file + - pydocstyle[toml]==6.3.0 + - snakeviz==2.2.2 + - rich==13.9.4 \ No newline at end of file diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..85048fc --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2106 @@ +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "babel" +version = "2.17.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, +] + +[package.extras] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] + +[[package]] +name = "backrefs" +version = "5.8" +description = "A wrapper around re and regex that adds additional back references." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "backrefs-5.8-py310-none-any.whl", hash = "sha256:c67f6638a34a5b8730812f5101376f9d41dc38c43f1fdc35cb54700f6ed4465d"}, + {file = "backrefs-5.8-py311-none-any.whl", hash = "sha256:2e1c15e4af0e12e45c8701bd5da0902d326b2e200cafcd25e49d9f06d44bb61b"}, + {file = "backrefs-5.8-py312-none-any.whl", hash = "sha256:bbef7169a33811080d67cdf1538c8289f76f0942ff971222a16034da88a73486"}, + {file = "backrefs-5.8-py313-none-any.whl", hash = "sha256:e3a63b073867dbefd0536425f43db618578528e3896fb77be7141328642a1585"}, + {file = "backrefs-5.8-py39-none-any.whl", hash = "sha256:a66851e4533fb5b371aa0628e1fee1af05135616b86140c9d787a2ffdf4b8fdc"}, + {file = "backrefs-5.8.tar.gz", hash = "sha256:2cab642a205ce966af3dd4b38ee36009b31fa9502a35fd61d59ccc116e40a6bd"}, +] + +[package.extras] +extras = ["regex"] + +[[package]] +name = "black" +version = "24.3.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, + {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, + {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, + {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, + {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, + {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, + {file = 
"black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, + {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, + {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, + {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, + {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, + {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, + {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, + {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, + {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, + {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, + {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, + {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, + {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, + {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, + {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, + {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "black" +version = "24.10.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, + {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, + {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, + {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, + {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, + {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, + {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, + {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, + {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, + {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, + {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, + {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, + {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, + {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, + {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, + {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, + {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, + {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, + {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, + {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, + {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, + {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2025.4.26" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = 
"charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = 
"charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = 
"charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = 
"charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash 
= "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, +] + +[[package]] +name = "click" +version = "8.2.1" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.9.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "coverage-7.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3d494fa4256e3cb161ca1df14a91d2d703c27d60452eb0d4a58bb05f52f676e4"}, + {file = "coverage-7.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b613efceeabf242978d14e1a65626ec3be67c5261918a82a985f56c2a05475ee"}, + {file = "coverage-7.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673a4d2cb7ec78e1f2f6f41039f6785f27bca0f6bc0e722b53a58286d12754e1"}, + {file = "coverage-7.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1edc2244932e9fed92ad14428b9480a97ecd37c970333688bd35048f6472f260"}, + {file = "coverage-7.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8b92a7617faa2017bd44c94583830bab8be175722d420501680abc4f5bc794"}, + {file = "coverage-7.9.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d8f3ca1f128f11812d3baf0a482e7f36ffb856ac1ae14de3b5d1adcfb7af955d"}, + {file = "coverage-7.9.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c30eed34eb8206d9b8c2d0d9fa342fa98e10f34b1e9e1eb05f79ccbf4499c8ff"}, + {file = "coverage-7.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e6f8e5f125cd8bff33593a484a079305c9f0be911f76c6432f580ade5c1a17"}, + {file = "coverage-7.9.0-cp310-cp310-win32.whl", hash = 
"sha256:a1b0317b4a8ff4d3703cd7aa642b4f963a71255abe4e878659f768238fab6602"}, + {file = "coverage-7.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:512b1ea57a11dfa23b7f3d8fe8690fcf8cd983a70ae4c2c262cf5c972618fa15"}, + {file = "coverage-7.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:55b7b9df45174956e0f719a56cf60c0cb4a7f155668881d00de6384e2a3402f4"}, + {file = "coverage-7.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87bceebbc91a58c9264c43638729fcb45910805b9f86444f93654d988305b3a2"}, + {file = "coverage-7.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81da3b6e289bf9fc7dc159ab6d5222f5330ac6e94a6d06f147ba46e53fa6ec82"}, + {file = "coverage-7.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b361684a91224d4362879c1b1802168d2435ff76666f1b7ba52fc300ad832dbc"}, + {file = "coverage-7.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9a384ea4f77ac0a7e36c9a805ed95ef10f423bdb68b4e9487646cdf548a6a05"}, + {file = "coverage-7.9.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:38a5642aa82ea6de0e4331e346f5ba188a9fdb7d727e00199f55031b85135d0a"}, + {file = "coverage-7.9.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8c5ff4ca4890c0b57d3e80850534609493280c0f9e6ea2bd314b10cb8cbd76e0"}, + {file = "coverage-7.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cd052a0c4727ede06393da3c1df1ae6ef6c079e6bdfefb39079877404b3edc22"}, + {file = "coverage-7.9.0-cp311-cp311-win32.whl", hash = "sha256:f73fd1128165e1d665cb7f863a91d00f073044a672c7dfa04ab400af4d1a9226"}, + {file = "coverage-7.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd62d62e782d3add529c8e7943f5600efd0d07dadf3819e5f9917edb4acf85d8"}, + {file = "coverage-7.9.0-cp311-cp311-win_arm64.whl", hash = "sha256:f75288785cc9a67aff3b04dafd8d0f0be67306018b224d319d23867a161578d6"}, + {file = "coverage-7.9.0-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:969ed1ed0ab0325b50af3204f9024782180e64fb281f5a2952f479ec60a02aba"}, + {file = "coverage-7.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1abd41781c874e716aaeecb8b27db5f4f2bc568f2ed8d41228aa087d567674f0"}, + {file = "coverage-7.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eb6e99487dffd28c88a4fc2ea4286beaf0207a43388775900c93e56cc5a8ae3"}, + {file = "coverage-7.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c425c85ddb62b32d44f83fb20044fe32edceceee1db1f978c062eec020a73ea5"}, + {file = "coverage-7.9.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0a1f7676bc90ceba67caa66850d689947d586f204ccf6478400c2bf39da5790"}, + {file = "coverage-7.9.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f17055c50768d710d6abc789c9469d0353574780935e1381b83e63edc49ff530"}, + {file = "coverage-7.9.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:298d2917a6bfadbb272e08545ed026af3965e4d2fe71e3f38bf0a816818b226e"}, + {file = "coverage-7.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d9be5d26e5f817d478506e4d3c4ff7b92f17d980670b4791bf05baaa37ce2f88"}, + {file = "coverage-7.9.0-cp312-cp312-win32.whl", hash = "sha256:dc2784edd9ac9fe8692fc5505667deb0b05d895c016aaaf641031ed4a5f93d53"}, + {file = "coverage-7.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:18223198464a6d5549db1934cf77a15deb24bb88652c4f5f7cb21cd3ad853704"}, + {file = "coverage-7.9.0-cp312-cp312-win_arm64.whl", hash = "sha256:3b00194ff3c84d4b821822ff6c041f245fc55d0d5c7833fc4311d082e97595e8"}, + {file = "coverage-7.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:122c60e92ab66c9c88e17565f67a91b3b3be5617cb50f73cfd34a4c60ed4aab0"}, + {file = "coverage-7.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:813c11b367a6b3cf37212ec36b230f8d086c22b69dbf62877b40939fb2c79e74"}, + {file = 
"coverage-7.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f05e0f5e87f23d43fefe49e86655c6209dd4f9f034786b983e6803cf4554183"}, + {file = "coverage-7.9.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62f465886fa4f86d5515da525aead97c5dff13a5cf997fc4c5097a1a59e063b2"}, + {file = "coverage-7.9.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:549ea4ca901595bbe3270e1afdef98bf5d4d5791596efbdc90b00449a2bb1f91"}, + {file = "coverage-7.9.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8cae1d4450945c74a6a65a09864ed3eaa917055cf70aa65f83ac1b9b0d8d5f9a"}, + {file = "coverage-7.9.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d7b263910234c0d5ec913ec79ca921152fe874b805a7bcaf67118ef71708e5d2"}, + {file = "coverage-7.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d7b7425215963da8f5968096a20c5b5c9af4a86a950fcc25dcc2177ab33e9e5"}, + {file = "coverage-7.9.0-cp313-cp313-win32.whl", hash = "sha256:e7dcfa92867b0c53d2e22e985c66af946dc09e8bb13c556709e396e90a0adf5c"}, + {file = "coverage-7.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:aa34ca040785a2b768da489df0c036364d47a6c1c00bdd8f662b98fd3277d3d4"}, + {file = "coverage-7.9.0-cp313-cp313-win_arm64.whl", hash = "sha256:9c5dcb5cd3c52d84c5f52045e1c87c16bf189c2fbfa57cc0d811a3b4059939df"}, + {file = "coverage-7.9.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b52d2fdc1940f90c4572bd48211475a7b102f75a7f9a5e6cfc6e3da7dc380c44"}, + {file = "coverage-7.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4cc555a3e6ceb8841df01a4634374f5f9635e661f5c307da00bce19819e8bcdf"}, + {file = "coverage-7.9.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:244f613617876b7cd32a097788d49c952a8f1698afb25275b2a825a4e895854e"}, + {file = 
"coverage-7.9.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c335d77539e66bc6f83e8f1ef207d038129d9b9acd9dc9f0ca42fa9eedf564a"}, + {file = "coverage-7.9.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b335c7077c8da7bb8173d4f9ebd90ff1a97af6a6bec4fc4e6db4856ae80b31e"}, + {file = "coverage-7.9.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:01cbc2c36895b7ab906514042c92b3fc9dd0526bf1c3251cb6aefd9c71ae6dda"}, + {file = "coverage-7.9.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1ac62880a9dff0726a193ce77a1bcdd4e8491009cb3a0510d31381e8b2c46d7a"}, + {file = "coverage-7.9.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:95314eb306cf54af3d1147e27ba008cf78eed6f1309a1310772f4f05b12c9c65"}, + {file = "coverage-7.9.0-cp313-cp313t-win32.whl", hash = "sha256:c5cbf3ddfb68de8dc8ce33caa9321df27297a032aeaf2e99b278f183fb4ebc37"}, + {file = "coverage-7.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e3ec9e1525eb7a0f89d31083539b398d921415d884e9f55400002a1e9fe0cf63"}, + {file = "coverage-7.9.0-cp313-cp313t-win_arm64.whl", hash = "sha256:a02efe6769f74245ce476e89db3d4e110db07b4c0c3d3f81728e2464bbbbcb8e"}, + {file = "coverage-7.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:64dab59d812c1cbfc9cebadada377365874964acdf59b12e86487d25c2e0c29f"}, + {file = "coverage-7.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46b9dc640c6309fb49625d3569d4ba7abe2afcba645eb1e52bad97510f60ac26"}, + {file = "coverage-7.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89358f4025ed424861311b33815a2866f7c94856c932b0ffc98180f655e813e2"}, + {file = "coverage-7.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:589e37ae75d81fd53cd1ca624e07af4466e9e4ce259e3bfe2b147896857c06ea"}, + {file = 
"coverage-7.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29dea81eef5432076cee561329b3831bc988a4ce1bfaec90eee2078ff5311e6e"}, + {file = "coverage-7.9.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7b3482588772b6b24601d1677aef299af28d6c212c70b0be27bdfc2e10fb00fe"}, + {file = "coverage-7.9.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2debc0b9481b5fc76f771b3b31e89a0cd8791ad977654940a3523f3f2e5d98fe"}, + {file = "coverage-7.9.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:304ded640bc2a60f14a2ff0fec98cce4c3f2e573c122f0548728c8dceba5abe7"}, + {file = "coverage-7.9.0-cp39-cp39-win32.whl", hash = "sha256:8e0a3a3f9b968007e1f56418a3586f9a983c84ac4e84d28d1c4f8b76c4226282"}, + {file = "coverage-7.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:cb3c07dd71d1ff52156d35ee6fa48458c3cec1add7fcce6a934f977fb80c48a5"}, + {file = "coverage-7.9.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:ccf1540a0e82ff525844880f988f6caaa2d037005e57bfe203b71cac7626145d"}, + {file = "coverage-7.9.0-py3-none-any.whl", hash = "sha256:79ea9a26b27c963cdf541e1eb9ac05311b012bc367d0e31816f1833b06c81c02"}, + {file = "coverage-7.9.0.tar.gz", hash = "sha256:1a93b43de2233a7670a8bf2520fed8ebd5eea6a65b47417500a9d882b0533fa2"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "distlib" +version = "0.3.9" +description = "Distribution utilities" +optional = false +python-versions = "*" +groups = ["main", "dev"] +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +description = 
"Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version == \"3.10\"" +files = [ + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "filelock" +version = "3.18.0" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] + +[[package]] +name = "flake8" +version = "6.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +groups = ["main", "dev"] +files = [ + {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, + {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.10.0,<2.11.0" +pyflakes = ">=3.0.0,<3.1.0" + 
+[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +groups = ["dev"] +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." +optional = false +python-versions = "*" +groups = ["main", "dev"] +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["flake8", "markdown", "twine", "wheel"] + +[[package]] +name = "griffe" +version = "1.7.3" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
+optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "griffe-1.7.3-py3-none-any.whl", hash = "sha256:c6b3ee30c2f0f17f30bcdef5068d6ab7a2a4f1b8bf1a3e74b56fffd21e1c5f75"}, + {file = "griffe-1.7.3.tar.gz", hash = "sha256:52ee893c6a3a968b639ace8015bec9d36594961e156e23315c8e8e51401fa50b"}, +] + +[package.dependencies] +colorama = ">=0.4" + +[[package]] +name = "identify" +version = "2.6.12" +description = "File identification library for Python" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"}, + {file = "identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +groups = ["main", "dev"] +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +groups = ["dev"] +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markdown" +version = "3.8" +description = "Python implementation of John Gruber's Markdown." 
+optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "markdown-3.8-py3-none-any.whl", hash = "sha256:794a929b79c5af141ef5ab0f2f642d0f7b1872981250230e72682346f7cc90dc"}, + {file = "markdown-3.8.tar.gz", hash = "sha256:7df81e63f0df5c4b24b7d156eb81e4690595239b7d70937d0409f1b0de319c6f"}, +] + +[package.extras] +docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + +[[package]] +name = "mkdocs" +version = "1.6.0" +description = "Project documentation with Markdown." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mkdocs-1.6.0-py3-none-any.whl", hash = "sha256:1eb5cb7676b7d89323e62b56235010216319217d4af5ddc543a91beb8d125ea7"}, + {file = "mkdocs-1.6.0.tar.gz", hash = "sha256:a73f735824ef83a4f3bcb7a231dcab23f5a838f88b7efc54a0eef5fbdbc3c512"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +jinja2 = ">=2.11.1" +markdown = ">=3.3.6" +markupsafe = ">=2.0.1" +mergedeep = ">=1.3.4" +mkdocs-get-deps = ">=0.2.0" +packaging = ">=20.5" +pathspec = ">=0.11.1" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform_system == \"Windows\"", "ghp-import (==1.0)", "importlib-metadata (==4.4) ; python_version < \"3.10\"", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] + +[[package]] +name = "mkdocs" +version = "1.6.1" +description = "Project documentation with Markdown." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"}, + {file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +jinja2 = ">=2.11.1" +markdown = ">=3.3.6" +markupsafe = ">=2.0.1" +mergedeep = ">=1.3.4" +mkdocs-get-deps = ">=0.2.0" +packaging = ">=20.5" +pathspec = ">=0.11.1" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform_system == \"Windows\"", "ghp-import (==1.0)", "importlib-metadata (==4.4) ; python_version < \"3.10\"", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "0.5.0" +description = "Automatically link across pages in MkDocs." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mkdocs_autorefs-0.5.0-py3-none-any.whl", hash = "sha256:7930fcb8ac1249f10e683967aeaddc0af49d90702af111a5e390e8b20b3d97ff"}, + {file = "mkdocs_autorefs-0.5.0.tar.gz", hash = "sha256:9a5054a94c08d28855cfab967ada10ed5be76e2bfad642302a610b252c3274c0"}, +] + +[package.dependencies] +Markdown = ">=3.3" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-get-deps" +version = "0.2.0" +description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, + {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, +] + +[package.dependencies] +mergedeep = ">=1.3.4" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" + +[[package]] +name = "mkdocs-material" +version = "9.5.26" +description = "Documentation that simply works" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mkdocs_material-9.5.26-py3-none-any.whl", hash = "sha256:5d01fb0aa1c7946a1e3ae8689aa2b11a030621ecb54894e35aabb74c21016312"}, + {file = "mkdocs_material-9.5.26.tar.gz", hash = "sha256:56aeb91d94cffa43b6296fa4fbf0eb7c840136e563eecfd12c2d9e92e50ba326"}, +] + +[package.dependencies] +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.6,<2.0" +mkdocs-material-extensions = ">=1.3,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] +recommended = 
["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] + +[[package]] +name = "mkdocs-material" +version = "9.6.14" +description = "Documentation that simply works" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mkdocs_material-9.6.14-py3-none-any.whl", hash = "sha256:3b9cee6d3688551bf7a8e8f41afda97a3c39a12f0325436d76c86706114b721b"}, + {file = "mkdocs_material-9.6.14.tar.gz", hash = "sha256:39d795e90dce6b531387c255bd07e866e027828b7346d3eba5ac3de265053754"}, +] + +[package.dependencies] +babel = ">=2.10,<3.0" +backrefs = ">=5.7.post1,<6.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.1,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.6,<2.0" +mkdocs-material-extensions = ">=1.3,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<3)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] + +[[package]] +name = "mkdocs-material-extensions" +version = "1.3.1" +description = "Extension pack for Python Markdown and MkDocs Material." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.25.1" +description = "Automatic documentation from sources, for MkDocs." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mkdocstrings-0.25.1-py3-none-any.whl", hash = "sha256:da01fcc2670ad61888e8fe5b60afe9fee5781017d67431996832d63e887c2e51"}, + {file = "mkdocstrings-0.25.1.tar.gz", hash = "sha256:c3a2515f31577f311a9ee58d089e4c51fc6046dbd9e9b4c3de4c3194667fe9bf"}, +] + +[package.dependencies] +click = ">=7.0" +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.4" +mkdocs-autorefs = ">=0.3.1" +mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""} +platformdirs = ">=2.2.0" +pymdown-extensions = ">=6.3" + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings" +version = "0.25.2" +description = "Automatic documentation from sources, for MkDocs." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mkdocstrings-0.25.2-py3-none-any.whl", hash = "sha256:9e2cda5e2e12db8bb98d21e3410f3f27f8faab685a24b03b06ba7daa5b92abfc"}, + {file = "mkdocstrings-0.25.2.tar.gz", hash = "sha256:5cf57ad7f61e8be3111a2458b4e49c2029c9cb35525393b179f9c916ca8042dc"}, +] + +[package.dependencies] +click = ">=7.0" +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.4" +mkdocs-autorefs = ">=0.3.1" +mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""} +platformdirs = ">=2.2.0" +pymdown-extensions = ">=6.3" + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "1.10.8" +description = "A Python handler for mkdocstrings." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mkdocstrings_python-1.10.8-py3-none-any.whl", hash = "sha256:bb12e76c8b071686617f824029cb1dfe0e9afe89f27fb3ad9a27f95f054dcd89"}, + {file = "mkdocstrings_python-1.10.8.tar.gz", hash = "sha256:5856a59cbebbb8deb133224a540de1ff60bded25e54d8beacc375bb133d39016"}, +] + +[package.dependencies] +griffe = ">=0.49" +mkdocstrings = ">=0.25" + +[[package]] +name = "mypy" +version = "1.7.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, + {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, + {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, + {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, + {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, + {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, + {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, + {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, + {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, + {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, + {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, + {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, + {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, + {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, + {file = 
"mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, + {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, + {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, + {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy" +version = "1.16.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "mypy-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7909541fef256527e5ee9c0a7e2aeed78b6cda72ba44298d1334fe7881b05c5c"}, + {file = "mypy-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e71d6f0090c2256c713ed3d52711d01859c82608b5d68d4fa01a3fe30df95571"}, + {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:936ccfdd749af4766be824268bfe22d1db9eb2f34a3ea1d00ffbe5b5265f5491"}, + {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4086883a73166631307fdd330c4a9080ce24913d4f4c5ec596c601b3a4bdd777"}, + {file = "mypy-1.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:feec38097f71797da0231997e0de3a58108c51845399669ebc532c815f93866b"}, + {file = "mypy-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:09a8da6a0ee9a9770b8ff61b39c0bb07971cda90e7297f4213741b48a0cc8d93"}, + {file = "mypy-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:9f826aaa7ff8443bac6a494cf743f591488ea940dd360e7dd330e30dd772a5ab"}, + {file = "mypy-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82d056e6faa508501af333a6af192c700b33e15865bda49611e3d7d8358ebea2"}, + {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff"}, + {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666"}, + {file = "mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c"}, + {file = "mypy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:54066fed302d83bf5128632d05b4ec68412e1f03ef2c300434057d66866cea4b"}, + {file = "mypy-1.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5436d11e89a3ad16ce8afe752f0f373ae9620841c50883dc96f8b8805620b13"}, + {file = "mypy-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f2622af30bf01d8fc36466231bdd203d120d7a599a6d88fb22bdcb9dbff84090"}, + {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1"}, + {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8"}, + {file = "mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730"}, + {file = "mypy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:bd4e1ebe126152a7bbaa4daedd781c90c8f9643c79b9748caa270ad542f12bec"}, + {file = "mypy-1.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a9e056237c89f1587a3be1a3a70a06a698d25e2479b9a2f57325ddaaffc3567b"}, + {file = 
"mypy-1.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b07e107affb9ee6ce1f342c07f51552d126c32cd62955f59a7db94a51ad12c0"}, + {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b"}, + {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d"}, + {file = "mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52"}, + {file = "mypy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:55f9076c6ce55dd3f8cd0c6fff26a008ca8e5131b89d5ba6d86bd3f47e736eeb"}, + {file = "mypy-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f56236114c425620875c7cf71700e3d60004858da856c6fc78998ffe767b73d3"}, + {file = "mypy-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:15486beea80be24ff067d7d0ede673b001d0d684d0095803b3e6e17a886a2a92"}, + {file = "mypy-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f2ed0e0847a80655afa2c121835b848ed101cc7b8d8d6ecc5205aedc732b1436"}, + {file = "mypy-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb5fbc8063cb4fde7787e4c0406aa63094a34a2daf4673f359a1fb64050e9cb2"}, + {file = "mypy-1.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a5fcfdb7318c6a8dd127b14b1052743b83e97a970f0edb6c913211507a255e20"}, + {file = "mypy-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:2e7e0ad35275e02797323a5aa1be0b14a4d03ffdb2e5f2b0489fa07b89c67b21"}, + {file = "mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031"}, + {file = "mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab"}, +] + +[package.dependencies] 
+mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "orjson" +version = "3.10.18" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "orjson-3.10.18-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a45e5d68066b408e4bc383b6e4ef05e717c65219a9e1390abc6155a520cac402"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be3b9b143e8b9db05368b13b04c84d37544ec85bb97237b3a923f076265ec89c"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:9b0aa09745e2c9b3bf779b096fa71d1cc2d801a604ef6dd79c8b1bfef52b2f92"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53a245c104d2792e65c8d225158f2b8262749ffe64bc7755b00024757d957a13"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9495ab2611b7f8a0a8a505bcb0f0cbdb5469caafe17b0e404c3c746f9900469"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73be1cbcebadeabdbc468f82b087df435843c809cd079a565fb16f0f3b23238f"}, + {file = "orjson-3.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8936ee2679e38903df158037a2f1c108129dee218975122e37847fb1d4ac68"}, + {file = "orjson-3.10.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7115fcbc8525c74e4c2b608129bef740198e9a120ae46184dac7683191042056"}, + {file = "orjson-3.10.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:771474ad34c66bc4d1c01f645f150048030694ea5b2709b87d3bda273ffe505d"}, + {file = "orjson-3.10.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7c14047dbbea52886dd87169f21939af5d55143dad22d10db6a7514f058156a8"}, + {file = "orjson-3.10.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:641481b73baec8db14fdf58f8967e52dc8bda1f2aba3aa5f5c1b07ed6df50b7f"}, + {file = "orjson-3.10.18-cp310-cp310-win32.whl", hash = "sha256:607eb3ae0909d47280c1fc657c4284c34b785bae371d007595633f4b1a2bbe06"}, + {file = "orjson-3.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:8770432524ce0eca50b7efc2a9a5f486ee0113a5fbb4231526d414e6254eba92"}, + {file = "orjson-3.10.18-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e0a183ac3b8e40471e8d843105da6fbe7c070faab023be3b08188ee3f85719b8"}, + {file = "orjson-3.10.18-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5ef7c164d9174362f85238d0cd4afdeeb89d9e523e4651add6a5d458d6f7d42d"}, + {file = 
"orjson-3.10.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd14c5d99cdc7bf93f22b12ec3b294931518aa019e2a147e8aa2f31fd3240f7"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b672502323b6cd133c4af6b79e3bea36bad2d16bca6c1f645903fce83909a7a"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51f8c63be6e070ec894c629186b1c0fe798662b8687f3d9fdfa5e401c6bd7679"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9478ade5313d724e0495d167083c6f3be0dd2f1c9c8a38db9a9e912cdaf947"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187aefa562300a9d382b4b4eb9694806e5848b0cedf52037bb5c228c61bb66d4"}, + {file = "orjson-3.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da552683bc9da222379c7a01779bddd0ad39dd699dd6300abaf43eadee38334"}, + {file = "orjson-3.10.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e450885f7b47a0231979d9c49b567ed1c4e9f69240804621be87c40bc9d3cf17"}, + {file = "orjson-3.10.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5e3c9cc2ba324187cd06287ca24f65528f16dfc80add48dc99fa6c836bb3137e"}, + {file = "orjson-3.10.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:50ce016233ac4bfd843ac5471e232b865271d7d9d44cf9d33773bcd883ce442b"}, + {file = "orjson-3.10.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b3ceff74a8f7ffde0b2785ca749fc4e80e4315c0fd887561144059fb1c138aa7"}, + {file = "orjson-3.10.18-cp311-cp311-win32.whl", hash = "sha256:fdba703c722bd868c04702cac4cb8c6b8ff137af2623bc0ddb3b3e6a2c8996c1"}, + {file = "orjson-3.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:c28082933c71ff4bc6ccc82a454a2bffcef6e1d7379756ca567c772e4fb3278a"}, + {file = "orjson-3.10.18-cp311-cp311-win_arm64.whl", hash = 
"sha256:a6c7c391beaedd3fa63206e5c2b7b554196f14debf1ec9deb54b5d279b1b46f5"}, + {file = "orjson-3.10.18-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:50c15557afb7f6d63bc6d6348e0337a880a04eaa9cd7c9d569bcb4e760a24753"}, + {file = "orjson-3.10.18-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:356b076f1662c9813d5fa56db7d63ccceef4c271b1fb3dd522aca291375fcf17"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:559eb40a70a7494cd5beab2d73657262a74a2c59aff2068fdba8f0424ec5b39d"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f3c29eb9a81e2fbc6fd7ddcfba3e101ba92eaff455b8d602bf7511088bbc0eae"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6612787e5b0756a171c7d81ba245ef63a3533a637c335aa7fcb8e665f4a0966f"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ac6bd7be0dcab5b702c9d43d25e70eb456dfd2e119d512447468f6405b4a69c"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f72f100cee8dde70100406d5c1abba515a7df926d4ed81e20a9730c062fe9ad"}, + {file = "orjson-3.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dca85398d6d093dd41dc0983cbf54ab8e6afd1c547b6b8a311643917fbf4e0c"}, + {file = "orjson-3.10.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22748de2a07fcc8781a70edb887abf801bb6142e6236123ff93d12d92db3d406"}, + {file = "orjson-3.10.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3a83c9954a4107b9acd10291b7f12a6b29e35e8d43a414799906ea10e75438e6"}, + {file = "orjson-3.10.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:303565c67a6c7b1f194c94632a4a39918e067bd6176a48bec697393865ce4f06"}, + {file = "orjson-3.10.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:86314fdb5053a2f5a5d881f03fca0219bfdf832912aa88d18676a5175c6916b5"}, + {file = "orjson-3.10.18-cp312-cp312-win32.whl", hash = "sha256:187ec33bbec58c76dbd4066340067d9ece6e10067bb0cc074a21ae3300caa84e"}, + {file = "orjson-3.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:f9f94cf6d3f9cd720d641f8399e390e7411487e493962213390d1ae45c7814fc"}, + {file = "orjson-3.10.18-cp312-cp312-win_arm64.whl", hash = "sha256:3d600be83fe4514944500fa8c2a0a77099025ec6482e8087d7659e891f23058a"}, + {file = "orjson-3.10.18-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:69c34b9441b863175cc6a01f2935de994025e773f814412030f269da4f7be147"}, + {file = "orjson-3.10.18-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:1ebeda919725f9dbdb269f59bc94f861afbe2a27dce5608cdba2d92772364d1c"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5adf5f4eed520a4959d29ea80192fa626ab9a20b2ea13f8f6dc58644f6927103"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7592bb48a214e18cd670974f289520f12b7aed1fa0b2e2616b8ed9e069e08595"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f872bef9f042734110642b7a11937440797ace8c87527de25e0c53558b579ccc"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0315317601149c244cb3ecef246ef5861a64824ccbcb8018d32c66a60a84ffbc"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0da26957e77e9e55a6c2ce2e7182a36a6f6b180ab7189315cb0995ec362e049"}, + {file = "orjson-3.10.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb70d489bc79b7519e5803e2cc4c72343c9dc1154258adf2f8925d0b60da7c58"}, + {file = "orjson-3.10.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e9e86a6af31b92299b00736c89caf63816f70a4001e750bda179e15564d7a034"}, + {file = 
"orjson-3.10.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:c382a5c0b5931a5fc5405053d36c1ce3fd561694738626c77ae0b1dfc0242ca1"}, + {file = "orjson-3.10.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8e4b2ae732431127171b875cb2668f883e1234711d3c147ffd69fe5be51a8012"}, + {file = "orjson-3.10.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d808e34ddb24fc29a4d4041dcfafbae13e129c93509b847b14432717d94b44f"}, + {file = "orjson-3.10.18-cp313-cp313-win32.whl", hash = "sha256:ad8eacbb5d904d5591f27dee4031e2c1db43d559edb8f91778efd642d70e6bea"}, + {file = "orjson-3.10.18-cp313-cp313-win_amd64.whl", hash = "sha256:aed411bcb68bf62e85588f2a7e03a6082cc42e5a2796e06e72a962d7c6310b52"}, + {file = "orjson-3.10.18-cp313-cp313-win_arm64.whl", hash = "sha256:f54c1385a0e6aba2f15a40d703b858bedad36ded0491e55d35d905b2c34a4cc3"}, + {file = "orjson-3.10.18-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c95fae14225edfd699454e84f61c3dd938df6629a00c6ce15e704f57b58433bb"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5232d85f177f98e0cefabb48b5e7f60cff6f3f0365f9c60631fecd73849b2a82"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2783e121cafedf0d85c148c248a20470018b4ffd34494a68e125e7d5857655d1"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e54ee3722caf3db09c91f442441e78f916046aa58d16b93af8a91500b7bbf273"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2daf7e5379b61380808c24f6fc182b7719301739e4271c3ec88f2984a2d61f89"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f39b371af3add20b25338f4b29a8d6e79a8c7ed0e9dd49e008228a065d07781"}, + {file = "orjson-3.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2b819ed34c01d88c6bec290e6842966f8e9ff84b7694632e88341363440d4cc0"}, + {file = "orjson-3.10.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2f6c57debaef0b1aa13092822cbd3698a1fb0209a9ea013a969f4efa36bdea57"}, + {file = "orjson-3.10.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:755b6d61ffdb1ffa1e768330190132e21343757c9aa2308c67257cc81a1a6f5a"}, + {file = "orjson-3.10.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ce8d0a875a85b4c8579eab5ac535fb4b2a50937267482be402627ca7e7570ee3"}, + {file = "orjson-3.10.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57b5d0673cbd26781bebc2bf86f99dd19bd5a9cb55f71cc4f66419f6b50f3d77"}, + {file = "orjson-3.10.18-cp39-cp39-win32.whl", hash = "sha256:951775d8b49d1d16ca8818b1f20c4965cae9157e7b562a2ae34d3967b8f21c8e"}, + {file = "orjson-3.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:fdd9d68f83f0bc4406610b1ac68bdcded8c5ee58605cc69e643a06f4d075f429"}, + {file = "orjson-3.10.18.tar.gz", hash = "sha256:e8da3947d92123eda795b68228cafe2724815621fe35e8e320a9e9593a4bcd53"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "paginate" +version = "0.5.7" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +groups = ["main", "dev"] +files = [ + {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, + {file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, +] + +[package.extras] +dev = ["pytest", "tox"] +lint = 
["black"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "3.7.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pre-commit" +version = "3.8.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pycodestyle" +version = "2.10.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, + {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, +] + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pydantic" 
+version = "2.11.6" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.6-py3-none-any.whl", hash = "sha256:a24478d2be1b91b6d3bc9597439f69ed5e87f68ebd285d86f7c7932a084b72e7"}, + {file = "pydantic-2.11.6.tar.gz", hash = "sha256:12b45cfb4af17e555d3c6283d0b55271865fb0b43cc16dd0d52749dc7abf70e7"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = 
"pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = 
"pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = 
"pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", 
hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = 
"sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydocstyle" +version = "6.3.0" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] + +[package.dependencies] +snowballstemmer = ">=2.2.0" +tomli = {version = ">=1.2.3", optional = true, markers = "python_version < \"3.11\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli (>=1.2.3) ; python_version < \"3.11\""] + +[[package]] +name = "pyflakes" +version = "3.0.1" 
+description = "passive checker of Python programs" +optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pymdown-extensions" +version = "10.15" +description = "Extension pack for Python Markdown." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "pymdown_extensions-10.15-py3-none-any.whl", hash = "sha256:46e99bb272612b0de3b7e7caf6da8dd5f4ca5212c0b273feb9304e236c484e5f"}, + {file = "pymdown_extensions-10.15.tar.gz", hash = "sha256:0e5994e32155f4b03504f939e501b981d306daf7ec2aa1cd2eb6bd300784f8f7"}, +] + +[package.dependencies] +markdown = ">=3.6" +pyyaml = "*" + +[package.extras] +extra = ["pygments (>=2.19.1)"] + +[[package]] +name = "pytest" +version = "8.2.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest" +version = "8.4.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e"}, + {file = "pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1" +packaging = 
">=20" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "6.2.1" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5"}, + {file = "pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pluggy = ">=1.2" +pytest = ">=6.2.5" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file 
= "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = 
"sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "pyyaml-env-tag" +version = "1.1" +description = "A custom YAML tag for referencing environment variables in YAML files." 
+optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04"}, + {file = "pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff"}, +] + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "regex" +version = "2024.11.6" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = 
"regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = 
"regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, +] + +[[package]] +name = "requests" +version = "2.32.4" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.9.4" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["main", "dev"] +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "snakeviz" +version = "2.2.2" +description = "A web-based viewer for Python profiler output" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"dev\"" +files = [ + {file = 
"snakeviz-2.2.2-py3-none-any.whl", hash = "sha256:77e7b9c82f6152edc330040319b97612351cd9b48c706434c535c2df31d10ac5"}, + {file = "snakeviz-2.2.2.tar.gz", hash = "sha256:08028c6f8e34a032ff14757a38424770abb8662fb2818985aeea0d9bc13a7d83"}, +] + +[package.dependencies] +tornado = ">=2.0" + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" +groups = ["main", "dev"] +files = [ + {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, + {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, +] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +markers = "python_full_version <= \"3.11.0a6\"" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + 
{file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "tornado" +version = "6.5.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at 
FriendFeed." +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"dev\"" +files = [ + {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7"}, + {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6"}, + {file = "tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888"}, + {file = "tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331"}, + {file = "tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e"}, + {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401"}, + {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692"}, + {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a"}, + {file = "tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365"}, + {file = "tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b"}, + {file = "tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7"}, + {file = "tornado-6.5.1.tar.gz", hash = "sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c"}, +] + +[[package]] +name = "typing-extensions" +version = "4.14.0" 
+description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "urllib3" +version = "2.4.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "virtualenv" +version = "20.31.2" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"}, + {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] + +[[package]] +name = "watchdog" +version = "4.0.1" +description = "Filesystem 
events monitoring" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, + 
{file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, + {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, + {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, + {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, + {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "watchdog" +version = "4.0.2" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, + {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, + {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, + {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = 
"watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, + {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, + {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = 
"sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[extras] +dev = ["black", "flake8", "isort", "mkdocs", "mkdocs-autorefs", "mkdocs-material", "mkdocstrings", "mypy", "pre-commit", "pydocstyle", "pytest", "pytest-cov", "rich", "snakeviz", "watchdog"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.10" +content-hash = "d8f5e91ef871be1f9bc3902d58cd12491d06d92eeb04c453133b94bf55e555eb" diff --git a/pyproject.toml b/pyproject.toml index 25f0a33..18ec349 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,8 @@ keywords = [ ] dependencies = [ "pydantic>=2.5.0", + "orjson==3.10.18", + "rich==13.9.4", ] [project.optional-dependencies] @@ -51,6 +53,8 @@ dev = [ "mkdocs-material==9.5.26", "mkdocstrings[python]==0.25.1", "mkdocs-autorefs==0.5.0", + "snakeviz==2.2.2", + "rich==13.9.4", ] [project.urls] @@ -123,6 +127,7 @@ mkdocs = "^1.6.0" mkdocs-material = "^9.5.26" mkdocstrings = {version = "^0.25.1", extras = ["python"]} mkdocs-autorefs = "^0.5.0" +rich = "13.9.4" [tool.poetry.urls] "Homepage" = "https://github.com/v-pikulev/hario" diff --git a/src/hario_core/__init__.py b/src/hario_core/__init__.py index 4790429..db030ed 100644 --- a/src/hario_core/__init__.py +++ b/src/hario_core/__init__.py @@ -1,81 +1,16 @@ """ Hario Core package root. - -- Exposes main API: loading, parsing, enrichment, and extension registration - for HAR files. -- Imports and re-exports all core models, protocols, and utilities - for public use. -- See documentation for usage examples and extension patterns. 
""" -__version__ = "0.2.0" # Bump version after refactoring +__version__ = "0.4.0" -from hario_core.har_parser import entry_selector, parse, register_entry_model -from hario_core.interfaces import ( - EntryIdFn, - HarParser, - HarStorageRepository, - Processor, - Transformer, -) -from hario_core.models.extensions.chrome_devtools import DevToolsEntry -from hario_core.models.har_1_2 import ( - Browser, - Content, - Cookie, - Creator, - Entry, - HarLog, - Header, - Page, - PageTimings, - PostData, - PostParam, - QueryString, - Request, - Response, - Timings, -) -from hario_core.pipeline import Pipeline -from hario_core.utils.id import by_field, uuid -from hario_core.utils.transform import flatten, normalize_sizes, normalize_timings +from . import models, parse, transform __all__ = [ - # har_parser + # Models + "models", + # Parsing and validation "parse", - "entry_selector", - "register_entry_model", - # pipeline - "Pipeline", - # id utils - "by_field", - "uuid", - # transform utils - "flatten", - "normalize_sizes", - "normalize_timings", - # interfaces - "HarStorageRepository", - "HarParser", - "Processor", - "Transformer", - "EntryIdFn", - # models - "Entry", - "HarLog", - "Request", - "Response", - "Timings", - "Browser", - "Content", - "Cookie", - "Creator", - "Header", - "Page", - "PageTimings", - "PostData", - "PostParam", - "QueryString", - # extensions - "DevToolsEntry", + # Transforming + "transform", ] diff --git a/src/hario_core/interfaces.py b/src/hario_core/interfaces.py deleted file mode 100644 index d0229e7..0000000 --- a/src/hario_core/interfaces.py +++ /dev/null @@ -1,90 +0,0 @@ -from __future__ import annotations - -from typing import Any, Dict, List, Optional, Protocol, Tuple - -from hario_core.models.har_1_2 import Entry, HarLog - -""" -Type protocols and interfaces for hario-core. - -- Defines Protocols for HAR entries, enrichers, ID generators, and storage repositories. -- Enables type-safe extensibility and plug-in architecture. 
-- Used for static type checking and as contracts for core logic. -""" - -__all__ = [ - "HarParser", - "Processor", - "Transformer", - "EntryIdFn", - "HarStorageRepository", -] - - -class HarParser(Protocol): - """Protocol for a function that parses HAR data from a source.""" - - def __call__(self, src: Any) -> HarLog: - """Parses HAR data from a source.""" - ... - - -class Processor(Protocol): - """ - Protocol for a processor that can be called with a - source and returns a list of dicts. - """ - - def process(self, src: Any) -> List[Dict[str, Any]]: - """Processes the source and returns a list of dicts.""" - ... - - -class Transformer(Protocol): - """Protocol for a transformer that can be called with a dict.""" - - def __call__(self, entry: Entry) -> Dict[str, Any]: - """ - Transforms the dict in-place. - User can mutate/add/remove fields as needed. - """ - ... - - -class EntryIdFn(Protocol): - """Protocol for a function that generates an ID for a HAR entry.""" - - def __call__(self, entry: Entry) -> str: ... - - -class HarStorageRepository(Protocol): - """Interface (Protocol) for a repository that stores HAR data.""" - - def save(self, har_data: Dict[str, Any]) -> None: - """Saves a single HAR document.""" - ... - - def save_many(self, har_data: List[Dict[str, Any]]) -> Tuple[int, int, List[str]]: - """ - Saves multiple HAR documents in bulk. - Returns a tuple of (success_count, failure_count, errors). - """ - ... - - def get_by_id(self, doc_id: str) -> Dict[str, Any]: - """Retrieves a document by its ID.""" - ... - - def find_all(self, query: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]: - """Finds all documents, optionally matching a query.""" - ... - - def wait_for_connection( - self, max_retries: int = 10, delay: int = 10 - ) -> Tuple[bool, str, str]: - """Waits for the storage to become available.""" - ... 
- - def create_index_if_not_exists(self, mapping: Dict[str, Any]) -> None: - """Creates the necessary index/table if it doesn't exist.""" - ... diff --git a/src/hario_core/models/__init__.py b/src/hario_core/models/__init__.py index e69de29..29f876a 100644 --- a/src/hario_core/models/__init__.py +++ b/src/hario_core/models/__init__.py @@ -0,0 +1,8 @@ +from .extensions.chrome_devtools import DevToolsEntry +from .har_1_2 import Entry, HarLog + +__all__ = [ + "Entry", + "HarLog", + "DevToolsEntry", +] diff --git a/src/hario_core/models/extensions/chrome_devtools.py b/src/hario_core/models/extensions/chrome_devtools.py index 036ad6a..67e3a0b 100644 --- a/src/hario_core/models/extensions/chrome_devtools.py +++ b/src/hario_core/models/extensions/chrome_devtools.py @@ -58,6 +58,13 @@ class DevToolsRequest(Request): requestId: Optional[str] = Field(None, alias="_requestId") +class DevToolsWebSocketMessage(BaseModel): + type: str + time: float + opcode: int + data: str + + class DevToolsEntry(Entry): """HAR Entry object with DevTools extensions.""" @@ -65,6 +72,9 @@ class DevToolsEntry(Entry): priority: Optional[str] = Field(None, alias="_priority") resourceType: str = Field(alias="_resourceType") connectionId: Optional[str] = Field(None, alias="_connectionId") + webSocketMessages: Optional[List[DevToolsWebSocketMessage]] = Field( + None, alias="_webSocketMessages" + ) # Override fields from base Entry to use the extended models request: DevToolsRequest diff --git a/src/hario_core/parse/__init__.py b/src/hario_core/parse/__init__.py new file mode 100644 index 0000000..438b86a --- /dev/null +++ b/src/hario_core/parse/__init__.py @@ -0,0 +1,14 @@ +from .har_parser import entry_selector, parse, register_entry_model, validate +from .interfaces import HarParser, JsonSource + +__all__ = [ + # Parsers and validators + "parse", + "validate", + # Utils + "register_entry_model", + "entry_selector", + # Interfaces + "HarParser", + "JsonSource", +] diff --git 
a/src/hario_core/har_parser.py b/src/hario_core/parse/har_parser.py similarity index 62% rename from src/hario_core/har_parser.py rename to src/hario_core/parse/har_parser.py index b1f55e1..de7fe08 100644 --- a/src/hario_core/har_parser.py +++ b/src/hario_core/parse/har_parser.py @@ -6,14 +6,15 @@ - Handles both standard HAR and Chrome DevTools extensions out of the box. """ -import json from pathlib import Path -from typing import IO, Any, Callable, Union, cast +from typing import Any, Callable, Dict, cast +import orjson from pydantic import ValidationError from hario_core.models.extensions.chrome_devtools import DevToolsEntry from hario_core.models.har_1_2 import Entry, HarLog +from hario_core.parse.interfaces import JsonSource # The registry for custom Entry models. # It's a list of (detector_function, model_class) tuples. @@ -36,21 +37,6 @@ def register_entry_model( ENTRY_MODEL_REGISTRY.insert(0, (detector, model)) -def entry_selector(entry_json: dict[str, Any]) -> type[Entry]: - """Selects an Entry model by checking the registry. - - It iterates through the registered detectors and returns the first model - that matches. If no custom model matches, it returns the base Entry model. - """ - for detector, model in ENTRY_MODEL_REGISTRY: - if detector(entry_json): - return model - return Entry # Default model - - -# --- Default registrations --- - - def is_devtools_entry(entry_json: dict[str, Any]) -> bool: """Detects if an entry is from Chrome DevTools by checking for keys starting with an underscore. @@ -58,27 +44,39 @@ def is_devtools_entry(entry_json: dict[str, Any]) -> bool: return any(key.startswith("_") for key in entry_json) +# --- Default registrations --- # Register the built-in DevTools extension register_entry_model(is_devtools_entry, DevToolsEntry) -JsonSource = Union[str, Path, bytes, bytearray, IO[Any]] +def entry_selector(entry_json: dict[str, Any]) -> type[Entry]: + """Selects an Entry model by checking the registry. 
+ + It iterates through the registered detectors and returns the first model + that matches. If no custom model matches, it returns the base Entry model. + """ + for detector, model in ENTRY_MODEL_REGISTRY: + if detector(entry_json): + return model + return Entry # Default model -def _read_json(src: JsonSource) -> dict[str, Any]: +def _to_bytes(src: JsonSource) -> bytes: if isinstance(src, (str, Path)): with open(src, "rb") as fh: - return cast(dict[str, Any], json.load(fh)) + return fh.read() if isinstance(src, (bytes, bytearray)): - return cast(dict[str, Any], json.loads(src)) - # assume file‑like - return cast(dict[str, Any], json.load(src)) + return cast(bytes, src) + return cast(bytes, src.read()) + + +def _read_json(src: JsonSource) -> dict[str, Any]: + return cast(dict[str, Any], orjson.loads(_to_bytes(src))) def parse( src: JsonSource, - *, - entry_model_selector: Callable[[dict[str, Any]], type[Entry]] = entry_selector, + *args: Any, ) -> HarLog: """Parse *src* into a validated `HarLog` instance. @@ -91,18 +89,39 @@ def parse( data = _read_json(src) if not isinstance(data, dict): raise ValueError("Invalid HAR file: root element must be a JSON object") - log_data = data["log"] - raw_entries = log_data["entries"] + return validate(data) + except (KeyError, ValidationError, orjson.JSONDecodeError) as exc: + raise ValueError("Invalid HAR file") from exc - # Validate entries one by one using the selector - validated_entries = [ - entry_model_selector(entry).model_validate(entry) for entry in raw_entries - ] - # Replace raw entries with validated models - log_data["entries"] = validated_entries +def validate(har_dict: Dict[str, Any]) -> HarLog: + """ + Validate HAR-structure (dict) with support for extensions. + All entries are validated by one model, defined by the first entry. + Returns HarLog with Entry/DevToolsEntry. 
- # Validate the entire HarLog object at once - return HarLog.model_validate(log_data) - except (KeyError, ValidationError, json.JSONDecodeError) as exc: - raise ValueError("Invalid HAR file") from exc + Args: + har_dict: dict + The HAR-structure to validate. + + Returns: + HarLog + """ + if "log" not in har_dict: + raise ValueError("Invalid HAR file: missing 'log'") + if not isinstance(har_dict["log"], dict): + raise ValueError("Invalid HAR file: 'log' must be a dict") + if "entries" not in har_dict["log"]: + raise ValueError("Invalid HAR file: missing 'entries' in 'log'") + if not isinstance(har_dict["log"]["entries"], list): + raise ValueError("Invalid HAR file: 'entries' must be a list") + entries = har_dict["log"]["entries"] + if not entries: + log_copy = dict(har_dict["log"]) + log_copy["entries"] = [] + return HarLog.model_validate(log_copy) + model_cls = entry_selector(entries[0]) + validated_entries = [model_cls.model_validate(entry) for entry in entries] + log_copy = dict(har_dict["log"]) + log_copy["entries"] = validated_entries + return HarLog.model_validate(log_copy) diff --git a/src/hario_core/parse/interfaces.py b/src/hario_core/parse/interfaces.py new file mode 100644 index 0000000..87dde0b --- /dev/null +++ b/src/hario_core/parse/interfaces.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from pathlib import Path +from typing import IO, Any, Protocol, Union + +from hario_core.models.har_1_2 import HarLog + +JsonSource = Union[str, Path, bytes, bytearray, IO[Any]] + + +class HarParser(Protocol): + """Protocol for a function that parses HAR data from a source.""" + + def __call__(self, src: Any) -> HarLog: + """Parses HAR data from a source.""" + ... 
diff --git a/src/hario_core/pipeline.py b/src/hario_core/pipeline.py deleted file mode 100644 index 4044208..0000000 --- a/src/hario_core/pipeline.py +++ /dev/null @@ -1,52 +0,0 @@ -from typing import Any, Sequence - -from hario_core.models.har_1_2 import HarLog -from hario_core.utils.id import EntryIdFn -from hario_core.utils.transform import Transformer - -__all__ = ["Pipeline"] - - -class Pipeline: - """ - Pipeline for processing HAR data (HarLog, Pydantic model). - - Args: - id_fn: EntryIdFn - A function that generates an ID for an entry. - id_field: str - The field name to store the generated ID. - Defaults to "id". - transformers: Sequence[Transformer] - A sequence of transformers to apply to HAR entries. - Defaults to an empty sequence. - """ - - def __init__( - self, - id_fn: EntryIdFn, - id_field: str = "id", - transformers: Sequence[Transformer] = (), - ): - self.id_fn = id_fn - self.transformers = list(transformers) - self.id_field = id_field - - def process(self, har_log: HarLog) -> list[dict[str, Any]]: - """ - Process a HarLog object (already parsed HAR data, Pydantic model). - Returns a list of transformed dicts with assigned IDs. 
- """ - if not hasattr(har_log, "entries") or not isinstance(har_log.entries, list): - raise TypeError( - "Pipeline.process expects a HarLog (Pydantic model with .entries)" - ) - results = [] - for entry in har_log.entries: - entry_dict = entry.model_dump() - for transform in self.transformers: - entry_dict = transform(entry) - id = self.id_fn(entry) - entry_dict[self.id_field] = id - results.append(entry_dict) - return results diff --git a/src/hario_core/transform/__init__.py b/src/hario_core/transform/__init__.py new file mode 100644 index 0000000..64bce53 --- /dev/null +++ b/src/hario_core/transform/__init__.py @@ -0,0 +1,22 @@ +from .defaults import by_field, json_array_handler, uuid +from .interfaces import Processor, ProcessorConfig, Transformer +from .pipeline import Pipeline, PipelineConfig +from .transform import flatten, normalize_sizes, normalize_timings, set_id + +__all__ = [ + "Pipeline", + # Transformers + "flatten", + "normalize_sizes", + "normalize_timings", + "set_id", + # Utils + "by_field", + "uuid", + "json_array_handler", + "PipelineConfig", + # Interfaces + "Transformer", + "Processor", + "ProcessorConfig", +] diff --git a/src/hario_core/transform/defaults.py b/src/hario_core/transform/defaults.py new file mode 100644 index 0000000..cbe963f --- /dev/null +++ b/src/hario_core/transform/defaults.py @@ -0,0 +1,61 @@ +import hashlib +import uuid as uuid_lib +from datetime import datetime +from typing import Any, Dict, cast + +import orjson + + +class ByField: + """ + A class that generates a deterministic ID based on + the specified fields of an entry dictionary. 
+ """ + + def __init__(self, fields: list[str]): + self.fields = fields + + def get_field_value(self, entry: Dict[str, Any], field_path: str) -> str: + value: Any = entry + for part in field_path.split("."): + if not isinstance(value, dict): + raise ValueError(f"Field '{field_path}' is not a dictionary") + value = value[part] + if value is None: + raise ValueError(f"Field '{field_path}' is None") + + # Special handling for datetime + if isinstance(value, datetime): + # Format in ISO 8601 with Z at the end + return value.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" + return str(value) + + def __call__(self, entry: Dict[str, Any]) -> str: + raw_id_parts = [self.get_field_value(entry, field) for field in self.fields] + raw_id = ":".join(raw_id_parts).encode() + return hashlib.blake2b(raw_id, digest_size=16).hexdigest() + + +def by_field(fields: list[str]) -> ByField: + return ByField(fields) + + +class UUID: + def __call__(self, entry: Dict[str, Any]) -> str: + """ + Returns a function that generates a UUID for an entry. + """ + return str(uuid_lib.uuid4()) + + +def uuid() -> UUID: + return UUID() + + +def json_array_handler(arr: list[Any], path: str) -> str: + """ + JSON array handler that returns a compact JSON string. + """ + if not arr: + return "[]" + return cast(str, orjson.dumps(arr).decode("utf-8")) diff --git a/src/hario_core/transform/interfaces.py b/src/hario_core/transform/interfaces.py new file mode 100644 index 0000000..0c5d3e5 --- /dev/null +++ b/src/hario_core/transform/interfaces.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from typing import Any, Dict, List, Optional, Protocol, runtime_checkable + + +class Processor(Protocol): + """ + Protocol for a processor that can be called with a + source and returns a list of dicts. + """ + + def process(self, entries: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """Processes the source and returns a list of dicts.""" + ... 
+ + +class ProcessorConfig(Protocol): + """Protocol for a processor configuration.""" + + batch_size: int + processing_strategy: str + max_workers: Optional[int] + + +@runtime_checkable +class Transformer(Protocol): + """Protocol for transformers that process HAR entries.""" + + def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]: + """Transform the data. + + Args: + data: The data to transform. + + Returns: + The transformed data. + """ + ... diff --git a/src/hario_core/transform/pipeline.py b/src/hario_core/transform/pipeline.py new file mode 100644 index 0000000..900f5b6 --- /dev/null +++ b/src/hario_core/transform/pipeline.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any, Optional, Sequence + +from hario_core.transform.interfaces import Processor, ProcessorConfig, Transformer +from hario_core.transform.strategies import ( + AsyncStrategy, + ProcessingStrategy, + ProcessPoolStrategy, + SequentialStrategy, + ThreadPoolStrategy, +) + + +def _chunked(seq: list[Any], size: int) -> list[list[Any]]: + return [seq[i : i + size] for i in range(0, len(seq), size)] + + +@dataclass +class PipelineConfig(ProcessorConfig): + batch_size: int = 20000 + processing_strategy: str = "sequential" + max_workers: Optional[int] = None + + +DEFAULT_PIPELINE_CONFIG = PipelineConfig() + + +class Pipeline(Processor): + """ + Pipeline for processing HAR data (HarLog, Pydantic model). + Uses threading for parallel transformation. + + Args: + transformers: Sequence[Transformer] + A sequence of transformers to apply to HAR entries. + Defaults to an empty sequence. + config: PipelineConfig + Configuration object with batch_size, processing_strategy, max_workers. + If not provided, DEFAULT_PIPELINE_CONFIG is used.
+ """ + + def __init__( + self, + transformers: Sequence[Transformer] = (), + config: PipelineConfig = DEFAULT_PIPELINE_CONFIG, + ): + self.transformers = list(transformers) + self.config = config + self.batch_size = self.config.batch_size + self.strategy = self._get_strategy( + self.config.processing_strategy, self.config.max_workers + ) + + def _get_strategy( + self, strategy_name: str, max_workers: Optional[int] + ) -> ProcessingStrategy: + strategies = { + "process": ProcessPoolStrategy(max_workers), + "thread": ThreadPoolStrategy(max_workers), + "sequential": SequentialStrategy(), + "async": AsyncStrategy(), + } + return strategies.get(strategy_name, ProcessPoolStrategy(max_workers)) + + def process(self, entries: list[dict[str, Any]]) -> list[dict[str, Any]]: + """ + Process a list of HAR entry dicts (model_dump'ed entries). + Returns a list of transformed dicts with assigned IDs. + """ + if not isinstance(entries, list) or ( + entries and not isinstance(entries[0], dict) + ): + raise TypeError( + "Pipeline.process expects a list of dicts (model_dump'ed entries)" + ) + batches = _chunked(entries, self.batch_size) + return self.strategy.process_batches(batches, self.transformers) diff --git a/src/hario_core/transform/strategies.py b/src/hario_core/transform/strategies.py new file mode 100644 index 0000000..7ef2459 --- /dev/null +++ b/src/hario_core/transform/strategies.py @@ -0,0 +1,110 @@ +from abc import ABC, abstractmethod +from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, as_completed +from typing import Any, Dict, List, Optional + +from hario_core.transform.interfaces import Transformer +from hario_core.transform.worker import init_worker, process_batch + + +class ProcessingStrategy(ABC): + """ + Abstract base class for processing strategies. + + Args: + batches: List[List[Dict[str, Any]]] + A list of batches of HAR entries to process. + transformers: List[Transformer] + A list of transformers to apply to the HAR entries. 
+ + Returns: + List[Dict[str, Any]] + """ + + @abstractmethod + def process_batches( + self, batches: List[List[Dict[str, Any]]], transformers: List[Transformer] + ) -> List[Dict[str, Any]]: + pass + + +class ProcessPoolStrategy(ProcessingStrategy): + """ + Processing strategy that uses a ProcessPoolExecutor to process batches in parallel. + """ + + def __init__(self, max_workers: Optional[int] = None): + self.max_workers = max_workers + + def process_batches( + self, batches: List[List[Dict[str, Any]]], transformers: List[Transformer] + ) -> List[Dict[str, Any]]: + results: List[Dict[str, Any]] = [] + with ProcessPoolExecutor( + max_workers=self.max_workers, + initializer=init_worker, + initargs=(transformers,), + ) as executor: + futures = [ + executor.submit(process_batch, batch, transformers) for batch in batches + ] + for future in as_completed(futures): + results.extend(future.result()) + return results + + +class ThreadPoolStrategy(ProcessingStrategy): + """ + Processing strategy that uses a ThreadPoolExecutor to process batches in parallel. + """ + + def __init__(self, max_workers: Optional[int] = None): + self.max_workers = max_workers + + def process_batches( + self, batches: List[List[Dict[str, Any]]], transformers: List[Transformer] + ) -> List[Dict[str, Any]]: + results: List[Dict[str, Any]] = [] + with ThreadPoolExecutor(max_workers=self.max_workers) as executor: + futures = [ + executor.submit(process_batch, batch, transformers) for batch in batches + ] + for future in as_completed(futures): + results.extend(future.result()) + return results + + +class SequentialStrategy(ProcessingStrategy): + """ + Processing strategy that processes batches sequentially. 
+ """ + + def process_batches( + self, batches: List[List[Dict[str, Any]]], transformers: List[Transformer] + ) -> List[Dict[str, Any]]: + results: List[Dict[str, Any]] = [] + for batch in batches: + results.extend(process_batch(batch, transformers)) + return results + + +class AsyncStrategy(ProcessingStrategy): + """ + Processing strategy that processes batches asynchronously. + """ + + def process_batches( + self, batches: List[List[Dict[str, Any]]], transformers: List[Transformer] + ) -> List[Dict[str, Any]]: + import asyncio + + async def process_batch_async( + batch: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + return process_batch(batch, transformers) + + async def process_all() -> List[List[Dict[str, Any]]]: + tasks = [process_batch_async(batch) for batch in batches] + return await asyncio.gather(*tasks) + + results = asyncio.run(process_all()) + return [item for sublist in results for item in sublist] diff --git a/src/hario_core/transform/transform.py b/src/hario_core/transform/transform.py new file mode 100644 index 0000000..23228ad --- /dev/null +++ b/src/hario_core/transform/transform.py @@ -0,0 +1,130 @@ +""" +Transformation logic for HAR data. +This module provides a set of functions that can be used to transform HAR data. +""" + +from typing import Any, Callable, Dict, Optional + +from hario_core.transform.defaults import json_array_handler +from hario_core.transform.interfaces import Transformer + + +class NormalizeSizes: + """ + A transformer that normalizes the sizes of the request and response. 
+ """ + + def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]: + for path in [ + ("request", "headersSize"), + ("request", "bodySize"), + ("response", "headersSize"), + ("response", "bodySize"), + ("response", "content", "size"), + ]: + parent = data + for key in path[:-1]: + parent = parent.get(key, {}) + last = path[-1] + if last in parent and isinstance(parent[last], int) and parent[last] < 0: + parent[last] = 0 + return data + + +def normalize_sizes() -> Transformer: + return NormalizeSizes() + + +class NormalizeTimings: + """ + A transformer that normalizes the timings of the request. + """ + + def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]: + timing_fields = [ + ("timings", "blocked"), + ("timings", "dns"), + ("timings", "connect"), + ("timings", "send"), + ("timings", "wait"), + ("timings", "receive"), + ("timings", "ssl"), + ] + for path in timing_fields: + parent = data + for key in path[:-1]: + parent = parent.get(key, {}) + last = path[-1] + if ( + isinstance(parent, dict) + and last in parent + and isinstance(parent[last], (int, float)) + and parent[last] < 0 + ): + parent[last] = 0.0 + return data + + +def normalize_timings() -> Transformer: + return NormalizeTimings() + + +class Flatten(Transformer): + """ + A transformer that flattens the nested structure of the HAR data. 
+ """ + + def __init__( + self, + separator: str = ".", + array_handler: Optional[Callable[[list[Any], str], Any]] = None, + ): + self.separator = separator + self.array_handler = array_handler or json_array_handler + + def _flatten( + self, obj: Any, parent_key: str = "", result: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + if result is None: + result = {} + if isinstance(obj, dict): + for k, v in obj.items(): + new_key = f"{parent_key}{self.separator}{k}" if parent_key else k + self._flatten(v, new_key, result) + elif isinstance(obj, list): + value = self.array_handler(obj, parent_key) + if isinstance(value, dict): + result.update(value) + else: + result[parent_key] = value + else: + result[parent_key] = obj + return result + + def __call__(self, doc: Dict[str, Any]) -> Dict[str, Any]: + return self._flatten(doc) + + +def flatten( + separator: str = ".", + array_handler: Optional[Callable[[list[Any], str], Any]] = None, +) -> Transformer: + return Flatten(separator, array_handler or json_array_handler) + + +class SetId: + """ + A transformer that sets the ID of the HAR data. + """ + + def __init__(self, id_fn: Callable[[Dict[str, Any]], str], id_field: str = "id"): + self.id_fn = id_fn + self.id_field = id_field + + def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]: + data[self.id_field] = self.id_fn(data) + return data + + +def set_id(id_fn: Callable[[Dict[str, Any]], str], id_field: str = "id") -> Transformer: + return SetId(id_fn, id_field) diff --git a/src/hario_core/transform/worker.py b/src/hario_core/transform/worker.py new file mode 100644 index 0000000..d7f3a74 --- /dev/null +++ b/src/hario_core/transform/worker.py @@ -0,0 +1,46 @@ +from typing import Any, Dict, List + +from hario_core.transform.interfaces import Transformer + +_transformers: List[Transformer] = [] + + +def init_worker(transformers: List[Transformer]) -> None: + """ + Initialize the worker with the provided transformers. 
+ + Args: + transformers: List of transformers to apply + """ + global _transformers + _transformers = transformers + + +def process_entry(entry_dict: Dict[str, Any]) -> Dict[str, Any]: + """ + Process an entry dictionary using the provided transformers. + + Args: + entry_dict: Dictionary representing an entry + """ + for transform in _transformers: + entry_dict = transform(entry_dict) + return entry_dict + + +def process_batch( + batch: List[Dict[str, Any]], transformers: List[Transformer] +) -> List[Dict[str, Any]]: + """ + Process a batch of entries using the provided transformers. + + Args: + batch: List of entry dictionaries to process + transformers: List of transformers to apply + + Returns: + List of processed entry dictionaries + """ + global _transformers + _transformers = transformers + return [process_entry(entry) for entry in batch] diff --git a/src/hario_core/utils/id.py b/src/hario_core/utils/id.py deleted file mode 100644 index e22ccf5..0000000 --- a/src/hario_core/utils/id.py +++ /dev/null @@ -1,37 +0,0 @@ -import hashlib -import uuid as uuid_lib -from typing import Any - -from hario_core.interfaces import EntryIdFn -from hario_core.models.har_1_2 import Entry - -__all__ = ["by_field", "uuid", "EntryIdFn"] - - -def by_field(fields: list[str]) -> EntryIdFn: - """ - Returns a lambda that generates a deterministic ID - based on the specified fields of an entry. - """ - - def get_field_value(entry: Entry, field_path: str) -> str: - value: Any = entry - for part in field_path.split("."): - value = getattr(value, part, None) - if value is None: - raise AttributeError(f"Field '{field_path}' not found in entry") - return str(value) - - def id_func(entry: Entry) -> str: - raw_id_parts = [get_field_value(entry, field) for field in fields] - raw_id = ":".join(raw_id_parts).encode() - return hashlib.blake2b(raw_id, digest_size=16).hexdigest() - - return id_func - - -def uuid() -> EntryIdFn: - """ - Returns a lambda that generates a UUID for an entry. 
- """ - return lambda entry: str(uuid_lib.uuid4()) diff --git a/src/hario_core/utils/path.py b/src/hario_core/utils/path.py deleted file mode 100644 index 4e298d2..0000000 --- a/src/hario_core/utils/path.py +++ /dev/null @@ -1,36 +0,0 @@ -import re -from typing import Any, Tuple, Union - - -def parse_path(path: str) -> Tuple[Union[str, int], ...]: - """ - Convert dot-path like 'foo.bar[0].baz' to tuple ('foo', 'bar', 0, 'baz'). - """ - parts = [] - for part in path.split("."): - m = re.match(r"(\w+)(\[(\d+)\])?", part) - if m: - parts.append(m.group(1)) - if m.group(3): - parts.append(int(m.group(3))) - return tuple(parts) - - -def get_or_create_by_path(obj: Any, path: Tuple[Union[str, int], ...]) -> Any: - """ - Walks the structure by path, creating dicts/lists as needed. - Returns the parent container for the last element in path. - """ - parent = obj - for key in path: - if isinstance(parent, list) and isinstance(key, int): - while len(parent) <= key: - parent.append({}) - parent = parent[key] - elif isinstance(parent, dict) and isinstance(key, str): - if key not in parent or parent[key] is None: - parent[key] = {} - parent = parent[key] - else: - raise TypeError(f"Invalid path: {path}") - return parent diff --git a/src/hario_core/utils/transform.py b/src/hario_core/utils/transform.py deleted file mode 100644 index 2214107..0000000 --- a/src/hario_core/utils/transform.py +++ /dev/null @@ -1,117 +0,0 @@ -""" -Transformation logic for HAR data. -This module provides a set of functions that can be used to transform HAR data. -""" - -import json -from typing import Any, Dict, Protocol - -from hario_core.models.har_1_2 import Entry - -__all__ = [ - "flatten", - "normalize_sizes", - "normalize_timings", -] - - -class Transformer(Protocol): - def __call__(self, entry: Entry) -> Dict[str, Any]: ... - - -def flatten(max_depth: int = 3, size_limit: int = 32_000) -> Transformer: - """ - Flattens the HAR data into a single level. 
- This is useful for storing HAR data in a database. - - Args: - max_depth: The maximum depth of the nested data to flatten. - size_limit: The maximum size (in bytes) of the nested data to flatten. - """ - - def transformer(entry: Entry) -> Dict[str, Any]: - doc = entry.model_dump() - - def _should_stringify(name: str, value: Any, depth: int) -> bool: - if not isinstance(value, (dict, list)): - return False - if ( - isinstance(value, list) - and len(json.dumps(value, separators=(",", ":"))) > size_limit - ): - return True - if depth >= max_depth: - return True - return False - - result = doc.copy() - queue = [(result, k, k, 1) for k in list(result.keys())] - while queue: - parent, key, path, depth = queue.pop(0) - value = parent[key] - if _should_stringify(path, value, depth): - parent[key] = json.dumps(value, ensure_ascii=False) - continue - if isinstance(value, dict): - for child_key in list(value.keys()): - queue.append((value, child_key, f"{path}.{child_key}", depth + 1)) - elif isinstance(value, list) and value and isinstance(value[0], dict): - for i, item in enumerate(value): - if isinstance(item, dict): - for child_key in list(item.keys()): - queue.append( - (item, child_key, f"{path}[{i}].{child_key}", depth + 1) - ) - return result - - return transformer - - -def normalize_sizes() -> Transformer: - def transformer(entry: Entry) -> Dict[str, Any]: - data = entry.model_dump() - for path in [ - ("request", "headersSize"), - ("request", "bodySize"), - ("response", "headersSize"), - ("response", "bodySize"), - ("response", "content", "size"), - ]: - parent = data - for key in path[:-1]: - parent = parent.get(key, {}) - last = path[-1] - if last in parent and isinstance(parent[last], int) and parent[last] < 0: - parent[last] = 0 - return data - - return transformer - - -def normalize_timings() -> Transformer: - def transformer(entry: Entry) -> Dict[str, Any]: - data = entry.model_dump() - timing_fields = [ - ("timings", "blocked"), - ("timings", "dns"), - 
("timings", "connect"), - ("timings", "send"), - ("timings", "wait"), - ("timings", "receive"), - ("timings", "ssl"), - ] - for path in timing_fields: - parent = data - for key in path[:-1]: - parent = parent.get(key, {}) - last = path[-1] - if ( - isinstance(parent, dict) - and last in parent - and isinstance(parent[last], (int, float)) - and parent[last] < 0 - ): - parent[last] = 0.0 - return data - - return transformer diff --git a/tests/conftest.py b/tests/conftest.py index 28b0179..2c7279e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,10 +1,11 @@ """Pytest fixtures for HARP.""" -from typing import Any, Dict, cast +from typing import Any, Dict, List, cast import pytest -from hario_core.models.har_1_2 import Entry, HarLog +from hario_core.models import DevToolsEntry, Entry, HarLog +from hario_core.parse import validate from .samples import CHROME_DEVTOOLS_HAR, CLEANED_HAR @@ -31,7 +32,7 @@ def cleaned_entry() -> Dict[str, Any]: @pytest.fixture def chrome_devtools_entry_model() -> Entry: - return Entry.model_validate(CHROME_DEVTOOLS_HAR["log"]["entries"][0]) + return DevToolsEntry.model_validate(CHROME_DEVTOOLS_HAR["log"]["entries"][0]) @pytest.fixture @@ -39,11 +40,26 @@ def cleaned_entry_model() -> Entry: return Entry.model_validate(CLEANED_HAR["log"]["entries"][0]) +@pytest.fixture +def cleaned_log() -> HarLog: + return validate(CLEANED_HAR) + + @pytest.fixture def chrome_devtools_log() -> HarLog: - return HarLog.model_validate(CHROME_DEVTOOLS_HAR["log"]) + return validate(CHROME_DEVTOOLS_HAR) @pytest.fixture -def cleaned_log() -> HarLog: - return HarLog.model_validate(CLEANED_HAR["log"]) +def cleaned_entries(cleaned_log: HarLog) -> List[Dict[str, Any]]: + return cast(List[Dict[str, Any]], cleaned_log.model_dump()["entries"]) + + +@pytest.fixture +def chrome_devtools_entries(chrome_devtools_log: HarLog) -> List[Dict[str, Any]]: + return cast(List[Dict[str, Any]], chrome_devtools_log.model_dump()["entries"]) + + +@pytest.fixture +def 
entries_fixture(request: Any) -> Any: + return request.getfixturevalue(request.param) diff --git a/tests/samples.py b/tests/samples.py index 7e1efbb..8e63888 100644 --- a/tests/samples.py +++ b/tests/samples.py @@ -1,8 +1,9 @@ """Sample HAR data for testing purposes.""" -import json from typing import Any, Dict, List +import orjson + # Общий массив pages для обоих сэмплов PAGES: List[Dict[str, Any]] = [ { @@ -155,7 +156,7 @@ } } -CHROME_DEVTOOLS_HAR_BYTES: bytes = json.dumps(CHROME_DEVTOOLS_HAR).encode("utf-8") +CHROME_DEVTOOLS_HAR_BYTES: bytes = orjson.dumps(CHROME_DEVTOOLS_HAR) # Valid HAR 1.2 CLEANED_HAR: Dict[str, Any] = { @@ -271,7 +272,7 @@ } } -CLEANED_HAR_BYTES: bytes = json.dumps(CLEANED_HAR).encode("utf-8") +CLEANED_HAR_BYTES: bytes = orjson.dumps(CLEANED_HAR) # Edge-case: HAR without log field (based on real HAR) INVALID_HAR_NO_LOG: Dict[str, Any] = { diff --git a/tests/test_defaults.py b/tests/test_defaults.py new file mode 100644 index 0000000..f68bb70 --- /dev/null +++ b/tests/test_defaults.py @@ -0,0 +1,91 @@ +from typing import Any, Dict + +import pytest + +from hario_core.transform import by_field, json_array_handler, uuid + + +class TestDefaults: + def test_by_field_deterministic(self, cleaned_entry: Dict[str, Any]) -> None: + id_fn = by_field(["request.url", "startedDateTime"]) + id1 = id_fn(cleaned_entry) + id2 = id_fn(cleaned_entry) + assert id1 == id2 + + def test_by_field_different_fields(self, cleaned_entry: Dict[str, Any]) -> None: + from copy import deepcopy + + data1 = deepcopy(cleaned_entry) + data2 = deepcopy(cleaned_entry) + data2["request"]["url"] = "http://other-url.com" + id_fn = by_field(["request.url", "startedDateTime"]) + id1 = id_fn(data1) + id2 = id_fn(data2) + assert id1 != id2 + + def test_by_field_nested_field(self, cleaned_entry: Dict[str, Any]) -> None: + id_fn = by_field(["response.content.mimeType"]) + id_val = id_fn(cleaned_entry) + assert isinstance(id_val, str) + assert len(id_val) == 32 + + def 
test_by_field_dict_and_attr_access(self, cleaned_entry: Dict[str, Any]) -> None: + orig_size = cleaned_entry["response"]["content"]["size"] + id_fn = by_field(["response.content.size"]) + id_val = id_fn(cleaned_entry) + assert id_val == id_fn(cleaned_entry) + assert cleaned_entry["response"]["content"]["size"] == orig_size + + def test_uuid_different(self, cleaned_entry: Dict[str, Any]) -> None: + id_fn = uuid() + id1 = id_fn(cleaned_entry) + id2 = id_fn(cleaned_entry) + assert id1 != id2 + + def test_by_field_missing_field(self, cleaned_entry: Dict[str, Any]) -> None: + id_fn = by_field(["request.nonexistent"]) + with pytest.raises(KeyError): + id_fn(cleaned_entry) + + def test_by_field_not_dict_raises_value_error( + self, cleaned_entry: Dict[str, Any] + ) -> None: + # Make "url" a string, so the next step can't access "something" + id_fn = by_field(["request.url.something"]) + with pytest.raises( + ValueError, match="Field 'request.url.something' is not a dictionary" + ): + id_fn(cleaned_entry) + + def test_by_field_none_raises_value_error( + self, cleaned_entry: Dict[str, Any] + ) -> None: + # Make "url" a string, so the next step can't access "something" + cleaned_entry["request"]["nonexistent"] = None + id_fn = by_field(["request.nonexistent"]) + with pytest.raises(ValueError, match="Field 'request.nonexistent' is None"): + id_fn(cleaned_entry) + # remove the None + del cleaned_entry["request"]["nonexistent"] + + def test_uuid_unique(self, cleaned_entry: Dict[str, Any]) -> None: + uuid_fn = uuid() + ids = {uuid_fn(cleaned_entry) for _ in range(10)} + assert len(ids) == 10 + for val in ids: + assert isinstance(val, str) + assert len(val) == 36 + + def test_json_array_handler_empty(self) -> None: + assert json_array_handler([], "some.path") == "[]" + + def test_json_array_handler_numbers(self) -> None: + arr = [1, 2, 3] + result = json_array_handler(arr, "numbers") + assert result == "[1,2,3]" + + def test_json_array_handler_dicts(self) -> None: + arr = [{"a": 
1}, {"b": 2}] + result = json_array_handler(arr, "dicts") + # orjson.dumps returns compact JSON without spaces + assert result == '[{"a":1},{"b":2}]' diff --git a/tests/test_har_parser.py b/tests/test_har_parser.py index 2d8b7f1..82c45c8 100644 --- a/tests/test_har_parser.py +++ b/tests/test_har_parser.py @@ -6,17 +6,23 @@ - Ensure correct error handling for invalid and edge-case inputs. """ +import io import json from pathlib import Path -from typing import Any, Dict +from typing import Any, Callable, Dict, Type from unittest.mock import patch +import orjson import pytest -from hario_core import har_parser -from hario_core.har_parser import entry_selector, parse, register_entry_model -from hario_core.models.extensions.chrome_devtools import DevToolsEntry -from hario_core.models.har_1_2 import Entry +from hario_core.models import DevToolsEntry, Entry, HarLog +from hario_core.parse import ( + entry_selector, + har_parser, + parse, + register_entry_model, + validate, +) from .samples import ( CHROME_DEVTOOLS_HAR, @@ -40,35 +46,71 @@ class SafariEntry(Entry): class TestHarParser: - def test_entry_selector_with_devtools(self) -> None: - """Tests that the DevTools model is correctly selected.""" - entry_json = CHROME_DEVTOOLS_HAR["log"]["entries"][0] - model = entry_selector(entry_json) - assert model is DevToolsEntry - - def test_entry_selector_with_clean_har(self) -> None: - """Tests that the default model is selected for a clean HAR.""" - entry_json = CLEANED_HAR["log"]["entries"][0] - model = entry_selector(entry_json) - assert model is Entry - - def test_load_har_with_bytes(self) -> None: - """Tests that a HAR file is correctly loaded from bytes.""" - har_log = parse(CHROME_DEVTOOLS_HAR_BYTES) - assert len(har_log.entries) == 1 - assert isinstance(har_log.entries[0], Entry) - - def test_load_har_with_clean_bytes(self) -> None: - """Tests loading a clean HAR from bytes.""" - har_log = parse(CLEANED_HAR_BYTES) + @pytest.mark.parametrize( + "har_input, loader, 
expected_type", + [ + ("cleaned_har", validate, Entry), + (CLEANED_HAR_BYTES, parse, Entry), + ("chrome_devtools_har", validate, DevToolsEntry), + (CHROME_DEVTOOLS_HAR_BYTES, parse, DevToolsEntry), + ], + ) + def test_load_har( + self, + request: Any, + har_input: Dict[str, Any], + loader: Callable[[Any], HarLog], + expected_type: Type[Entry], + ) -> None: + if isinstance(har_input, str): + har_input = request.getfixturevalue(har_input) + har_log = loader(har_input) + assert isinstance(har_log.entries, list) assert len(har_log.entries) == 1 - entry = har_log.entries[0] - assert isinstance(entry, Entry) - assert entry.request.url == "https://test.test/assets/css/f2aaccf1.css" + assert isinstance(har_log.entries[0], expected_type) + + @pytest.mark.parametrize( + "entry_fixture, expected_model", + [ + ("chrome_devtools_entry", DevToolsEntry), + ("cleaned_entry", Entry), + ], + ) + def test_entry_selector( + self, request: Any, entry_fixture: Dict[str, Any], expected_model: Type[Entry] + ) -> None: + entry_json = request.getfixturevalue(entry_fixture) + model = entry_selector(entry_json) + assert model is expected_model + + @pytest.mark.parametrize( + "har_dict, expected_type", + [ + ("cleaned_har", Entry), + ("chrome_devtools_har", DevToolsEntry), + ], + ) + def test_validate_with_dict( + self, request: Any, har_dict: Dict[str, Any], expected_type: Type[Entry] + ) -> None: + har_log = validate(request.getfixturevalue(har_dict)) + assert isinstance(har_log.entries[0], expected_type) + + @pytest.mark.parametrize( + "har_bytes, expected_type", + [ + (CLEANED_HAR_BYTES, Entry), + (CHROME_DEVTOOLS_HAR_BYTES, DevToolsEntry), + ], + ) + def test_load_har_with_bytes( + self, har_bytes: bytes, expected_type: Type[Entry] + ) -> None: + har_log = parse(har_bytes) + assert isinstance(har_log.entries[0], expected_type) def test_load_har_with_mixed_entries(self) -> None: - """Tests loading a HAR with mixed entry types.""" - har_log = 
parse(json.dumps(CHROME_DEVTOOLS_HAR).encode("utf-8")) + har_log = validate(CHROME_DEVTOOLS_HAR) assert len(har_log.entries) == 1 assert isinstance(har_log.entries[0], Entry) assert isinstance(har_log.entries[0], DevToolsEntry) @@ -76,7 +118,7 @@ def test_load_har_with_mixed_entries(self) -> None: assert har_log.entries[0].initiator is not None assert har_log.entries[0].initiator.type == "parser" - @patch("hario_core.har_parser.ENTRY_MODEL_REGISTRY", []) + @patch("hario_core.parse.har_parser.ENTRY_MODEL_REGISTRY", []) def test_register_entry_model(self) -> None: """Tests the registration of a custom entry model.""" @@ -107,7 +149,7 @@ def custom_detector(entry: Dict[str, Any]) -> bool: entry_data = dict(CHROME_DEVTOOLS_HAR["log"]["entries"][0]) # Test with a valid custom entry with patch( - "hario_core.har_parser.ENTRY_MODEL_REGISTRY", + "hario_core.parse.har_parser.ENTRY_MODEL_REGISTRY", [(custom_detector, CustomEntry)], ): model = entry_selector(entry_data) @@ -115,17 +157,10 @@ def custom_detector(entry: Dict[str, Any]) -> bool: # Test with a default entry when no custom model matches entry_data.pop("_connectionId") - with patch("hario_core.har_parser.ENTRY_MODEL_REGISTRY", []): + with patch("hario_core.parse.har_parser.ENTRY_MODEL_REGISTRY", []): model = entry_selector(entry_data) assert model is Entry - def test_load_har_with_dict(self) -> None: - """Tests loading a HAR from a dictionary by encoding it first.""" - result = parse(json.dumps(CLEANED_HAR).encode("utf-8")) - assert isinstance(result.entries, list) - assert len(result.entries) == 1 - assert isinstance(result.entries[0], Entry) - def test_load_har_from_file(self, tmp_path: Path) -> None: """Tests loading a HAR from a file path.""" file_path = tmp_path / "test.har" @@ -134,47 +169,74 @@ def test_load_har_from_file(self, tmp_path: Path) -> None: result = parse(file_path) assert isinstance(result.entries, list) - def test_load_har_with_invalid_json(self) -> None: + def test_parse_with_file_like(self, 
cleaned_har: Dict[str, Any]) -> None: + data = orjson.dumps(cleaned_har) + file_like = io.BytesIO(data) + har_log = parse(file_like) + assert hasattr(har_log, "entries") + assert isinstance(har_log.entries, list) + + @pytest.mark.parametrize( + "invalid_bytes", + [ + b"not a json", + orjson.dumps(INVALID_HAR_NO_LOG), + orjson.dumps(INVALID_HAR_NO_VERSION), + orjson.dumps(INVALID_HAR_NO_ENTRIES), + orjson.dumps(INVALID_HAR_LOG_EMPTY), + orjson.dumps(INVALID_HAR_LOG_WITH_VERSION_BUT_NO_ENTRIES), + orjson.dumps(INVALID_HAR_ROOT_NOT_DICT), + ], + ) + def test_load_har_invalid_cases(self, invalid_bytes: bytes) -> None: """Tests loading a HAR with invalid JSON content.""" - with pytest.raises(ValueError, match="Invalid HAR file"): - parse(b"not a json") - - def test_load_har_missing_log(self) -> None: - """Tests loading a HAR with a missing 'log' field.""" - with pytest.raises(ValueError, match="Invalid HAR file"): - parse(json.dumps(INVALID_HAR_NO_LOG).encode("utf-8")) + with pytest.raises(ValueError): + parse(invalid_bytes) def test_load_har_with_invalid_file_path(self) -> None: """Tests loading a HAR from a non-existent file path.""" with pytest.raises(FileNotFoundError): parse(Path("non_existent_file.har")) - def test_load_har_missing_version(self) -> None: - """Tests that a HAR file with a missing version raises an error.""" - with pytest.raises(ValueError, match="Invalid HAR file"): - parse(json.dumps(INVALID_HAR_NO_VERSION).encode("utf-8")) - - def test_load_har_missing_entries(self) -> None: - """Tests that a HAR file with a missing entries raises an error.""" - with pytest.raises(ValueError, match="Invalid HAR file"): - parse(json.dumps(INVALID_HAR_NO_ENTRIES).encode("utf-8")) - - def test_load_har_log_without_entries(self) -> None: - """Checks that ValueError is raised if 'log' has no 'entries'.""" - with pytest.raises(ValueError, match="Invalid HAR file"): - parse(json.dumps(INVALID_HAR_LOG_EMPTY).encode("utf-8")) - - def 
test_load_har_log_with_version_but_no_entries(self) -> None: - """ - Checks that ValueError is raised if 'log' has 'version' - and 'creator', but no 'entries'. - """ - with pytest.raises(ValueError, match="Invalid HAR file"): - parse( - json.dumps(INVALID_HAR_LOG_WITH_VERSION_BUT_NO_ENTRIES).encode("utf-8") - ) - - def test_load_har_root_not_dict(self) -> None: - """Checks that ValueError is raised if root element is not a dict.""" - with pytest.raises(ValueError, match="root element must be a JSON object"): - parse(json.dumps(INVALID_HAR_ROOT_NOT_DICT).encode("utf-8")) + def test_entry_selector_returns_default_entry( + self, cleaned_entry: Dict[str, Any] + ) -> None: + def never_true_detector(entry: Dict[str, Any]) -> bool: + return False + + register_entry_model(never_true_detector, DevToolsEntry) + model = entry_selector(cleaned_entry) + assert model is Entry + + def test_validate_empty_entries(self, cleaned_har: Dict[str, Any]) -> None: + har = dict(cleaned_har) + har["log"] = dict(har["log"]) + har["log"]["entries"] = [] + har_log = validate(har) + assert isinstance(har_log, HarLog) + assert har_log.entries == [] + + def test_validate_multiple_entries(self, cleaned_har: Dict[str, Any]) -> None: + har = dict(cleaned_har) + har["log"] = dict(har["log"]) + entry = har["log"]["entries"][0] + har["log"]["entries"] = [entry, entry] + har_log = validate(har) + assert isinstance(har_log, HarLog) + assert len(har_log.entries) == 2 + assert all(isinstance(e, Entry) for e in har_log.entries) + + def test_validate_with_invalid_log_structure( + self, cleaned_har: Dict[str, Any] + ) -> None: + har = dict(cleaned_har) + har["log"] = "not a dict" + with pytest.raises(ValueError): + validate(har) + + def test_validate_with_invalid_entries(self, cleaned_har: Dict[str, Any]) -> None: + har = dict(cleaned_har) + har["log"] = dict(har["log"]) + har["log"]["entries"] = dict() + with pytest.raises(ValueError): + validate(har) diff --git a/tests/test_id.py b/tests/test_id.py index 
09da850..1f7745b 100644 --- a/tests/test_id.py +++ b/tests/test_id.py @@ -2,16 +2,14 @@ import pytest -from hario_core.models.har_1_2 import Entry -from hario_core.utils.id import by_field, uuid +from hario_core.transform import by_field, uuid class TestId: - def test_by_field_deterministic(self, cleaned_entry_model: Entry) -> None: + def test_by_field_deterministic(self, cleaned_entry: Dict[str, Any]) -> None: id_fn = by_field(["request.url", "startedDateTime"]) - id1 = id_fn(cleaned_entry_model) - id2 = id_fn(cleaned_entry_model) - assert isinstance(id1, str) + id1 = id_fn(cleaned_entry) + id2 = id_fn(cleaned_entry) assert id1 == id2 def test_by_field_different_fields(self, cleaned_entry: Dict[str, Any]) -> None: @@ -19,52 +17,61 @@ def test_by_field_different_fields(self, cleaned_entry: Dict[str, Any]) -> None: data1 = deepcopy(cleaned_entry) data2 = deepcopy(cleaned_entry) - entry1 = Entry.model_validate(data1) data2["request"]["url"] = "http://other-url.com" - entry2 = Entry.model_validate(data2) id_fn = by_field(["request.url", "startedDateTime"]) - id1 = id_fn(entry1) - id2 = id_fn(entry2) + id1 = id_fn(data1) + id2 = id_fn(data2) assert id1 != id2 - def test_by_field_nested_field( - self, cleaned_entry_model: Entry, cleaned_entry: Dict[str, Any] - ) -> None: + def test_by_field_nested_field(self, cleaned_entry: Dict[str, Any]) -> None: id_fn = by_field(["response.content.mimeType"]) - id_val = id_fn(cleaned_entry_model) + id_val = id_fn(cleaned_entry) assert isinstance(id_val, str) assert len(id_val) == 32 - from copy import deepcopy - data2 = deepcopy(cleaned_entry) - data2["response"]["content"]["mimeType"] = "other/type" - entry2 = Entry.model_validate(data2) - id_val2 = id_fn(entry2) - assert id_val != id_val2 + def test_by_field_dict_and_attr_access(self, cleaned_entry: Dict[str, Any]) -> None: + orig_size = cleaned_entry["response"]["content"]["size"] + id_fn = by_field(["response.content.size"]) + id_val = id_fn(cleaned_entry) + assert id_val == 
id_fn(cleaned_entry) + assert cleaned_entry["response"]["content"]["size"] == orig_size - def test_by_field_missing_field(self, cleaned_entry_model: Entry) -> None: + def test_uuid_different(self, cleaned_entry: Dict[str, Any]) -> None: + id_fn = uuid() + id1 = id_fn(cleaned_entry) + id2 = id_fn(cleaned_entry) + assert id1 != id2 + + def test_by_field_missing_field(self, cleaned_entry: Dict[str, Any]) -> None: id_fn = by_field(["request.nonexistent"]) - with pytest.raises(AttributeError): - id_fn(cleaned_entry_model) + with pytest.raises(KeyError): + id_fn(cleaned_entry) - def test_uuid_unique(self, cleaned_entry_model: Entry) -> None: + def test_uuid_unique(self, cleaned_entry: Dict[str, Any]) -> None: uuid_fn = uuid() - ids = {uuid_fn(cleaned_entry_model) for _ in range(10)} + ids = {uuid_fn(cleaned_entry) for _ in range(10)} assert len(ids) == 10 for val in ids: assert isinstance(val, str) assert len(val) == 36 - def test_by_field_dict_and_attr_access(self, cleaned_entry_model: Entry) -> None: - orig_size = cleaned_entry_model.response.content.size - id_fn = by_field(["response.content.size"]) - id_val = id_fn(cleaned_entry_model) - assert isinstance(id_val, str) - assert len(id_val) == 32 - cleaned_entry_model.response.content.size = orig_size + 1 - id_val2 = id_fn(cleaned_entry_model) - assert id_val != id_val2 - id_fn2 = by_field(["request.url"]) - id_val_obj = id_fn2(cleaned_entry_model) - assert isinstance(id_val_obj, str) - assert len(id_val_obj) == 32 + def test_by_field_not_dict_raises_value_error( + self, cleaned_entry: Dict[str, Any] + ) -> None: + # Make "url" a string, so the next step can't access "something" + id_fn = by_field(["request.url.something"]) + with pytest.raises( + ValueError, match="Field 'request.url.something' is not a dictionary" + ): + id_fn(cleaned_entry) + + def test_by_field_none_raises_value_error( + self, cleaned_entry: Dict[str, Any] + ) -> None: + # Make "url" a string, so the next step can't access "something" + 
cleaned_entry["request"]["nonexistent"] = None + id_fn = by_field(["request.nonexistent"]) + with pytest.raises(ValueError, match="Field 'request.nonexistent' is None"): + id_fn(cleaned_entry) + # remove the None + del cleaned_entry["request"]["nonexistent"] diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py index 16c9214..ee2fb20 100644 --- a/tests/test_pipeline.py +++ b/tests/test_pipeline.py @@ -1,52 +1,103 @@ -from typing import Any, Dict +from typing import Any, Dict, List import pytest -from hario_core.models.har_1_2 import HarLog -from hario_core.pipeline import Pipeline -from hario_core.utils.id import by_field, uuid -from hario_core.utils.transform import normalize_sizes +from hario_core.transform import ( + Pipeline, + PipelineConfig, + by_field, + flatten, + normalize_sizes, + set_id, + uuid, +) class TestPipeline: - def test_pipeline_basic(self, cleaned_log: HarLog) -> None: + @pytest.mark.parametrize( + "entries_fixture", ["cleaned_entries", "chrome_devtools_entries"], indirect=True + ) + def test_pipeline_basic(self, entries_fixture: List[Dict[str, Any]]) -> None: pipeline = Pipeline( - id_fn=uuid(), + transformers=[set_id(uuid())], ) - results = pipeline.process(cleaned_log) + results = pipeline.process(entries_fixture) assert len(results) == 1 assert "id" in results[0] - assert ( - results[0]["request"]["url"] == "https://test.test/assets/css/f2aaccf1.css" - ) + assert results[0]["request"]["url"] == entries_fixture[0]["request"]["url"] - def test_pipeline_custom_id_field(self, cleaned_log: HarLog) -> None: + def test_pipeline_custom_id_field( + self, cleaned_entries: List[Dict[str, Any]] + ) -> None: pipeline = Pipeline( - id_fn=uuid(), - id_field="custom_id", + transformers=[set_id(uuid(), id_field="custom_id")], ) - results = pipeline.process(cleaned_log) + results = pipeline.process(cleaned_entries) assert "custom_id" in results[0] assert "id" not in results[0] - def test_pipeline_with_transformer(self, cleaned_log: HarLog) -> None: + 
def test_pipeline_with_transformer( + self, cleaned_entries: List[Dict[str, Any]] + ) -> None: pipeline = Pipeline( - id_fn=uuid(), - transformers=[normalize_sizes()], + transformers=[set_id(uuid()), normalize_sizes()], ) - results = pipeline.process(cleaned_log) + results = pipeline.process(cleaned_entries) assert results[0]["request"]["headersSize"] == 0 - def test_pipeline_with_deterministic_id(self, cleaned_log: HarLog) -> None: + def test_pipeline_with_deterministic_id( + self, cleaned_entries: List[Dict[str, Any]], cleaned_entry: Dict[str, Any] + ) -> None: + id_generator = by_field(["request.url", "startedDateTime"]) pipeline = Pipeline( - id_fn=by_field(["request.url", "startedDateTime"]), + transformers=[set_id(id_generator)], ) - results = pipeline.process(cleaned_log) - id1 = pipeline.id_fn(cleaned_log.entries[0]) + id1 = id_generator(cleaned_entry) + results = pipeline.process(cleaned_entries) assert results[0]["id"] == id1 def test_pipeline_invalid_input_typeerror(self) -> None: - pipeline = Pipeline(id_fn=uuid()) - not_harlog: Dict[str, Any] = {"entries": []} - with pytest.raises(TypeError, match="Pipeline.process expects a HarLog"): - pipeline.process(not_harlog) # type: ignore + pipeline = Pipeline(transformers=[set_id(uuid())]) + not_entries: Dict[str, Any] = {"entries": []} + with pytest.raises(TypeError, match="Pipeline.process expects a list of dicts"): + pipeline.process(not_entries) # type: ignore + + @pytest.mark.parametrize( + "config", + [ + PipelineConfig( + batch_size=2, + processing_strategy="process", + max_workers=4, + ), + PipelineConfig( + batch_size=2, + processing_strategy="thread", + max_workers=4, + ), + PipelineConfig( + batch_size=2, processing_strategy="sequential", max_workers=None + ), + PipelineConfig(batch_size=2, processing_strategy="async", max_workers=None), + ], + ) + def test_pipeline_strategies( + self, + cleaned_entries: List[Dict[str, Any]], + config: PipelineConfig, + ) -> None: + """Test Pipeline with different 
processing strategies.""" + pipeline = Pipeline( + transformers=[ + set_id(by_field(["request.url", "startedDateTime"])), + normalize_sizes(), + flatten(), + ], + config=config, + ) + results = pipeline.process(cleaned_entries) + assert len(results) == len(cleaned_entries) + assert "id" in results[0] + assert results[0]["request.headersSize"] == 0 + assert "request.headers" in results[0] + assert isinstance(results[0]["request.headers"], str) diff --git a/tests/test_transform.py b/tests/test_transform.py index e1286c8..56dd8a5 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -1,106 +1,17 @@ """ Unit tests for transformation logic in hario-core. - -- Test transformation of HAR entries for storage - (stringifying deep/large structures). -- Cover edge cases for nested dicts, lists, and missing content. """ from copy import deepcopy -from typing import Any, Dict +from typing import Any, Dict, List import pytest -from hario_core.models.har_1_2 import Entry -from hario_core.utils.transform import flatten, normalize_sizes, normalize_timings - -# Используем только реальные сэмплы через фикстуры +from hario_core.models import Entry +from hario_core.transform import flatten, normalize_sizes, normalize_timings class TestTransform: - def test_transform_max_depth(self, cleaned_entry: Dict[str, Any]) -> None: - doc = deepcopy(cleaned_entry) - doc["cache"]["level1"] = {"level2": {"level3": {"level4": "value"}}} - entry = Entry.model_validate(doc) - transformed = flatten()(entry) - val = transformed["cache"]["level1"] - if isinstance(val, Dict) and "level2" in val: - val2 = val["level2"] - if isinstance(val2, Dict) and "level3" in val2: - assert isinstance(val2["level3"], str) - else: - assert isinstance(val2, str) - else: - assert isinstance(val, str) - - def test_transform_size_limit(self, cleaned_entry: Dict[str, Any]) -> None: - doc = deepcopy(cleaned_entry) - large_list = [{"a": "b"}] * 5000 - doc["cache"]["data"] = large_list - entry = 
Entry.model_validate(doc) - transformed = flatten()(entry) - assert isinstance(transformed["cache"]["data"], str) - - def test_no_transform_needed(self, cleaned_entry: Dict[str, Any]) -> None: - doc = deepcopy(cleaned_entry) - doc["cache"]["level1"] = {"key": "value"} - doc["cache"]["list"] = [1, 2, 3] - entry = Entry.model_validate(doc) - original_doc = entry.model_dump() - transformed = flatten()(entry) - assert transformed == original_doc - - def test_transform_with_no_content(self, cleaned_entry: Dict[str, Any]) -> None: - doc = deepcopy(cleaned_entry) - doc["response"]["status"] = 200 - entry = Entry.model_validate(doc) - transformed = flatten()(entry) - assert "content" in transformed["response"] - assert transformed["response"]["status"] == 200 - - def test_transform_list_of_dicts(self, cleaned_entry: Dict[str, Any]) -> None: - doc = deepcopy(cleaned_entry) - doc["cache"]["level1"] = [ - {"level2_a": {"level3_a": {"level4_a": "value"}}}, - {"level2_b": {"level3_b": "value"}}, - ] - entry = Entry.model_validate(doc) - transformed = flatten()(entry) - assert isinstance(transformed["cache"]["level1"], list) - val = transformed["cache"]["level1"][0]["level2_a"] - if isinstance(val, Dict) and "level3_a" in val: - assert isinstance(val["level3_a"], str) - else: - assert isinstance(val, str) - val2 = transformed["cache"]["level1"][1]["level2_b"] - assert isinstance(val2, (Dict, str)) - - def test_flatten_depth_and_size_limit(self, cleaned_entry: Dict[str, Any]) -> None: - doc = deepcopy(cleaned_entry) - doc["cache"]["a"] = {"b": {"c": {"d": [1, 2, 3]}}} - entry = Entry.model_validate(doc) - flat = flatten(max_depth=2)(entry) - val = flat["cache"]["a"] - if isinstance(val, Dict) and "b" in val: - bval = val["b"] - assert isinstance(bval, str) - assert '"c": {"d": [1, 2, 3]}' in bval - else: - assert isinstance(val, str) - big_list = list(range(10000)) - doc2 = deepcopy(cleaned_entry) - doc2["cache"]["arr"] = big_list - entry2 = Entry.model_validate(doc2) - flat2 = 
flatten(size_limit=100)(entry2) - assert isinstance(flat2["cache"]["arr"], str) - doc3 = deepcopy(cleaned_entry) - doc3["cache"]["x"] = 1 - doc3["cache"]["y"] = [1, 2, 3] - entry3 = Entry.model_validate(doc3) - flat3 = flatten()(entry3) - assert flat3["cache"]["x"] == 1 - assert flat3["cache"]["y"] == [1, 2, 3] - def test_normalize_sizes(self, cleaned_entry: Dict[str, Any]) -> None: doc = deepcopy(cleaned_entry) doc["request"]["headersSize"] = -1 @@ -109,7 +20,7 @@ def test_normalize_sizes(self, cleaned_entry: Dict[str, Any]) -> None: doc["response"]["bodySize"] = -2 doc["response"]["content"]["size"] = -100 entry = Entry.model_validate(doc) - result = normalize_sizes()(entry) + result = normalize_sizes()(entry.model_dump()) assert result["request"]["headersSize"] == 0 assert result["response"]["bodySize"] == 0 assert result["response"]["content"]["size"] == 0 @@ -124,7 +35,7 @@ def test_normalize_timings(self, cleaned_entry: Dict[str, Any]) -> None: doc["timings"]["receive"] = -100 doc["timings"]["ssl"] = -3 entry = Entry.model_validate(doc) - result = normalize_timings()(entry) + result = normalize_timings()(entry.model_dump()) t = result["timings"] assert t["blocked"] == 0.0 assert t["dns"] == 0.0 @@ -134,48 +45,6 @@ def test_normalize_timings(self, cleaned_entry: Dict[str, Any]) -> None: assert t["connect"] == 0 assert t["send"] == 1 - def test_flatten_empty_and_noop(self, cleaned_entry_model: Entry) -> None: - flat = flatten()(cleaned_entry_model) - assert flat == cleaned_entry_model.model_dump() - doc2 = deepcopy(cleaned_entry_model.model_dump()) - doc2["cache"]["a"] = 1 - doc2["cache"]["b"] = 2 - entry2 = Entry.model_validate(doc2) - flat2 = flatten()(entry2) - assert flat2 == entry2.model_dump() - entry3 = deepcopy(cleaned_entry_model) - entry3.cache["a"] = {} - flat3 = flatten()(entry3) - assert flat3 == entry3.model_dump() - - def test_flatten_non_dict_input(self) -> None: - try: - flatten()([1, 2, 3]) # type: ignore - except Exception: - pass - else: - 
assert False, "flatten should raise an error on non-Dict" - - def test_flatten_basic(self, cleaned_entry_model: Entry) -> None: - flat = flatten()(cleaned_entry_model) - assert isinstance(flat, Dict) - assert "request" in flat - - def test_flatten_deep_structure(self, cleaned_entry_model: Entry) -> None: - entry = deepcopy(cleaned_entry_model) - entry.cache["deep"] = {"a": {"b": {"c": {"d": 1}}}} - flat = flatten(max_depth=2)(entry) - val = flat["cache"]["deep"] - if isinstance(val, Dict) and "a" in val: - aval = val["a"] - if isinstance(aval, Dict) and "b" in aval: - bval = aval["b"] - assert isinstance(bval, str) - else: - assert isinstance(aval, str) - else: - assert isinstance(val, str) - def test_entry_validation_error_on_missing_fields( self, cleaned_entry: Dict[str, Any] ) -> None: @@ -191,3 +60,110 @@ def test_entry_validation_error_on_missing_fields( entry_missing.pop(field, None) with pytest.raises(Exception): Entry.model_validate(entry_missing) + + def test_flatten_basic_headers(self, cleaned_entry: Dict[str, Any]) -> None: + entry = Entry.model_validate(cleaned_entry) + flat = flatten()(entry.model_dump()) + # Check that nested keys became flat + assert "request.headers" in flat + assert isinstance(flat["request.headers"], str) + assert "response.headers" in flat + assert isinstance(flat["response.headers"], str) + + def test_flatten_custom_array_handler(self, cleaned_entry: Dict[str, Any]) -> None: + entry = Entry.model_validate(cleaned_entry) + # array_handler returns a string with path and array length + flat = flatten(array_handler=lambda arr, path: f"{path}:{len(arr)}")( + entry.model_dump() + ) + assert "request.headers" in flat + assert isinstance(flat["request.headers"], str) + expected = f"request.headers:{len(cleaned_entry['request']['headers'])}" + assert flat["request.headers"] == expected + + def test_flatten_separator(self, cleaned_entry: Dict[str, Any]) -> None: + entry = Entry.model_validate(cleaned_entry) + flat = 
flatten(separator="__")(entry.model_dump()) + assert "request__headers" in flat + assert "response__headers" in flat + + def test_flatten_headers_to_keys_by_name( + self, cleaned_entry: Dict[str, Any] + ) -> None: + from urllib.parse import quote + + def handler(arr: List[Dict[str, Any]], path: str) -> Any: + return { + f"{path}.{quote(item['name'], safe='')}": item["value"] + for item in arr + if isinstance(item, dict) and "name" in item and "value" in item + } + + entry = Entry.model_validate(cleaned_entry) + flat = flatten(array_handler=handler)(entry.model_dump()) + # Check for user-agent and :authority keys + assert any("user-agent" in k for k in flat) + assert any("%3Aauthority" in k for k in flat) + # Value matches original + headers = {h["name"]: h["value"] for h in cleaned_entry["request"]["headers"]} + for k in flat: + if "user-agent" in k: + assert flat[k] == headers["user-agent"] + if "%3Aauthority" in k: + assert flat[k] == headers[":authority"] + + def test_flatten_default_array_handler_is_str( + self, cleaned_entry: Dict[str, Any] + ) -> None: + entry = Entry.model_validate(cleaned_entry) + flat = flatten()(entry.model_dump()) + # By default, headers is a string + assert isinstance(flat["request.headers"], str) + assert "user-agent" in flat["request.headers"] + + def test_flatten_array_handler_returns_multiple_keys( + self, cleaned_entry: Dict[str, Any] + ) -> None: + def handler(arr: List[Dict[str, Any]], path: str) -> Any: + return { + f"{path}.len": len(arr), + f"{path}.first_name": ( + arr[0]["name"] if arr and "name" in arr[0] else None + ), + } + + entry = Entry.model_validate(cleaned_entry) + flat = flatten(array_handler=handler)(entry.model_dump()) + assert flat["request.headers.len"] == len(cleaned_entry["request"]["headers"]) + assert ( + flat["request.headers.first_name"] + == cleaned_entry["request"]["headers"][0]["name"] + ) + + def test_flatten_array_handler_returns_dict( + self, cleaned_entry: Dict[str, Any] + ) -> None: + def 
handler(arr: List[Dict[str, Any]], path: str) -> Any: + return {f"{path}.foo.bar": 1} + + entry = Entry.model_validate(cleaned_entry) + flat = flatten(array_handler=handler)(entry.model_dump()) + # Key is as returned by handler, not escaped + assert "request.headers.foo.bar" in flat + assert flat["request.headers.foo.bar"] == 1 + + def test_flatten_header_by_name(self, cleaned_entry: Dict[str, Any]) -> None: + def handler(arr: List[Dict[str, Any]], path: str) -> Any: + return { + f"{path}_{item['name']}": item["value"] + for item in arr + if isinstance(item, dict) and "name" in item and "value" in item + } + + entry = Entry.model_validate(cleaned_entry) + flat = flatten(separator="_", array_handler=handler)(entry.model_dump()) + # There should be a key with the encoded ':authority' name + assert "request_headers_:authority" in flat + # Value should match the original + headers = {h["name"]: h["value"] for h in cleaned_entry["request"]["headers"]} + assert flat["request_headers_:authority"] == headers[":authority"]