@@ -16,7 +16,11 @@ Parses a HAR file from a path, bytes, or file-like object and returns a validate
1616
1717**Signature:**
1818``` python
19- def parse(src: str | Path | bytes | bytearray | IO[Any], *, entry_model_selector: Callable[[dict[str, Any]], type[Entry]] = entry_selector) -> HarLog
19+ def parse(
20+     src: str | Path | bytes | bytearray | IO[Any],
21+     *,
22+     entry_model_selector: Callable[[dict[str, Any]], type[Entry]] = entry_selector,
23+ ) -> HarLog
2024```
2125
2226- `src`: str, Path, bytes, bytearray, or file-like object containing HAR JSON.
@@ -42,7 +46,10 @@ Register a custom Pydantic model and detector function for new HAR entry formats
4246
4347**Signature:**
4448```python
45- def register_entry_model(detector: Callable[[dict[str, Any]], bool], model: type[Entry]) -> None
49+ def register_entry_model(
50+     detector: Callable[[dict[str, Any]], bool],
51+     model: type[Entry],
52+ ) -> None
4653```
4754
4855- `detector`: Function that takes an entry dict and returns True if the model should be used.
@@ -104,6 +111,11 @@ Returns a deterministic ID function based on specified fields of a HAR entry.
104111
105112**Signature:**
106113```python
114+ def by_field(fields: list[str]) -> EntryIdFn
115+ ```
116+
117+ **Example:**
118+ ```python
107119from hario_core.utils import by_field
108120id_fn = by_field(["request.url", "startedDateTime"])
109121```
@@ -114,6 +126,11 @@ Returns a function that generates a random UUID for each entry.
114126
115127**Signature:**
116128```python
129+ def uuid() -> EntryIdFn
130+ ```
131+
132+ **Example:**
133+ ```python
117134from hario_core.utils import uuid
118135id_fn = uuid()
119136```
@@ -130,8 +147,10 @@ Flattens nested structures in a HAR entry to a single level, stringifying deep o
130147
131148**Signature:**
132149```python
133- from hario_core.utils import flatten
134- transform = flatten(max_depth=3, size_limit=32000)
150+ def flatten(
151+     max_depth: int = 3,
152+     size_limit: int = 32_000,
153+ ) -> Transformer
135154```
136155- `max_depth`: Maximum depth to keep as dicts/lists (default: 3).
137156- `size_limit`: Maximum size (in bytes) for nested data before stringifying (default: 32,000).
@@ -147,8 +166,7 @@ Normalizes negative size fields in request/response to zero.
147166
148167**Signature:**
149168```python
150- from hario_core.utils import normalize_sizes
151- transform = normalize_sizes()
169+ def normalize_sizes() -> Transformer
152170```
153171
154172# ## `normalize_timings`
@@ -157,8 +175,7 @@ Normalizes negative timing fields in entry.timings to zero.
157175
158176**Signature:**
159177```python
160- from hario_core.utils import normalize_timings
161- transform = normalize_timings()
178+ def normalize_timings() -> Transformer
162179```
163180
164181-- -
@@ -171,18 +188,14 @@ A high-level class for processing HAR data: transforming and assigning IDs. You
171188
172189**Signature:**
173190```python
174- from hario_core import Pipeline, by_field, flatten, parse
175-
176- pipeline = Pipeline(
177-     id_fn=by_field(["request.url", "startedDateTime"]),
178-     id_field="entry_id",  # optional, default is "id"
179-     transformers=[flatten()],  # optional
180- )
181-
182- har_log = parse("example.har")
183- results = pipeline.process(har_log)
184- for entry in results:
185-     print(entry["entry_id"], entry["request"]["url"])
191+ class Pipeline:
192+     def __init__(
193+         self,
194+         id_fn: EntryIdFn,
195+         id_field: str = "id",
196+         transformers: Sequence[Transformer] = (),
197+     ) -> None
198+     def process(self, har_log: HarLog) -> list[dict[str, Any]]
186199```
187200
188201- `id_fn`: Function to generate an ID for each entry.
0 commit comments