from collections.abc import Callable, Iterable, Iterator
from itertools import tee
from operator import itemgetter
from typing import TypeVar

from .basics import first, fnot
from .compose import chain as fchain

T = TypeVar("T")


def recursive_tee(iterable, n=2):
    """
    A deep, recursive version of itertools.tee.

    It splits 'iterable' into 'n' independent iterators.
    Crucially, if it encounters an item that is itself an Iterator,
    it recursively 'tees' that item into 'n' independent copies
    before yielding it.
    """
    # 1. We must work with an iterator to use tee.
    it = iter(iterable)

    # 2. Define a generator that processes items as they pass through.
    #    This is the "Lazy Mapper" that handles the splitting logic.
    def splitter():
        for item in it:
            if isinstance(item, Iterator):
                # RECURSION: The item is an iterator, so we must split it
                # into n copies (one for each branch we are creating).
                yield recursive_tee(item, n)
            else:
                # BASE CASE: The item is simple (int, string, etc.).
                # Just yield n references to the same object.
                yield (item,) * n

    # 3. Create a master stream that yields tuples of split items:
    #    e.g. ((item1_copyA, item1_copyB), (item2_copyA, item2_copyB), ...)
    #    We tee this master stream so every output branch can access the tuples.
    streams_of_tuples = tee(splitter(), n)

    # 4. Unzip: Create n generators.
    #    The i-th generator picks the i-th element from the stream of tuples.
    #    itemgetter(i) is built eagerly for each branch; a plain nested
    #    generator expression would late-bind 'i', so every branch would end
    #    up reading the final index.
    return tuple(
        map(itemgetter(i), stream)
        for i, stream in enumerate(streams_of_tuples)
    )
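

# Illustrative usage sketch (not part of the module above): splitting a stream
# that contains a nested iterator. Both branches, including their copies of the
# nested iterator, can be consumed independently.
#
#     left, right = recursive_tee(iter([1, iter([2, 3]), 4]))
#     next(left)               # -> 1
#     inner_left = next(left)  # an independent copy of the nested iterator
#     list(inner_left)         # -> [2, 3]
#     next(right)              # -> 1
#     list(next(right))        # -> [2, 3]  (the other branch's copy, untouched)
#     next(left), next(right)  # -> (4, 4)

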
def dropwhile_safe(predicate, iterable):
    """
    A safe, deep-aware implementation of dropwhile: the predicate receives an
    independent, recursively teed copy of each item, so it can consume nested
    iterators without exhausting the copies that are eventually yielded.
    """
    # We iterate over the main sequence.
    outer_iter = iter(iterable)

    for item in outer_iter:
        # 1. RECURSIVE SPLIT:
        #    Use recursive_tee instead of standard tee.
        #    If 'item' is a nested iterator tree, both copies are fully independent.
        check_copy, keep_copy = recursive_tee(item, 2)

        # 2. CHECK:
        #    Predicate can consume 'check_copy' arbitrarily (even deeply).
        if predicate(check_copy):
            continue  # Drop and move to next

        # 3. YIELD & RESUME:
        #    Yield the pristine 'keep_copy' and the rest of the outer stream.
        yield keep_copy
        yield from outer_iter
        return
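

# Illustrative usage sketch: the predicate may fully consume each (possibly
# nested) item while deciding, yet the surviving items are yielded as untouched
# copies. With itertools.dropwhile, 'any' would already have consumed the
# iterator that ends up being yielded.
#
#     data = iter([iter([0, 0]), iter([0, 1]), iter([2, 3])])
#     survivors = dropwhile_safe(lambda it: not any(it), data)
#     [list(x) for x in survivors]   # -> [[0, 1], [2, 3]]

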
def takefirst(predicate: Callable[[T], bool], items: Iterable[T]):
    # Return the first item of 'items' matching 'predicate': dropwhile_safe
    # drops the leading non-matching items via the negated predicate.
    return first(iter(dropwhile_safe(fchain([fnot, predicate]), iter(items))))
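

# Illustrative usage sketch (assuming 'first' returns the first element of an
# iterable and 'fchain([fnot, predicate])' negates the predicate, as their
# names suggest):
#
#     takefirst(lambda x: x > 2, iter([1, 2, 3, 4]))   # -> 3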