Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ optional = [
# todo document these?
"orjson", # for my.core.serialize
"pyfzf_iter", # for my.core.denylist
"cachew>=0.15.20231019",
"cachew>=0.22.20251013", # min version that makes type = syntax properly work
"mypy", # used for config checks
"colorlog", # for colored logs
"enlighten", # for CLI progress bars
Expand Down
4 changes: 0 additions & 4 deletions ruff.toml
Original file line number Diff line number Diff line change
Expand Up @@ -98,10 +98,6 @@ lint.ignore = [
"PLC0415", # "imports should be at the top level" -- not realistic

"ARG001", # ugh, kinda annoying when using pytest fixtures

# FIXME hmm. Need to figure out if cachew works fine with type = defined types before updating things..
"UP047", # non-pep695-generic-function
"UP040", # non-pep695-type-alias
]

lint.per-file-ignores."src/my/core/compat.py" = [
Expand Down
4 changes: 2 additions & 2 deletions src/my/coding/commits.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ def _commits(_repos: list[Path]) -> Iterator[Commit]:
yield from _cached_commits(r)


def _cached_commits_path(p: Path) -> str:
def _cached_commits_path(p: Path) -> Path | str:
p = cache_dir() / 'my.coding.commits:_cached_commits' / str(p.absolute()).strip("/")
p.mkdir(parents=True, exist_ok=True)
return str(p)
Expand All @@ -208,7 +208,7 @@ def _cached_commits_path(p: Path) -> str:
@mcachew(
depends_on=_repo_depends_on,
logger=log,
cache_path=_cached_commits_path,
cache_path=_cached_commits_path, # type: ignore[arg-type] # hmm mypy seems confused here? likely a bug in type + paramspec handling...
)
def _cached_commits(repo: Path) -> Iterator[Commit]:
log.debug('processing %s', repo)
Expand Down
22 changes: 7 additions & 15 deletions src/my/core/cachew.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
from typing import (
TYPE_CHECKING,
Any,
TypeVar,
cast,
overload,
)
Expand All @@ -16,7 +15,7 @@

from . import warnings

PathIsh = str | Path # avoid circular import from .common
type PathIsh = str | Path # avoid circular import from .common


def disable_cachew() -> None:
Expand Down Expand Up @@ -120,31 +119,24 @@ def _mcachew_impl(cache_path=_cache_path_dflt, **kwargs):


if TYPE_CHECKING:
R = TypeVar('R')
from typing import ParamSpec

P = ParamSpec('P')
CC = Callable[P, R] # need to give it a name, if inlined into bound=, mypy runs in a bug
PathProvider = PathIsh | Callable[P, PathIsh]
type PathProvider[**P] = PathIsh | Callable[P, PathIsh]
# NOTE: in cachew, HashFunction type returns str
# however in practice, cachew always calls str for its result
# so perhaps better to switch it to Any in cachew as well
HashFunction = Callable[P, Any]

F = TypeVar('F', bound=Callable)
type HashFunction[**P] = Callable[P, Any]

# we need two versions due to @doublewrap
# this is when we just annotate as @cachew without any args
@overload
def mcachew(fun: F) -> F: ...
def mcachew[F: Callable](fun: F) -> F: ...

@overload
def mcachew(
cache_path: PathProvider | None = ...,
def mcachew[F, **P](
cache_path: PathProvider[P] | None = ..., # ty: ignore[too-many-positional-arguments]
*,
force_file: bool = ...,
cls: type | None = ...,
depends_on: HashFunction = ...,
depends_on: HashFunction[P] = ..., # ty: ignore[too-many-positional-arguments]
logger: logging.Logger | None = ...,
chunk_by: int = ...,
synthetic_key: str | None = ...,
Expand Down
13 changes: 4 additions & 9 deletions src/my/core/cfg.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,17 +6,15 @@
import sys
from collections.abc import Callable, Iterator
from contextlib import ExitStack, contextmanager
from typing import Any, TypeVar
from typing import Any

Attrs = dict[str, Any]

C = TypeVar('C')
type Attrs = dict[str, Any]


# todo not sure about it, could be overthinking...
# but short enough to change later
# TODO document why it's necessary?
def make_config(cls: type[C], migration: Callable[[Attrs], Attrs] = lambda x: x) -> C:
def make_config[C](cls: type[C], migration: Callable[[Attrs], Attrs] = lambda x: x) -> C:
user_config = cls.__base__
old_props = {
# NOTE: deliberately use getattr to 'force' class properties here
Expand All @@ -34,11 +32,8 @@ def make_config(cls: type[C], migration: Callable[[Attrs], Attrs] = lambda x: x)
return cls(**params)


F = TypeVar('F')


@contextmanager
def _override_config(config: F) -> Iterator[F]:
def _override_config[F](config: F) -> Iterator[F]:
'''
Temporary override for config's parameters, useful for testing/fake data/etc.
'''
Expand Down
1 change: 0 additions & 1 deletion src/my/core/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,6 @@ def caller() -> str:
traceback.print_stack()

if guess_compression:

from kompress import CPath, is_compressed

# note: ideally we'd just wrap everything in CPath for simplicity, however
Expand Down
10 changes: 4 additions & 6 deletions src/my/core/denylist.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,24 +14,22 @@
from collections import defaultdict
from collections.abc import Iterator, Mapping
from pathlib import Path
from typing import Any, TypeVar
from typing import Any

import click
from more_itertools import seekable

from .serialize import dumps
from .warnings import medium

T = TypeVar("T")
type DenyMap = Mapping[str, set[Any]]

DenyMap = Mapping[str, set[Any]]


def _default_key_func(obj: T) -> str:
def _default_key_func[T](obj: T) -> str:
return str(obj)


class DenyList:
class DenyList[T]:
def __init__(self, denylist_file: Path | str) -> None:
self.file = Path(denylist_file).expanduser().absolute()
self._deny_raw_list: list[dict[str, Any]] = []
Expand Down
4 changes: 2 additions & 2 deletions src/my/core/discovery_pure.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,12 @@
import re
from collections.abc import Iterable, Sequence
from pathlib import Path
from typing import Any, NamedTuple, TypeAlias, cast
from typing import Any, NamedTuple, cast

'''
None means that requirements weren't defined (different from empty requirements)
'''
Requires: TypeAlias = Sequence[str] | None
type Requires = Sequence[str] | None


class HPIModule(NamedTuple):
Expand Down
34 changes: 11 additions & 23 deletions src/my/core/error.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,55 +10,46 @@
from collections.abc import Callable, Iterable, Iterator
from datetime import date, datetime
from itertools import tee
from typing import (
Any,
Literal,
TypeAlias,
TypeVar,
cast,
)
from typing import Any, Literal, cast

from .types import Json
from .warnings import medium

T = TypeVar('T')
E = TypeVar('E', bound=Exception) # TODO make covariant?
type ResT[T, E: Exception] = T | E

ResT: TypeAlias = T | E

Res: TypeAlias = ResT[T, Exception]
type Res[T] = ResT[T, Exception]

ErrorPolicy = Literal["yield", "raise", "drop"]


def notnone(x: T | None) -> T:
def notnone[T](x: T | None) -> T:
assert x is not None
return x


def unwrap(res: Res[T]) -> T:
def unwrap[T](res: Res[T]) -> T:
if isinstance(res, Exception):
raise res
return res


def drop_exceptions(itr: Iterator[Res[T]]) -> Iterator[T]:
def drop_exceptions[T](itr: Iterator[Res[T]]) -> Iterator[T]:
"""Return non-errors from the iterable"""
for o in itr:
if isinstance(o, Exception):
continue
yield o


def raise_exceptions(itr: Iterable[Res[T]]) -> Iterator[T]:
def raise_exceptions[T](itr: Iterable[Res[T]]) -> Iterator[T]:
"""Raise errors from the iterable, stops the select function"""
for o in itr:
if isinstance(o, Exception):
raise o
yield o


def warn_exceptions(itr: Iterable[Res[T]], warn_func: Callable[[Exception], None] | None = None) -> Iterator[T]:
def warn_exceptions[T](itr: Iterable[Res[T]], warn_func: Callable[[Exception], None] | None = None) -> Iterator[T]:
# if not provided, use the 'warnings' module
if warn_func is None:

Expand All @@ -76,12 +67,12 @@ def _warn_func(e: Exception) -> None:


# TODO deprecate in favor of Exception.add_note?
def echain(ex: E, cause: Exception) -> E:
def echain[E: Exception](ex: E, cause: Exception) -> E:
ex.__cause__ = cause
return ex


def split_errors(l: Iterable[ResT[T, E]], ET: type[E]) -> tuple[Iterable[T], Iterable[E]]:
def split_errors[T, E: Exception](l: Iterable[ResT[T, E]], ET: type[E]) -> tuple[Iterable[T], Iterable[E]]:
# TODO would be nice to have ET=Exception default? but it causes some mypy complaints?
vit, eit = tee(l)
# TODO ugh, not sure if I can reconcile type checking and runtime and convince mypy that ET and E are the same type?
Expand All @@ -96,10 +87,7 @@ def split_errors(l: Iterable[ResT[T, E]], ET: type[E]) -> tuple[Iterable[T], Ite
return (values, errors)


K = TypeVar('K')


def sort_res_by(items: Iterable[Res[T]], key: Callable[[Any], K]) -> list[Res[T]]:
def sort_res_by[T, K](items: Iterable[Res[T]], key: Callable[[Any], K]) -> list[Res[T]]:
"""
Sort a sequence potentially interleaved with errors/entries on which the key can't be computed.
The general idea is: the error sticks to the non-error entry that follows it
Expand Down
12 changes: 7 additions & 5 deletions src/my/core/freezer.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,15 +54,17 @@ def untyped(self):


def test_freezer() -> None:
val = _A(x={
'an_int': 123,
'an_any': [1, 2, 3],
})
val = _A(
x={
'an_int': 123,
'an_any': [1, 2, 3],
}
)
af = Freezer(_A)
fval = af.freeze(val)

fd = vars(fval)
assert fd['typed'] == 123
assert fd['typed'] == 123
assert fd['untyped'] == [1, 2, 3]


Expand Down
7 changes: 2 additions & 5 deletions src/my/core/hpi_compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
import pkgutil
import re
from collections.abc import Iterator, MutableSequence, Sequence
from typing import Any, TypeVar
from typing import Any

from . import warnings

Expand Down Expand Up @@ -112,11 +112,8 @@ def _get_dal(cfg, module_name: str):
return import_module(f'my.config.repos.{module_name}.dal')


V = TypeVar('V')


# named to be kinda consistent with more_itertools, e.g. more_itertools.always_iterable
class always_supports_sequence(Iterator[V]):
class always_supports_sequence[V](Iterator[V]):
"""
Helper to make migration from Sequence/List to Iterable/Iterator type backwards compatible in runtime
"""
Expand Down
2 changes: 1 addition & 1 deletion src/my/core/internal.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Utils specific to hpi core, shouldn't really be used by HPI modules
"""

from __future__ import annotations

import importlib.metadata
Expand Down Expand Up @@ -35,7 +36,6 @@ def warn_if_not_using_src_layout(path: Sequence[str]) -> None:
# nothing to check
return


from . import warnings

MSG = '''
Expand Down
6 changes: 4 additions & 2 deletions src/my/core/konsume.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,9 +142,11 @@ def wrap(j, *, throw=True) -> Iterator[Zoomable]:
if not c.this_consumed(): # TODO hmm. how does it figure out if it's consumed???
if throw:
# TODO need to keep a full path or something...
raise UnconsumedError(f'''
raise UnconsumedError(
f'''
Expected {c} to be fully consumed by the parser.
'''.lstrip())
'''.lstrip()
)
else:
# TODO log?
pass
Expand Down
18 changes: 6 additions & 12 deletions src/my/core/orgmode.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,13 @@
Various helpers for reading org-mode data
"""

from collections.abc import Callable, Iterable
from datetime import datetime

from more_itertools import one
from orgparse import OrgNode
from orgparse.extra import Table


def parse_org_datetime(s: str) -> datetime:
s = s.strip('[]')
Expand All @@ -23,24 +28,13 @@ def parse_org_datetime(s: str) -> datetime:

# TODO I guess want to borrow inspiration from bs4? element type <-> tag; and similar logic for find_one, find_all

from collections.abc import Callable, Iterable
from typing import TypeVar

from orgparse import OrgNode

V = TypeVar('V')


def collect(n: OrgNode, cfun: Callable[[OrgNode], Iterable[V]]) -> Iterable[V]:
def collect[V](n: OrgNode, cfun: Callable[[OrgNode], Iterable[V]]) -> Iterable[V]:
yield from cfun(n)
for c in n.children:
yield from collect(c, cfun)


from more_itertools import one
from orgparse.extra import Table


def one_table(o: OrgNode) -> Table:
return one(collect(o, lambda n: (x for x in n.body_rich if isinstance(x, Table))))

Expand Down
Loading
Loading