Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
69 changes: 26 additions & 43 deletions .ci/release
Original file line number Diff line number Diff line change
@@ -1,65 +1,48 @@
#!/usr/bin/env python3
'''
Run [[file:scripts/release][scripts/release]] to deploy Python package onto [[https://pypi.org][PyPi]] and [[https://test.pypi.org][test PyPi]].
Deploys Python package onto [[https://pypi.org][PyPi]] or [[https://test.pypi.org][test PyPi]].

The script expects =TWINE_PASSWORD= environment variable to contain the [[https://pypi.org/help/#apitoken][PyPi token]] (not the password!).
- running manually

The script can be run manually.
It's also running as =pypi= job in [[file:.github/workflows/main.yml][Github Actions config]]. Packages are deployed on:
- every master commit, onto test pypi
- every new tag, onto production pypi
You'll need the =UV_PUBLISH_TOKEN= env variable to be set.

You'll need to set =TWINE_PASSWORD= and =TWINE_PASSWORD_TEST= in [[https://help.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets][secrets]]
for Github Actions deployment to work.
- running on Github Actions

Instead of an env variable, this relies on configuring GitHub as a Trusted Publisher (https://docs.pypi.org/trusted-publishers/) -- both for test and regular pypi

It's running as =pypi= job in [[file:.github/workflows/main.yml][Github Actions config]].
Packages are deployed on:
- every master commit, onto test pypi
- every new tag, onto production pypi
'''

UV_PUBLISH_TOKEN = 'UV_PUBLISH_TOKEN'

import argparse
import os
import sys
from pathlib import Path
from subprocess import check_call
import shutil

is_ci = os.environ.get('CI') is not None


def main() -> None:
import argparse
p = argparse.ArgumentParser()
p.add_argument('--test', action='store_true', help='use test pypi')
p.add_argument('--use-test-pypi', action='store_true')
args = p.parse_args()

extra = []
if args.test:
extra.extend(['--repository', 'testpypi'])
publish_url = ['--publish-url', 'https://test.pypi.org/legacy/'] if args.use_test_pypi else []

root = Path(__file__).absolute().parent.parent
os.chdir(root) # just in case

if is_ci:
# see https://github.com/actions/checkout/issues/217
check_call('git fetch --prune --unshallow'.split())

dist = root / 'dist'
if dist.exists():
shutil.rmtree(dist)

check_call(['python3', '-m', 'build'])

TP = 'TWINE_PASSWORD'
password = os.environ.get(TP)
if password is None:
print(f"WARNING: no {TP} passed", file=sys.stderr)
import pip_secrets
password = pip_secrets.token_test if args.test else pip_secrets.token # meh

check_call([
'python3', '-m', 'twine',
'upload', *dist.iterdir(),
*extra,
], env={
'TWINE_USERNAME': '__token__',
TP: password,
**os.environ,
})
os.chdir(root) # just in case

check_call(['uv', 'build', '--clear'])

if not is_ci:
# CI relies on trusted publishers so doesn't need env variable
assert UV_PUBLISH_TOKEN in os.environ, f'no {UV_PUBLISH_TOKEN} passed'

check_call(['uv', 'publish', *publish_url])


if __name__ == '__main__':
Expand Down
56 changes: 0 additions & 56 deletions .ci/release-uv

This file was deleted.

8 changes: 4 additions & 4 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ jobs:
# ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
- run: echo "$HOME/.local/bin" >> $GITHUB_PATH

- uses: actions/checkout@v5
- uses: actions/checkout@v6
with:
submodules: recursive
fetch-depth: 0 # nicer to have all git history when debugging/for tests
Expand Down Expand Up @@ -93,7 +93,7 @@ jobs:
# ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
- run: echo "$HOME/.local/bin" >> $GITHUB_PATH

- uses: actions/checkout@v5
- uses: actions/checkout@v6
with:
submodules: recursive
fetch-depth: 0 # pull all commits to correctly infer vcs version
Expand All @@ -109,9 +109,9 @@ jobs:
- name: 'release to test pypi'
# always deploy merged master to test pypi
if: github.event.ref == format('refs/heads/{0}', github.event.repository.master_branch)
run: .ci/release-uv --use-test-pypi
run: .ci/release --use-test-pypi

- name: 'release to prod pypi'
# always deploy tags to release pypi
if: startsWith(github.event.ref, 'refs/tags/')
run: .ci/release-uv
run: .ci/release
57 changes: 0 additions & 57 deletions conftest.py

This file was deleted.

8 changes: 4 additions & 4 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,12 +34,13 @@ Homepage = "https://github.com/karlicoss/cachew"
optional = [
"colorlog",
]

[dependency-groups]
# TODO: not sure, on the one hand could just use 'standard' dev dependency group
# On the other hand, it's a bit annoying that it's always included by default?
# To make sure it's not included, need to use `uv run --exact --no-default-groups ...`
testing = [
"pytest",
"pytest>=9", # need version 9 for proper namespace package support
"ruff",

"pytz",
Expand All @@ -53,10 +54,9 @@ testing = [
]
typecheck = [
{ include-group = "testing" },

"mypy",
"lxml", # for mypy coverage
"ty>=0.0.1a22",
"lxml", # for mypy html coverage
"ty>=0.0.3",

"types-pytz", # optional runtime only dependency

Expand Down
6 changes: 5 additions & 1 deletion pytest.ini
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,13 @@
# discover files that don't follow test_ naming. Useful to keep tests along with the source code
python_files = *.py

# this setting only impacts package/module naming under pytest, not the discovery
# this is necessary for --pyargs to discover implicit namespace packages correctly
consider_namespace_packages = true

# see https://docs.pytest.org/en/stable/reference/reference.html#confval-strict
# disable for now -- some macos tests ('file backend') are flaky
# strict = true

addopts =
# prevent pytest cache from being created... it craps into project dir and I never use it anyway
-p no:cacheprovider
Expand Down
16 changes: 8 additions & 8 deletions src/cachew/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,11 +278,11 @@ def cachew_error(e: Exception, *, logger: logging.Logger) -> None:
@doublewrap
def cachew_impl[**P](
func=None, # TODO should probably type it after switch to python 3.10/proper paramspec
cache_path: PathProvider[P] | None = use_default_path, # ty: ignore[too-many-positional-arguments] # see https://github.com/astral-sh/ty/issues/157
cache_path: PathProvider[P] | None = use_default_path,
*,
force_file: bool = False,
cls: type | tuple[Kind, type] | None = None,
depends_on: HashFunction[P] = default_hash, # ty: ignore[too-many-positional-arguments]
depends_on: HashFunction[P] = default_hash,
logger: logging.Logger | None = None,
chunk_by: int = 100,
# NOTE: allowed values for chunk_by depend on the system.
Expand Down Expand Up @@ -435,7 +435,7 @@ def _func(*args, **kwargs):
@functools.wraps(func)
def binder(*args, **kwargs):
kwargs['_cachew_context'] = ctx
res = cachew_wrapper(*args, **kwargs) # ty: ignore[missing-argument]
res = cachew_wrapper(*args, **kwargs)

if use_kind == 'single':
lres = list(res)
Expand All @@ -457,11 +457,11 @@ def cachew[F: Callable](fun: F) -> F: ...
# but at least it works for checking that cache_path and depends_on have the same args :shrug:
@overload
def cachew[F, **P](
cache_path: PathProvider[P] | None = ..., # ty: ignore[too-many-positional-arguments]
cache_path: PathProvider[P] | None = ...,
*,
force_file: bool = ...,
cls: type | tuple[Kind, type] | None = ...,
depends_on: HashFunction[P] = ..., # ty: ignore[too-many-positional-arguments]
depends_on: HashFunction[P] = ...,
logger: logging.Logger | None = ...,
chunk_by: int = ...,
synthetic_key: str | None = ...,
Expand Down Expand Up @@ -568,10 +568,10 @@ def _module_is_disabled(module_name: str, logger: logging.Logger) -> bool:
class Context[**P]:
# fmt: off
func : Callable
cache_path : PathProvider[P] # ty: ignore[too-many-positional-arguments]
cache_path : PathProvider[P]
force_file : bool
cls_ : type
depends_on : HashFunction[P] # ty: ignore[too-many-positional-arguments]
depends_on : HashFunction[P]
logger : logging.Logger
chunk_by : int
synthetic_key: str | None
Expand Down Expand Up @@ -612,7 +612,7 @@ def composite_hash(self, *args, **kwargs) -> dict[str, Any]:

def cachew_wrapper[**P](
*args,
_cachew_context: Context[P], # ty: ignore[too-many-positional-arguments]
_cachew_context: Context[P],
**kwargs,
):
C = _cachew_context
Expand Down
4 changes: 2 additions & 2 deletions src/cachew/legacy.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def get_union_args(cls) -> Optional[tuple[type]]:
args = cls.__args__
args = tuple(e for e in args if e is not type(None))
assert len(args) > 0
return args
return args # ty: ignore[invalid-return-type]


def is_union(cls) -> bool:
Expand Down Expand Up @@ -313,7 +313,7 @@ class NTBinder(Generic[NT]):

@staticmethod
def make(tp: type[NT], name: Optional[str] = None) -> 'NTBinder[NT]':
tp, optional = strip_optional(tp)
tp, optional = strip_optional(tp) # ty: ignore[invalid-assignment]
union: Optional[type]
fields: tuple[Any, ...]
primitive: bool
Expand Down
4 changes: 2 additions & 2 deletions src/cachew/tests/marshall.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def union_hook(data, type_):
jsons: list[Json] = [None for _ in range(count)]
with profile(test_name + ':serialize'), timer(f'serializing {count} objects of type {Type}'):
for i in range(count):
jsons[i] = to_json(objects[i])
jsons[i] = to_json(objects[i]) # ty: ignore[invalid-assignment]

strs: list[bytes] = [None for _ in range(count)] # type: ignore[misc]
with profile(test_name + ':json_dump'), timer(f'json dump {count} objects of type {Type}'):
Expand Down Expand Up @@ -165,7 +165,7 @@ def union_hook(data, type_):
objects2 = [None for _ in range(count)]
with profile(test_name + ':deserialize'), timer(f'deserializing {count} objects of type {Type}'):
for i in range(count):
objects2[i] = from_json(jsons2[i])
objects2[i] = from_json(jsons2[i]) # ty: ignore[invalid-argument-type]

assert objects[:100] + objects[-100:] == objects2[:100] + objects2[-100:]

Expand Down
9 changes: 6 additions & 3 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
[tox]
minversion = 3.21
minversion = 4

# relies on the correct version of Python installed
# (we rely on CI for the test matrix)
envlist = ruff,tests,mypy,ty

# https://github.com/tox-dev/tox/issues/20#issuecomment-247788333
# hack to prevent .tox from crapping to the project directory
toxworkdir = {env:TOXWORKDIR_BASE:}{toxinidir}/.tox
Expand All @@ -23,8 +26,8 @@ set_env =
# generally this is more robust and safer, prevents weird issues later on
PYTHONSAFEPATH=1

# default is 'editable', in which tox builds wheel first for some reason? not sure if makes much sense
package = uv-editable
runner = uv-venv-lock-runner
uv_sync_locked = false


[testenv:ruff]
Expand Down