69 changes: 26 additions & 43 deletions .ci/release
@@ -1,65 +1,48 @@
#!/usr/bin/env python3
'''
Run [[file:scripts/release][scripts/release]] to deploy Python package onto [[https://pypi.org][PyPi]] and [[https://test.pypi.org][test PyPi]].
Deploys the Python package onto [[https://pypi.org][PyPi]] or [[https://test.pypi.org][test PyPi]].

The script expects =TWINE_PASSWORD= environment variable to contain the [[https://pypi.org/help/#apitoken][PyPi token]] (not the password!).
- running manually

The script can be run manually.
It's also running as =pypi= job in [[file:.github/workflows/main.yml][Github Actions config]]. Packages are deployed on:
- every master commit, onto test pypi
- every new tag, onto production pypi
You'll need the =UV_PUBLISH_TOKEN= env variable

You'll need to set =TWINE_PASSWORD= and =TWINE_PASSWORD_TEST= in [[https://help.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets][secrets]]
for Github Actions deployment to work.
- running on Github Actions

Instead of an env variable, this relies on configuring Github as a Trusted Publisher (https://docs.pypi.org/trusted-publishers/) -- both for test and regular pypi

It runs as the =pypi= job in [[file:.github/workflows/main.yml][Github Actions config]].
Packages are deployed on:
- every master commit, onto test pypi
- every new tag, onto production pypi
'''

UV_PUBLISH_TOKEN = 'UV_PUBLISH_TOKEN'

import argparse
import os
import sys
from pathlib import Path
from subprocess import check_call
import shutil

is_ci = os.environ.get('CI') is not None


def main() -> None:
import argparse
p = argparse.ArgumentParser()
p.add_argument('--test', action='store_true', help='use test pypi')
p.add_argument('--use-test-pypi', action='store_true')
args = p.parse_args()

extra = []
if args.test:
extra.extend(['--repository', 'testpypi'])
publish_url = ['--publish-url', 'https://test.pypi.org/legacy/'] if args.use_test_pypi else []

root = Path(__file__).absolute().parent.parent
os.chdir(root) # just in case

if is_ci:
# see https://github.com/actions/checkout/issues/217
check_call('git fetch --prune --unshallow'.split())

dist = root / 'dist'
if dist.exists():
shutil.rmtree(dist)

check_call(['python3', '-m', 'build'])

TP = 'TWINE_PASSWORD'
password = os.environ.get(TP)
if password is None:
print(f"WARNING: no {TP} passed", file=sys.stderr)
import pip_secrets
password = pip_secrets.token_test if args.test else pip_secrets.token # meh

check_call([
'python3', '-m', 'twine',
'upload', *dist.iterdir(),
*extra,
], env={
'TWINE_USERNAME': '__token__',
TP: password,
**os.environ,
})
os.chdir(root) # just in case

check_call(['uv', 'build', '--clear'])

if not is_ci:
# CI relies on trusted publishers so doesn't need env variable
assert UV_PUBLISH_TOKEN in os.environ, f'no {UV_PUBLISH_TOKEN} passed'

check_call(['uv', 'publish', *publish_url])


if __name__ == '__main__':
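For reference, a minimal sketch of running the new script by hand from the repo root (the token value is a placeholder; on CI no env variable is needed since the job relies on trusted publishing, as noted above):

import os
import subprocess

env = {**os.environ, 'UV_PUBLISH_TOKEN': '<your pypi api token>'}
subprocess.run(['.ci/release', '--use-test-pypi'], env=env, check=True)  # upload to test pypi
# drop the flag to upload to production pypi instead:
# subprocess.run(['.ci/release'], env=env, check=True)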
56 changes: 0 additions & 56 deletions .ci/release-uv

This file was deleted.

8 changes: 4 additions & 4 deletions .github/workflows/main.yml
@@ -48,7 +48,7 @@ jobs:
# ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
- run: echo "$HOME/.local/bin" >> $GITHUB_PATH

- uses: actions/checkout@v5
- uses: actions/checkout@v6
with:
submodules: recursive
fetch-depth: 0 # nicer to have all git history when debugging/for tests
@@ -98,7 +98,7 @@ jobs:
# ugh https://github.com/actions/toolkit/blob/main/docs/commands.md#path-manipulation
- run: echo "$HOME/.local/bin" >> $GITHUB_PATH

- uses: actions/checkout@v5
- uses: actions/checkout@v6
with:
submodules: recursive
fetch-depth: 0 # pull all commits to correctly infer vcs version
@@ -114,9 +114,9 @@
- name: 'release to test pypi'
# always deploy merged master to test pypi
if: github.event.ref == format('refs/heads/{0}', github.event.repository.master_branch)
run: .ci/release-uv --use-test-pypi
run: .ci/release --use-test-pypi

- name: 'release to prod pypi'
# always deploy tags to release pypi
if: startsWith(github.event.ref, 'refs/tags/')
run: .ci/release-uv
run: .ci/release
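The two if: conditions above encode the deploy matrix described in the script's docstring; a hedged restatement in plain Python, just to make it explicit (the ref strings are the standard Github event refs, and 'master' is assumed as the default branch):

def deploy_target(ref: str, master_branch: str = 'master') -> str | None:
    if ref == f'refs/heads/{master_branch}':
        return 'test pypi'        # every merged master commit
    if ref.startswith('refs/tags/'):
        return 'production pypi'  # every new tag
    return None                   # any other ref: no deploy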
57 changes: 0 additions & 57 deletions conftest.py

This file was deleted.

12 changes: 2 additions & 10 deletions demo.py
@@ -23,16 +23,8 @@ def run() -> None:
ignore=ignore_patterns('.tox*'), # tox dir might have broken symlinks while tests are running in parallel
)

# 2. prepare repositories you'd be using. For this demo we only set up Hypothesis
tox = 'TOX' in os.environ
if tox: # tox doesn't like --user flag
check_call(f'{python} -m pip install git+https://github.com/karlicoss/hypexport.git'.split())
else:
try:
import hypexport # noqa: F401
except ModuleNotFoundError:
check_call(f'{python} -m pip --user git+https://github.com/karlicoss/hypexport.git'.split())

# 2. set up modules you'd be using. For this demo we only set up Hypothesis
check_call(f'{python} -m my.core module install my.hypothesis'.split())

# 3. prepare some demo Hypothesis data
hypothesis_backups = Path('backups/hypothesis').resolve()
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -52,7 +52,7 @@ optional = [
# On the other hand, it's a bit annoying that it's always included by default?
# To make sure it's not included, need to use `uv run --exact --no-default-groups ...`
testing = [
"pytest",
"pytest>=9", # need version 9 for proper namespace package support
"ruff",

# used in some tests.. although shouldn't rely on it
@@ -64,8 +64,8 @@ testing = [
typecheck = [
{ include-group = "testing" },
"mypy",
"lxml", # for mypy coverage
"ty>=0.0.1a22",
"lxml", # for mypy html coverage
"ty==0.0.1a35",

"HPI[optional]",
"orgparse", # for my.core.orgmode
5 changes: 4 additions & 1 deletion pytest.ini
@@ -2,9 +2,12 @@
# discover files that don't follow test_ naming. Useful to keep tests along with the source code
python_files = *.py

# this setting only impacts package/module naming under pytest, not the discovery
# this is necessary for --pyargs to discover implicit namespace packages correctly
consider_namespace_packages = true

# see https://docs.pytest.org/en/stable/reference/reference.html#confval-strict
strict = true

addopts =
# prevent pytest cache from being created... it craps into project dir and I never use it anyway
-p no:cacheprovider
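A hedged usage note on the consider_namespace_packages setting above: it lets --pyargs resolve modules living in an implicit namespace package, as the new comment says. Something like this should then collect the package's tests (the 'my.core' target is assumed from this repo's src/my layout):

import subprocess

# same as running `python3 -m pytest --pyargs my.core` from a shell
subprocess.run(['python3', '-m', 'pytest', '--pyargs', 'my.core'], check=True)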
2 changes: 1 addition & 1 deletion src/my/coding/commits.py
@@ -206,7 +206,7 @@ def _cached_commits_path(p: Path) -> Path | str:

# per-repo commits, to use cachew
@mcachew(
depends_on=_repo_depends_on,
depends_on=_repo_depends_on, # ty: ignore[invalid-argument-type] # not sure why? possibly a bug
logger=log,
cache_path=_cached_commits_path, # type: ignore[arg-type] # hmm mypy seems confused here? likely a bug in type + paramspec handling...
)
2 changes: 1 addition & 1 deletion src/my/core/_deprecated/dataset.py
@@ -9,4 +9,4 @@ def connect_readonly(db: PathIsh):
# todo not sure if mode=ro has any benefit, but it doesn't work on read-only filesystems
# maybe it should autodetect readonly filesystems and apply this? not sure
creator = lambda: sqlite_connect_immutable(db)
return dataset.connect('sqlite:///', engine_kwargs={'creator': creator})
return dataset.connect('sqlite:///', engine_kwargs={'creator': creator}) # ty: ignore[unresolved-attribute]
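For context on the creator trick above: sqlite_connect_immutable presumably opens the database with sqlite's immutable=1 URI flag, which, unlike mode=ro, also works on read-only filesystems because no lock or journal files need to be created. A minimal sketch of such a helper (the name is illustrative, not the repo's actual implementation):

import sqlite3

def connect_immutable(db: str) -> sqlite3.Connection:
    # immutable=1 promises sqlite the file won't change underneath it,
    # so it skips locking and journaling entirely
    return sqlite3.connect(f'file:{db}?immutable=1', uri=True)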
4 changes: 2 additions & 2 deletions src/my/core/cachew.py
@@ -132,11 +132,11 @@ def mcachew[F: Callable](fun: F) -> F: ...

@overload
def mcachew[F, **P](
cache_path: PathProvider[P] | None = ..., # ty: ignore[too-many-positional-arguments]
cache_path: PathProvider[P] | None = ...,
*,
force_file: bool = ...,
cls: type | None = ...,
depends_on: HashFunction[P] = ..., # ty: ignore[too-many-positional-arguments]
depends_on: HashFunction[P] = ...,
logger: logging.Logger | None = ...,
chunk_by: int = ...,
synthetic_key: str | None = ...,
5 changes: 3 additions & 2 deletions src/my/core/cfg.py
@@ -6,7 +6,7 @@
import sys
from collections.abc import Callable, Iterator
from contextlib import ExitStack, contextmanager
from typing import Any
from typing import Any, cast

type Attrs = dict[str, Any]

@@ -90,7 +90,7 @@ def tmp_config(*, modules: ModuleRegex | None = None, config=None):

import my.config

with ExitStack() as module_reload_stack, _override_config(my.config) as new_config:
_config = cast(Any, my.config) # cast since ty doesn't like a module here (mypy infers ModuleType anyway)
with ExitStack() as module_reload_stack, _override_config(_config) as new_config:
if config is not None:
overrides = {k: v for k, v in vars(config).items() if not k.startswith('__')}
for k, v in overrides.items():
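A small illustration of the cast(Any, ...) workaround added above: a module object accepts attribute overrides at runtime just fine, and the cast only changes what the type checkers see (the names below are made up for the example):

import types
from typing import Any, cast

fake = types.ModuleType('fake_config')
fake.cache_dir = None            # modules allow attribute assignment at runtime
cfg = cast(Any, fake)            # ty/mypy now treat it as Any, so no complaints
cfg.enabled_modules = ['demo']   # the same kind of override tmp_config performs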
2 changes: 1 addition & 1 deletion src/my/core/common.py
@@ -121,7 +121,7 @@ def prop(cls) -> str:
return 'hello'

res = C.prop
assert_type(res, str) # ty: ignore[type-assertion-failure]
assert_type(res, str)
assert res == 'hello'


2 changes: 1 addition & 1 deletion src/my/core/core_config.py
@@ -134,7 +134,7 @@ def matches(specs: Sequence[str]) -> str | None:
def _reset_config() -> Iterator[Config]:
# todo maybe have this decorator for the whole of my.config?
from .cfg import _override_config
with _override_config(config) as cc:
with _override_config(config) as cc: # ty: ignore[invalid-argument-type]
cc.enabled_modules = None
cc.disabled_modules = None
cc.cache_dir = None
1 change: 0 additions & 1 deletion src/my/core/influxdb.py
@@ -88,7 +88,6 @@ def dit() -> Iterable[Json]:
'fields': fields,
}


# "The optimal batch size is 5000 lines of line protocol."
# some chunking is def necessary, otherwise it fails
inserted = 0
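On the batching comment above, a hedged sketch of chunking points into groups of 5000 before inserting (write_points stands in for whatever client call the module actually uses):

from itertools import islice

def chunked(items, n: int = 5000):
    it = iter(items)
    while batch := list(islice(it, n)):
        yield batch

# for batch in chunked(jsons):
#     client.write_points(batch)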
4 changes: 3 additions & 1 deletion src/my/core/kompress.py
@@ -1,6 +1,8 @@
from . import warnings

warnings.high('my.core.kompress is deprecated. Install and use "kompress" library directly in your module (see https://github.com/karlicoss/kompress )')
warnings.high(
'my.core.kompress is deprecated. Install and use "kompress" library directly in your module (see https://github.com/karlicoss/kompress )'
)

from typing import TYPE_CHECKING

2 changes: 1 addition & 1 deletion src/my/core/orgmode.py
@@ -18,7 +18,7 @@ def parse_org_datetime(s: str) -> datetime:
# todo not sure about these... fallback on 00:00?
# ("%Y-%m-%d %a" , date),
# ("%Y-%m-%d" , date),
]:
]: # fmt: skip
try:
return datetime.strptime(s, fmt)
except ValueError:
4 changes: 2 additions & 2 deletions src/my/core/pandas.py
@@ -28,10 +28,10 @@

if TYPE_CHECKING:
import pandas as pd
from pandas._typing import S1 # meh

type DataFrameT = pd.DataFrame
type SeriesT[T] = pd.Series[T]
from pandas._typing import S1 # meh
SeriesT = pd.Series

# huh interesting -- with from __future__ import annotations don't even need else clause here?
# but still if other modules import these we do need some fake runtime types here..
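The comment about fake runtime types refers to a common pattern; a generic, hedged illustration (not the module's exact aliases):

from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    import pandas as pd
    DataFrameT = pd.DataFrame  # precise type for the type checker only
else:
    DataFrameT = Any  # cheap runtime placeholder so importing modules don't need pandas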