diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 0000000..44fb911
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,24 @@
+name: lint
+on:
+ pull_request:
+ types: ['opened', 'edited', 'reopened', 'synchronize']
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+
+ - uses: actions/setup-python@v4
+ name: Install Python
+ with:
+ python-version: '3.10'
+
+ - name: Install Dependencies
+ run: pip install black isort
+
+ - name: Lint Python Code
+ run: |
+ black --check pynixify
+ isort --check pynixify
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 08bc33b..36ba0ce 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,6 +1,5 @@
name: "Test"
on:
- pull_request:
push:
schedule:
- cron: '5 19 * * 5' # At 19:05 on Friday
diff --git a/pynixify/base.py b/pynixify/base.py
index 494f98d..49775aa 100644
--- a/pynixify/base.py
+++ b/pynixify/base.py
@@ -15,16 +15,21 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import json
-from pathlib import Path
from dataclasses import dataclass
-from typing import Optional, Dict
-from packaging.version import Version, LegacyVersion, parse as parse_original
+from pathlib import Path
+from typing import Dict, Optional
+
+from packaging.version import LegacyVersion, Version
+from packaging.version import parse as parse_original
+
@dataclass
class PackageMetadata:
description: Optional[str]
license: Optional[str]
url: Optional[str]
+ _fmt: Optional[str] = "pyproject"
+
@dataclass
class Package:
@@ -38,9 +43,10 @@ def attr(self) -> str:
raise NotImplementedError()
async def metadata(self) -> PackageMetadata:
- from pynixify.package_requirements import run_nix_build, NixBuildError
+ from pynixify.package_requirements import NixBuildError, run_nix_build
+
source = await self.source()
- if source.name.endswith('.whl'):
+ if source.name.endswith(".whl"):
# Some nixpkgs packages use a wheel as source, which don't have a
# setup.py file. For now, ignore them assume they have no metadata
return PackageMetadata(
@@ -52,23 +58,23 @@ async def metadata(self) -> PackageMetadata:
assert nix_expression_path.exists()
nix_store_path = await run_nix_build(
str(nix_expression_path),
- '--no-out-link',
- '--no-build-output',
- '--arg',
- 'file',
- str(source.resolve())
+ "--no-out-link",
+ "--no-build-output",
+ "--arg",
+ "file",
+ str(source.resolve()),
)
- if (nix_store_path / 'failed').exists():
- print(f'Error parsing metadata of {source}. Assuming it has no metadata.')
+ if (nix_store_path / "failed").exists():
+ print(f"Error parsing metadata of {source}. Assuming it has no metadata.")
return PackageMetadata(
url=None,
description=None,
license=None,
)
- with (nix_store_path / 'meta.json').open() as fp:
+ with (nix_store_path / "meta.json").open() as fp:
metadata = json.load(fp)
try:
- version: Optional[str] = metadata.pop('version')
+ version: Optional[str] = metadata.pop("version")
except KeyError:
pass
else:
@@ -78,6 +84,7 @@ async def metadata(self) -> PackageMetadata:
self.version = Version(version)
return PackageMetadata(**metadata)
+
# mypy hack
def parse_version(version: str) -> Version:
v = parse_original(version)
diff --git a/pynixify/command.py b/pynixify/command.py
index cfcc6b8..b2e0c9d 100644
--- a/pynixify/command.py
+++ b/pynixify/command.py
@@ -14,63 +14,54 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
-import re
-import os
-import asyncio
import argparse
+import asyncio
+import re
from pathlib import Path
+from typing import Dict, List, Optional, Tuple
from urllib.parse import urlparse
-from typing import List, Dict, Optional, Tuple
+
+from packaging.requirements import Requirement
+from packaging.utils import canonicalize_name
from pkg_resources import parse_requirements
+
import pynixify.nixpkgs_sources
from pynixify.base import Package
-from pynixify.nixpkgs_sources import (
- NixpkgsData,
- load_nixpkgs_data,
- set_max_jobs,
-)
-from pynixify.pypi_api import (
- PyPICache,
- PyPIData,
-)
-from pynixify.version_chooser import (
- VersionChooser,
- ChosenPackageRequirements,
- evaluate_package_requirements,
-)
-from pynixify.expression_builder import (
- build_nix_expression,
- build_overlayed_nixpkgs,
- build_overlay_expr,
- build_shell_nix_expression,
- nixfmt,
-)
-from pynixify.pypi_api import (
- PyPIPackage,
- get_path_hash,
-)
-from packaging.requirements import Requirement
-from packaging.utils import canonicalize_name
+from pynixify.expression_builder import build_nix_expression # noqa
+from pynixify.expression_builder import build_overlay_expr # noqa
+from pynixify.expression_builder import build_overlayed_nixpkgs # noqa
+from pynixify.expression_builder import build_shell_nix_expression # noqa
+from pynixify.expression_builder import nixfmt # noqa
+from pynixify.nixpkgs_sources import set_max_jobs # noqa
+from pynixify.nixpkgs_sources import NixpkgsData, load_nixpkgs_data # noqa
+from pynixify.pypi_api import get_path_hash # noqa
+from pynixify.pypi_api import PyPICache, PyPIData, PyPIPackage # noqa
+from pynixify.version_chooser import ChosenPackageRequirements # noqa
+from pynixify.version_chooser import VersionChooser # noqa
+from pynixify.version_chooser import evaluate_package_requirements # noqa
async def _build_version_chooser(
- load_test_requirements_for: List[str],
- ignore_test_requirements_for: List[str],
- load_all_test_requirements: bool) -> VersionChooser:
+ load_test_requirements_for: List[str],
+ ignore_test_requirements_for: List[str],
+ load_all_test_requirements: bool,
+) -> VersionChooser:
nixpkgs_data = NixpkgsData(await load_nixpkgs_data({}))
pypi_cache = PyPICache()
pypi_data = PyPIData(pypi_cache)
+
def should_load_tests(package_name):
if canonicalize_name(package_name) in [
- canonicalize_name(n)
- for n in ignore_test_requirements_for
- ]:
+ canonicalize_name(n) for n in ignore_test_requirements_for
+ ]:
return False
return load_all_test_requirements or canonicalize_name(package_name) in [
- canonicalize_name(n)
- for n in load_test_requirements_for]
+ canonicalize_name(n) for n in load_test_requirements_for
+ ]
+
version_chooser = VersionChooser(
- nixpkgs_data, pypi_data,
+ nixpkgs_data,
+ pypi_data,
req_evaluate=evaluate_package_requirements,
should_load_tests=should_load_tests,
)
@@ -79,106 +70,131 @@ def should_load_tests(package_name):
def main():
parser = argparse.ArgumentParser(
- description=(
- 'Nix expression generator for Python packages.'
- ))
- parser.add_argument('requirement', nargs='*')
+ description=("Nix expression generator for Python packages.")
+ )
+ parser.add_argument("requirement", nargs="*")
parser.add_argument(
- '-l', '--local',
- metavar='NAME',
+ "-l",
+ "--local",
+ metavar="NAME",
help=(
'Create a "python.pkgs.NAME" derivation using the current '
- 'directory as source. Useful for packaging projects with a '
- 'setup.py.'
- ))
+ "directory as source. Useful for packaging projects with a "
+ "setup.py."
+ ),
+ )
parser.add_argument(
- '--nixpkgs',
+ "--nixpkgs",
help=(
- 'URL to a tarball containing the nixpkgs source. When specified, '
- 'the generated expressions will use it instead of <nixpkgs>, '
- 'improving reproducibility.'
- ))
+ "URL to a tarball containing the nixpkgs source. When specified, "
+ "the generated expressions will use it instead of <nixpkgs>, "
+ "improving reproducibility."
+ ),
+ )
parser.add_argument(
- '-o', '--output',
- metavar='DIR',
- default='pynixify/',
+ "-o",
+ "--output",
+ metavar="DIR",
+ default="pynixify/",
help=(
"Directory in which pynixify will save the generated Nix "
"expressions. If if doesn't exist, it will be automatically "
"created. [default. pynixify/]"
- ))
+ ),
+ )
parser.add_argument(
- '-O', '--overlay-only',
- action='store_true',
- help=(
- "Generate only overlay expresion."
- ))
+ "-O",
+ "--overlay-only",
+ action="store_true",
+ help=("Generate only overlay expression."),
+ )
parser.add_argument(
- '--all-tests',
- action='store_true',
+ "--all-tests",
+ action="store_true",
help=(
"Include test requirements in all generated expressions, "
"except for those explicitly excluded with --ignore-tests."
- ))
+ ),
+ )
parser.add_argument(
- '--ignore-tests',
- metavar='PACKAGES',
+ "--ignore-tests",
+ metavar="PACKAGES",
help=(
"Comma-separated list of packages for which we don't want "
"their test requirements to be loaded."
- ))
+ ),
+ )
parser.add_argument(
- '--tests',
- metavar='PACKAGES',
+ "--tests",
+ metavar="PACKAGES",
help=(
"Comma-separated list of packages for which we do want "
"their test requirements to be loaded."
- ))
+ ),
+ )
parser.add_argument(
- '-r',
- metavar='REQUIREMENTS_FILE',
- action='append',
+ "-r",
+ metavar="REQUIREMENTS_FILE",
+ action="append",
help=(
"A filename whose content is a PEP 508 compliant list of "
"dependencies. It can be specified multiple times to use more "
"than one file. Note that pip-specific options, such as "
"'-e git+https....' are not supported."
- ))
+ ),
+ )
parser.add_argument(
- '--max-jobs',
+ "--max-jobs",
type=int,
help=(
"Sets the maximum number of concurrent nix-build processes "
"executed by pynixify. If it isn't specified, it will be set to "
"the number of CPUs in the system."
- ))
+ ),
+ )
+ parser.add_argument(
+ "-p",
+ "--py",
+ default="python3",
+ help=(
+ "Name of the nixpkgs python interpreter package to install in the "
+ "generated shell.nix. Defaults to 'python3'."
+ ),
+ )
args = parser.parse_args()
- asyncio.run(_main_async(
- requirements=args.requirement,
- requirement_files=args.r or [],
- local=args.local,
- output_dir=args.output,
- nixpkgs=args.nixpkgs,
- load_all_test_requirements=args.all_tests,
- load_test_requirements_for=args.tests.split(',') if args.tests else [],
- ignore_test_requirements_for=args.ignore_tests.split(',') if args.ignore_tests else [],
- max_jobs=args.max_jobs,
- generate_only_overlay=args.overlay_only,
- ))
+ asyncio.run(
+ _main_async(
+ requirements=args.requirement,
+ requirement_files=args.r or [],
+ local=args.local,
+ output_dir=args.output,
+ nixpkgs=args.nixpkgs,
+ load_all_test_requirements=args.all_tests,
+ load_test_requirements_for=args.tests.split(",") if args.tests else [],
+ ignore_test_requirements_for=args.ignore_tests.split(",")
+ if args.ignore_tests
+ else [],
+ max_jobs=args.max_jobs,
+ generate_only_overlay=args.overlay_only,
+ interpreter=args.py,
+ )
+ )
-async def _main_async(
- requirements: List[str],
- requirement_files: List[str],
- local: Optional[str],
- nixpkgs: Optional[str],
- output_dir: Optional[str],
- load_test_requirements_for: List[str],
- ignore_test_requirements_for: List[str],
- load_all_test_requirements: bool,
- max_jobs: Optional[int],
- generate_only_overlay:bool):
+async def _main_async(
+ requirements: List[str],
+ requirement_files: List[str],
+ local: Optional[str],
+ nixpkgs: Optional[str],
+ output_dir: Optional[str],
+ load_test_requirements_for: List[str],
+ ignore_test_requirements_for: List[str],
+ load_all_test_requirements: bool,
+ max_jobs: Optional[int],
+ generate_only_overlay: bool,
+ interpreter: str,
+):
if nixpkgs is not None:
pynixify.nixpkgs_sources.NIXPKGS_URL = nixpkgs
@@ -186,8 +202,10 @@ async def _main_async(
set_max_jobs(max_jobs)
version_chooser: VersionChooser = await _build_version_chooser(
- load_test_requirements_for, ignore_test_requirements_for,
- load_all_test_requirements)
+ load_test_requirements_for,
+ ignore_test_requirements_for,
+ load_all_test_requirements,
+ )
if local is not None:
await version_chooser.require_local(local, Path.cwd())
@@ -201,14 +219,11 @@ async def _main_async(
for req_ in requirements:
all_requirements.append(Requirement(req_))
- await asyncio.gather(*(
- version_chooser.require(req)
- for req in all_requirements
- ))
+ await asyncio.gather(*(version_chooser.require(req) for req in all_requirements))
- output_dir = output_dir or 'pynixify'
+ output_dir = output_dir or "pynixify"
base_path = Path.cwd() / output_dir
- packages_path = base_path / 'packages'
+ packages_path = base_path / "packages"
packages_path.mkdir(parents=True, exist_ok=True)
overlays: Dict[str, Path] = {}
@@ -226,37 +241,36 @@ async def write_package_expression(package: PyPIPackage):
meta = await package.metadata()
try:
(pname, ext) = await get_pypi_data(
- package.download_url,
- str(package.version),
- sha256
+ package.download_url, str(package.version), sha256
)
except RuntimeError:
- expr = build_nix_expression(
- package, reqs, meta, sha256)
+ expr = build_nix_expression(package, reqs, meta, sha256)
else:
expr = build_nix_expression(
- package, reqs, meta, sha256, fetchPypi=(pname, ext))
- expression_dir = (packages_path / f'{package.pypi_name}/')
+ package, reqs, meta, sha256, fetchPypi=(pname, ext)
+ )
+ expression_dir = packages_path / f"{package.pypi_name}/"
expression_dir.mkdir(exist_ok=True)
- expression_path = expression_dir / 'default.nix'
- with expression_path.open('w') as fp:
+ expression_path = expression_dir / "default.nix"
+ with expression_path.open("w") as fp:
fp.write(await nixfmt(expr))
expression_path = expression_path.relative_to(base_path)
overlays[package.attr] = expression_path
- await asyncio.gather(*(
- write_package_expression(package)
- for package in version_chooser.all_pypi_packages()
- ))
+ await asyncio.gather(
+ *(
+ write_package_expression(package)
+ for package in version_chooser.all_pypi_packages()
+ )
+ )
if generate_only_overlay:
- with (base_path / 'overlay.nix').open('w') as fp:
+ with (base_path / "overlay.nix").open("w") as fp:
expr = build_overlay_expr(overlays)
fp.write(await nixfmt(expr))
return
-
- with (base_path / 'nixpkgs.nix').open('w') as fp:
+ with (base_path / "nixpkgs.nix").open("w") as fp:
if nixpkgs is None:
expr = build_overlayed_nixpkgs(overlays)
else:
@@ -274,15 +288,15 @@ async def write_package_expression(package: PyPIPackage):
assert p is not None
packages.append(p)
- with (base_path / 'shell.nix').open('w') as fp:
- expr = build_shell_nix_expression(packages)
+ with (base_path / "shell.nix").open("w") as fp:
+ expr = build_shell_nix_expression(packages, interpreter)
fp.write(await nixfmt(expr))
async def get_url_hash(url: str, unpack=True) -> str:
- cmd = ['nix-prefetch-url']
+ cmd = ["nix-prefetch-url"]
if unpack:
- cmd.append('--unpack')
+ cmd.append("--unpack")
cmd.append(url)
proc = await asyncio.create_subprocess_exec(
@@ -293,7 +307,7 @@ async def get_url_hash(url: str, unpack=True) -> str:
(stdout, _) = await proc.communicate()
status = await proc.wait()
if status != 0:
- raise RuntimeError(f'Could not get hash of URL: {url}')
+ raise RuntimeError(f"Could not get hash of URL: {url}")
return stdout.decode().strip()
@@ -306,21 +320,19 @@ async def get_pypi_data(url: str, version: str, sha256: str) -> Tuple[str, str]:
builtins.fetchurl, so our generated expression should do it too.
"""
filename = Path(urlparse(url).path).name
- match = re.match(
- f'(?P<pname>.+)-{re.escape(version)}\\.(?P<ext>.+)',
- filename
- )
+ match = re.match(f"(?P<pname>.+)-{re.escape(version)}\\.(?P<ext>.+)", filename)
if match is None:
- raise RuntimeError(f'Cannot build mirror://pypi URL from original URL: {url}')
+ raise RuntimeError(f"Cannot build mirror://pypi URL from original URL: {url}")
- pname, ext = match.group('pname'), match.group('ext')
+ pname, ext = match.group("pname"), match.group("ext")
# See /pkgs/development/python-modules/ansiwrap/default.nix
# "mirror://pypi/${builtins.substring 0 1 pname}/${pname}/${pname}-${version}.${extension}";
- url = f'mirror://pypi/{pname[0]}/{pname}/{pname}-{version}.{ext}'
+ url = f"mirror://pypi/{pname[0]}/{pname}/{pname}-{version}.{ext}"
newhash = await get_url_hash(url, unpack=False)
if newhash != sha256:
- raise RuntimeError(f'Invalid hash for URL: {url}')
+ raise RuntimeError(f"Invalid hash for URL: {url}")
return (pname, ext)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
main()
diff --git a/pynixify/data/flitcore_patch.diff b/pynixify/data/flitcore_patch.diff
new file mode 100644
index 0000000..c703f33
--- /dev/null
+++ b/pynixify/data/flitcore_patch.diff
@@ -0,0 +1,40 @@
+diff --git a/flit_core/flit_core/buildapi.py b/flit_core/flit_core/buildapi.py
+index 963bf61..5190b7e 100644
+--- a/flit_core/flit_core/buildapi.py
++++ b/flit_core/flit_core/buildapi.py
+@@ -3,6 +3,7 @@ import logging
+ import io
+ import os
+ import os.path as osp
++import sys
+ from pathlib import Path
+
+ from .common import (
+@@ -13,6 +14,19 @@ from .config import read_flit_config
+ from .wheel import make_wheel_in, _write_wheel_file
+ from .sdist import SdistBuilder
+
++def _write_pynixify_files(config_settings, deps):
++ if config_settings is not None and "PYNIXIFY_OUT" in config_settings:
++ from pathlib import Path
++ import json
++ pynix_out = Path(config_settings['PYNIXIFY_OUT'])
++ for target in ("tests", "setup", "install"):
++ fp = (pynix_out / ("%s_requires.txt" % target)).open("w")
++ fp.write('\n'.join([str(req) for req in deps]))
++ fp.write('\nflit_core')
++ fp.close()
++ with (pynix_out / 'meta.json').open('w') as fp:
++ json.dump({"version": None, "url": None, "license": None, "description": None}, fp)
++
+ log = logging.getLogger(__name__)
+
+ # PEP 517 specifies that the CWD will always be the source tree
+@@ -70,6 +84,7 @@ prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel
+ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+ """Builds a wheel, places it in wheel_directory"""
+ info = make_wheel_in(pyproj_toml, Path(wheel_directory))
++ _write_pynixify_files(config_settings, [])
+ return info.file.name
+
+ def build_editable(wheel_directory, config_settings=None, metadata_directory=None):
diff --git a/pynixify/data/hatchling_patch.diff b/pynixify/data/hatchling_patch.diff
new file mode 100644
index 0000000..bb1f3a4
--- /dev/null
+++ b/pynixify/data/hatchling_patch.diff
@@ -0,0 +1,88 @@
+diff --git a/src/hatchling/build.py b/src/hatchling/build.py
+index d79c1e2e..c85a837e 100644
+--- a/src/hatchling/build.py
++++ b/src/hatchling/build.py
+@@ -1,6 +1,20 @@
+ import os
+
+
++def _write_pynixify_files(config_settings, deps):
++ if config_settings is not None and "PYNIXIFY_OUT" in config_settings:
++ from pathlib import Path
++ import json
++ pynix_out = Path(config_settings['PYNIXIFY_OUT'])
++ for target in ("tests", "setup", "install"):
++ fp = (pynix_out / ("%s_requires.txt" % target)).open("w")
++ fp.write('\n'.join([str(req) for req in deps]))
++ fp.write('\nhatchling\nhatch-vcs')
++ fp.close()
++ with (pynix_out / 'meta.json').open('w') as fp:
++ json.dump({"version": None, "url": None, "license": None, "description": None}, fp)
++
++
+ def get_requires_for_build_sdist(config_settings=None):
+ """
+ https://peps.python.org/pep-0517/#get-requires-for-build-sdist
+@@ -8,6 +22,7 @@ def get_requires_for_build_sdist(config_settings=None):
+ from hatchling.builders.sdist import SdistBuilder
+
+ builder = SdistBuilder(os.getcwd())
++ _write_pynixify_files(config_settings, builder.config.dependencies)
+ return builder.config.dependencies
+
+
+@@ -18,6 +33,7 @@ def build_sdist(sdist_directory, config_settings=None):
+ from hatchling.builders.sdist import SdistBuilder
+
+ builder = SdistBuilder(os.getcwd())
++ _write_pynixify_files(config_settings, builder.config.dependencies)
+ return os.path.basename(next(builder.build(sdist_directory, ['standard'])))
+
+
+@@ -28,6 +44,7 @@ def get_requires_for_build_wheel(config_settings=None):
+ from hatchling.builders.wheel import WheelBuilder
+
+ builder = WheelBuilder(os.getcwd())
++ _write_pynixify_files(config_settings, builder.config.dependencies)
+ return builder.config.dependencies
+
+
+@@ -38,6 +55,7 @@ def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
+ from hatchling.builders.wheel import WheelBuilder
+
+ builder = WheelBuilder(os.getcwd())
++ _write_pynixify_files(config_settings, builder.config.dependencies)
+ return os.path.basename(next(builder.build(wheel_directory, ['standard'])))
+
+
+@@ -48,6 +66,7 @@ def get_requires_for_build_editable(config_settings=None):
+ from hatchling.builders.wheel import WheelBuilder
+
+ builder = WheelBuilder(os.getcwd())
++ _write_pynixify_files(config_settings, builder.config.dependencies)
+ return builder.config.dependencies
+
+
+@@ -58,6 +77,7 @@ def build_editable(wheel_directory, config_settings=None, metadata_directory=Non
+ from hatchling.builders.wheel import WheelBuilder
+
+ builder = WheelBuilder(os.getcwd())
++ _write_pynixify_files(config_settings, builder.config.dependencies)
+ return os.path.basename(next(builder.build(wheel_directory, ['editable'])))
+
+
+@@ -89,6 +109,7 @@ if 'PIP_BUILD_TRACKER' not in os.environ:
+
+ with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f:
+ f.write(builder.config.core_metadata_constructor(builder.metadata))
++ _write_pynixify_files(config_settings, builder.config.dependencies)
+
+ return os.path.basename(directory)
+
+@@ -110,5 +131,6 @@ if 'PIP_BUILD_TRACKER' not in os.environ:
+
+ with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f:
+ f.write(builder.config.core_metadata_constructor(builder.metadata, extra_dependencies=extra_dependencies))
++ _write_pynixify_files(config_settings, builder.config.dependencies)
+
+ return os.path.basename(directory)
diff --git a/pynixify/data/parse_setuppy_data.nix b/pynixify/data/parse_setuppy_data.nix
index b2194c0..11b798b 100644
--- a/pynixify/data/parse_setuppy_data.nix
+++ b/pynixify/data/parse_setuppy_data.nix
@@ -1,43 +1,133 @@
-{ file, stdenv ? (import { }).stdenv, lib ? (import { }).lib
-, unzip ? (import { }).unzip, python ? (import { }).python3
-}:
+{ file, pkgs ? import (builtins.fetchGit {
+ name = "nixos-22.11";
+ url = "https://github.com/nixos/nixpkgs/";
+ # `git ls-remote https://github.com/nixos/nixpkgs nixos-unstable`
+ ref = "refs/heads/nixos-22.11";
+ rev = "6c591e7adc514090a77209f56c9d0c551ab8530d";
+}) { } }:
let
removeExt = fileName: builtins.elemAt (builtins.split "\\." fileName) 0;
- patchedSetuptools = python.pkgs.setuptools.overrideAttrs (ps: {
- # src = (import {}).lib.cleanSource ./setuptools;
-
+ patchedSetuptools = pkgs.python3.pkgs.setuptools.overrideAttrs (ps: {
patches = [ ./setuptools_patch.diff ];
- patchFlags = lib.optionals (lib.versionOlder "61" python.pkgs.setuptools.version) ["--merge" "-p1"];
-
+ patchFlags = pkgs.lib.optionals
+ (pkgs.lib.versionOlder "61" pkgs.python3.pkgs.setuptools.version) [
+ "--merge"
+ "-p1"
+ ];
});
- pythonWithPackages = python.withPackages (ps: [ patchedSetuptools ]);
+ setuptoolsscm = pkgs.python3.pkgs.buildPythonPackage rec {
+ pname = "setuptools-scm";
+ version = "7.0.5";
+
+ src = pkgs.python3.pkgs.fetchPypi {
+ pname = "setuptools_scm";
+ inherit version;
+ sha256 = "sha256-Ax4Tr3cdb4krlBrbbqBFRbv5Hrxc5ox4qvP/9uH7SEQ=";
+ };
+
+ propagatedBuildInputs = [
+ pkgs.python3.pkgs.packaging
+ pkgs.python3.pkgs.typing-extensions
+ pkgs.python3.pkgs.tomli
+ patchedSetuptools
+ ];
+
+ pythonImportsCheck = [ "setuptools_scm" ];
+
+ # check in passthru.tests.pytest to escape infinite recursion on pytest
+ doCheck = false;
+ };
+ hatchling = pkgs.python3.pkgs.hatchling.overrideAttrs
+ (ps: { patches = [ ./hatchling_patch.diff ]; });
+ hatchvcs = pkgs.python3.pkgs.buildPythonPackage rec {
+ pname = "hatch-vcs";
+ version = "0.2.0";
+ format = "pyproject";
+
+ disabled = pkgs.python3.pkgs.pythonOlder "3.7";
+
+ src = pkgs.python3.pkgs.fetchPypi {
+ pname = "hatch_vcs";
+ inherit version;
+ sha256 = "sha256-mRPXM7NO7JuwNF0GJsoyFlpK0t4V0c5kPDbQnKkIq/8=";
+ };
+
+ nativeBuildInputs = [ hatchling ];
+
+ propagatedBuildInputs = [ hatchling setuptoolsscm ];
+
+ checkInputs = [ pkgs.git pkgs.python3.pkgs.pytestCheckHook ];
+
+ disabledTests = [
+ # incompatible with setuptools-scm>=7
+ # https://github.com/ofek/hatch-vcs/issues/8
+ "test_write"
+ ];
+
+ pythonImportsCheck = [ "hatch_vcs" ];
+ };
+ patchedflitcore = pkgs.python3.pkgs.flit-core.overrideAttrs
+ (ps: { patches = [ ./flitcore_patch.diff ]; });
+ flitscm = pkgs.python3.pkgs.buildPythonPackage rec {
+ pname = "flit-scm";
+ version = "1.7.0";
+
+ format = "pyproject";
+
+ src = pkgs.fetchFromGitLab {
+ owner = "WillDaSilva";
+ repo = "flit_scm";
+ rev = version;
+ sha256 = "sha256-K5sH+oHgX/ftvhkY+vIg6wUokAP96YxrTWds3tnEtyg=";
+ leaveDotGit = true;
+ };
+
+ nativeBuildInputs =
+ [ patchedflitcore setuptoolsscm pkgs.python3.pkgs.tomli pkgs.git ];
+ propagatedBuildInputs = [ patchedflitcore setuptoolsscm ]
+ ++ pkgs.lib.optionals (pkgs.python3.pkgs.pythonOlder "3.11")
+ [ pkgs.python3.pkgs.tomli ];
+ };
+
+ pythonWithPackages = pkgs.python3.withPackages (ps: [
+ patchedSetuptools
+ setuptoolsscm
+ hatchling
+ hatchvcs
+ flitscm
+ pkgs.python3.pkgs.pip
+ ]);
cleanSource = src:
- lib.cleanSourceWith {
+ pkgs.lib.cleanSourceWith {
filter = name: type:
- lib.cleanSourceFilter name type && builtins.baseNameOf (toString name)
- != "pynixify";
+ pkgs.lib.cleanSourceFilter name type
+ && builtins.baseNameOf (toString name) != "pynixify";
name = builtins.baseNameOf src;
inherit src;
};
-in stdenv.mkDerivation {
+in pkgs.stdenv.mkDerivation {
name = "setup.py_data_${removeExt (builtins.baseNameOf file)}";
src = cleanSource file;
- nativeBuildInputs = [ unzip ];
- buildInputs = [ pythonWithPackages ];
+ nativeBuildInputs = [ pkgs.unzip ];
+ buildInputs = [ pythonWithPackages pkgs.hatch ];
configurePhase = ''
true # We don't want to execute ./configure
'';
buildPhase = ''
mkdir -p $out
- if ! PYNIXIFY=1 python setup.py install; then
- # Indicate that fetching the result failed, but let the build succeed
- touch $out/failed
+ if PYNIXIFY=1 python setup.py install; then
+ exit 0
+ fi
+ if ${pkgs.python3.pkgs.pip}/bin/pip --no-cache-dir wheel --config-settings PYNIXIFY_OUT=$out --no-build-isolation $PWD; then
+ exit 0
fi
+ # Indicate that fetching the result failed, but let the build succeed
+ touch $out/failed
'';
dontInstall = true;
}
diff --git a/pynixify/data/setuptools_patch.diff b/pynixify/data/setuptools_patch.diff
index a3e4532..59915d0 100644
--- a/pynixify/data/setuptools_patch.diff
+++ b/pynixify/data/setuptools_patch.diff
@@ -2,7 +2,7 @@ diff --git a/setuptools/__init__.py b/setuptools/__init__.py
index 83882511..259effd5 100644
--- a/setuptools/__init__.py
+++ b/setuptools/__init__.py
-@@ -155,14 +155,63 @@ def _install_setup_requires(attrs):
+@@ -155,14 +155,64 @@ def _install_setup_requires(attrs):
# Honor setup.cfg's options.
dist.parse_config_files(ignore_option_errors=True)
@@ -56,6 +56,7 @@ index 83882511..259effd5 100644
+ meta_attrs = {'description', 'url', 'license', 'version'}
+ for meta_attr in meta_attrs:
+ meta[meta_attr] = attrs.get(meta_attr)
++ meta['_fmt'] = 'setuptools'
+ with (out / 'meta.json').open('w') as fp:
+ json.dump(meta, fp)
+ else:
diff --git a/pynixify/exceptions.py b/pynixify/exceptions.py
index 190954f..60240d8 100644
--- a/pynixify/exceptions.py
+++ b/pynixify/exceptions.py
@@ -14,14 +14,18 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
class PackageNotFound(Exception):
pass
+
class NoMatchingVersionFound(Exception):
pass
+
class IntegrityError(Exception):
pass
+
class NixBuildError(Exception):
pass
diff --git a/pynixify/expression_builder.py b/pynixify/expression_builder.py
index cbd89d6..c03c6f9 100644
--- a/pynixify/expression_builder.py
+++ b/pynixify/expression_builder.py
@@ -16,14 +16,13 @@
import asyncio
from pathlib import Path
-from typing import Iterable, Mapping, List, Set, Optional, Tuple
+from typing import Iterable, List, Mapping, Optional, Set, Tuple
+
from mako.template import Template
-from pynixify.version_chooser import (
- VersionChooser,
- ChosenPackageRequirements,
-)
-from pynixify.base import PackageMetadata, Package
+
+from pynixify.base import Package, PackageMetadata
from pynixify.pypi_api import PyPIPackage
+from pynixify.version_chooser import ChosenPackageRequirements, VersionChooser
DISCLAIMER = """
# WARNING: This file was automatically generated. You should avoid editing it.
@@ -32,12 +31,14 @@
"""
-expression_template = Template("""${DISCLAIMER}
+expression_template = Template(
+ """${DISCLAIMER}
{ ${', '.join(args)} }:
buildPythonPackage rec {
pname = ${package.pypi_name | nix};
version = ${version | nix};
+ format = ${metadata._fmt | nix};
% if package.local_source:
src = lib.cleanSource ../../..;
@@ -88,9 +89,11 @@
% endif
};
}
-""")
+"""
+)
-overlayed_nixpkgs_template = Template("""${DISCLAIMER}
+overlayed_nixpkgs_template = Template(
+ """${DISCLAIMER}
{ overlays ? [ ], ... }@args:
let
pynixifyOverlay = self: super: {
@@ -112,7 +115,7 @@
packageOverrides = self: super: {
% for (package_name, path) in overlays.items():
- ${package_name} =
+ ${"_" + package_name} =
self.callPackage
${'' if path.is_absolute() else './'}${str(path).replace('/default.nix', '')} {};
@@ -120,10 +123,12 @@
};
in import nixpkgs (args // { overlays = [ pynixifyOverlay ] ++ overlays; })
-""")
+"""
+)
-shell_nix_template = Template("""${DISCLAIMER}
- { python ? "python3" }:
+shell_nix_template = Template(
+ """${DISCLAIMER}
+ { python ? "${interpreter}" }:
let
pkgs = import ./nixpkgs.nix {};
pythonPkg = builtins.getAttr python pkgs;
@@ -138,79 +143,87 @@
]))
];
}
-""")
+"""
+)
+
def build_nix_expression(
- package: PyPIPackage,
- requirements: ChosenPackageRequirements,
- metadata: PackageMetadata,
- sha256: str,
- fetchPypi: Optional[Tuple[str, str]] = None,
- ) -> str:
- non_python_dependencies = ['lib', 'fetchPypi', 'buildPythonPackage']
+ package: PyPIPackage,
+ requirements: ChosenPackageRequirements,
+ metadata: PackageMetadata,
+ sha256: str,
+ fetchPypi: Optional[Tuple[str, str]] = None,
+) -> str:
+ non_python_dependencies = ["lib", "fetchPypi", "buildPythonPackage"]
runtime_requirements: List[str] = [
- p.attr for p in requirements.runtime_requirements]
- build_requirements: List[str] = [
- p.attr for p in requirements.build_requirements]
- test_requirements: List[str] = [
- p.attr for p in requirements.test_requirements]
+ p.attr for p in requirements.runtime_requirements
+ ]
+ build_requirements: List[str] = [p.attr for p in requirements.build_requirements]
+ test_requirements: List[str] = [p.attr for p in requirements.test_requirements]
args: List[str]
- args = sorted(set(
- non_python_dependencies + runtime_requirements +
- test_requirements + build_requirements))
+ args = sorted(
+ set(
+ non_python_dependencies
+ + runtime_requirements
+ + test_requirements
+ + build_requirements
+ )
+ )
version = str(package.version)
nix = escape_string
return expression_template.render(DISCLAIMER=DISCLAIMER, **locals())
+
def build_overlay_expr(overlays: Mapping[str, Path]):
- return Template("""
+ return Template(
+ """
self: super: {
% for (package_name, path) in overlays.items():
- ${package_name} =
+ ${"_" + package_name} =
self.callPackage
${'' if path.is_absolute() else './'}${str(path).replace('/default.nix', '')} {};
% endfor
- }""").render(overlays=overlays)
+ }"""
+ ).render(overlays=overlays)
+
def build_overlayed_nixpkgs(
- overlays: Mapping[str, Path],
- nixpkgs: Optional[Tuple[str, str]] = None
- ) -> str:
+ overlays: Mapping[str, Path], nixpkgs: Optional[Tuple[str, str]] = None
+) -> str:
nix = escape_string
# Sort dictionary keys to ensure pynixify/nixpkgs.nix will have the
# same contents in different pynixify runs.
- overlays = {
- k: overlays[k]
- for k in sorted(overlays.keys())
- }
+ overlays = {k: overlays[k] for k in sorted(overlays.keys())}
# Taken from Interpreters section in https://nixos.org/nixpkgs/manual/#reference
interpreters = [
- 'python2',
- 'python27',
- 'python3',
- 'python35',
- 'python36',
- 'python37',
- 'python38',
- 'python39',
- 'python310'
+ "python2",
+ "python27",
+ "python3",
+ "python35",
+ "python36",
+ "python37",
+ "python38",
+ "python39",
+ "python310",
]
return overlayed_nixpkgs_template.render(DISCLAIMER=DISCLAIMER, **locals())
-def build_shell_nix_expression(packages: List[Package]) -> str:
- return shell_nix_template.render(DISCLAIMER=DISCLAIMER, packages=packages)
+def build_shell_nix_expression(packages: List[Package], interpreter: str) -> str:
+ return shell_nix_template.render(
+ DISCLAIMER=DISCLAIMER, packages=packages, interpreter=interpreter
+ )
async def nixfmt(expr: str) -> str:
proc = await asyncio.create_subprocess_exec(
- 'nixfmt',
+ "nixfmt",
stdout=asyncio.subprocess.PIPE,
stdin=asyncio.subprocess.PIPE,
)
@@ -219,15 +232,16 @@ async def nixfmt(expr: str) -> str:
(stdout, _) = await proc.communicate()
status = await proc.wait()
if status:
- raise TypeError(f'nixfmt failed')
+ raise TypeError("nixfmt failed")
return stdout.decode()
+
def escape_string(string: str) -> str:
# Based on the documentation in https://nixos.org/nix/manual/#idm140737322106128
- string = string.replace('\\', '\\\\')
+ string = string.replace("\\", "\\\\")
string = string.replace('"', '\\"')
- string = string.replace('\n', '\\n')
- string = string.replace('\t', '\\t')
- string = string.replace('\r', '\\r')
- string = string.replace('${', '\\${')
+ string = string.replace("\n", "\\n")
+ string = string.replace("\t", "\\t")
+ string = string.replace("\r", "\\r")
+ string = string.replace("${", "\\${")
return f'"{string}"'
diff --git a/pynixify/nixpkgs_sources.py b/pynixify/nixpkgs_sources.py
index f2f435a..99369fa 100644
--- a/pynixify/nixpkgs_sources.py
+++ b/pynixify/nixpkgs_sources.py
@@ -14,21 +14,24 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-import sys
-import json
import asyncio
-from pathlib import Path
-from typing import Sequence, Any, Optional
+import json
+import sys
from collections import defaultdict
from multiprocessing import cpu_count
-from packaging.utils import canonicalize_name
+from pathlib import Path
+from typing import Any, Optional, Sequence
+
from packaging.requirements import Requirement
+from packaging.utils import canonicalize_name
from packaging.version import Version
+
from pynixify.base import Package, parse_version
-from pynixify.exceptions import PackageNotFound, NixBuildError
+from pynixify.exceptions import NixBuildError, PackageNotFound
NIXPKGS_URL: Optional[str] = None
+
class NixPackage(Package):
def __init__(self, *, attr: str, version: Version):
self.version = version
@@ -55,24 +58,21 @@ async def source(self, extra_args=[]):
name = "ATTR_dummy_src";
destination = "/setup.py";
}
- """.replace('ATTR', self.attr)
- args = [
- '--no-out-link',
- '--no-build-output',
- '-E',
- expr
- ]
+ """.replace(
+ "ATTR", self.attr
+ )
+ args = ["--no-out-link", "--no-build-output", "-E", expr]
args += extra_args
return await run_nix_build(*args)
def __str__(self):
- return f'NixPackage(attr={self.attr}, version={self.version})'
+ return f"NixPackage(attr={self.attr}, version={self.version})"
class NixpkgsData:
def __init__(self, data):
data_defaultdict: Any = defaultdict(list)
- for (k, v) in data.items():
+ for k, v in data.items():
data_defaultdict[canonicalize_name(k)] += v
self.__data = dict(data_defaultdict)
@@ -80,9 +80,9 @@ def from_pypi_name(self, name: str) -> Sequence[NixPackage]:
try:
data = self.__data[canonicalize_name(name)]
except KeyError:
- raise PackageNotFound(f'{name} is not defined in nixpkgs')
+ raise PackageNotFound(f"{name} is not defined in nixpkgs")
return [
- NixPackage(attr=drv['attr'], version=parse_version(drv['version']))
+ NixPackage(attr=drv["attr"], version=parse_version(drv["version"]))
for drv in data
]
@@ -94,16 +94,17 @@ def from_requirement(self, req: Requirement) -> Sequence[NixPackage]:
async def load_nixpkgs_data(extra_args):
nix_expression_path = Path(__file__).parent / "data" / "pythonPackages.nix"
args = [
- '--eval',
- '--strict',
- '--json',
+ "--eval",
+ "--strict",
+ "--json",
str(nix_expression_path),
]
args += extra_args
if NIXPKGS_URL is not None:
- args += ['-I', f'nixpkgs={NIXPKGS_URL}']
+ args += ["-I", f"nixpkgs={NIXPKGS_URL}"]
proc = await asyncio.create_subprocess_exec(
- 'nix-instantiate', *args, stdout=asyncio.subprocess.PIPE)
+ "nix-instantiate", *args, stdout=asyncio.subprocess.PIPE
+ )
(stdout, _) = await proc.communicate()
status = await proc.wait()
assert status == 0
@@ -114,35 +115,34 @@ async def load_nixpkgs_data(extra_args):
async def _run_nix_build(*args: Sequence[str], retries=0, max_retries=5) -> Path:
if NIXPKGS_URL is not None:
# TODO fix mypy hack
- args_ = list(args) + ['-I', f'nixpkgs={NIXPKGS_URL}']
+ args_ = list(args) + ["-I", f"nixpkgs={NIXPKGS_URL}"]
else:
args_ = list(args)
# TODO remove mypy ignore below and fix compatibility with mypy 0.790
proc = await asyncio.create_subprocess_exec(
- 'nix-build', *args_, stdout=asyncio.subprocess.PIPE, # type: ignore
- stderr=asyncio.subprocess.PIPE)
+ "nix-build",
+ *args_,
+ stdout=asyncio.subprocess.PIPE, # type: ignore
+ stderr=asyncio.subprocess.PIPE,
+ )
(stdout, stderr) = await proc.communicate()
status = await proc.wait()
- if b'all build users are currently in use' in stderr and retries < max_retries:
+ if b"all build users are currently in use" in stderr and retries < max_retries:
# perform an expotential backoff and retry
# TODO think a way to avoid relying in the error message
sys.stderr.write(
- f'warning: All build users are currently in use. '
- f'Retrying in {2**retries} seconds\n'
- )
- await asyncio.sleep(2**retries)
- return await run_nix_build(
- *args,
- retries=retries+1,
- max_retries=max_retries
+ f"warning: All build users are currently in use. "
+ f"Retrying in {2**retries} seconds\n"
)
+ await asyncio.sleep(2**retries) # noqa
+ return await run_nix_build(*args, retries=retries + 1, max_retries=max_retries)
elif retries >= max_retries:
- sys.stderr.write(f'error: Giving up after {max_retries} failed retries\n')
+ sys.stderr.write(f"error: Giving up after {max_retries} failed retries\n")
if status:
print(stderr.decode(), file=sys.stderr)
- raise NixBuildError(f'nix-build failed with code {status}')
+ raise NixBuildError(f"nix-build failed with code {status}")
return Path(stdout.strip().decode())
diff --git a/pynixify/package_requirements.py b/pynixify/package_requirements.py
index 5a17cc5..85a820a 100644
--- a/pynixify/package_requirements.py
+++ b/pynixify/package_requirements.py
@@ -14,13 +14,15 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
+from dataclasses import dataclass
from pathlib import Path
from typing import List
-from dataclasses import dataclass
+
from packaging.requirements import Requirement
from pkg_resources import parse_requirements
-from pynixify.nixpkgs_sources import run_nix_build
+
from pynixify.exceptions import NixBuildError
+from pynixify.nixpkgs_sources import run_nix_build
@dataclass
@@ -32,12 +34,12 @@ class PackageRequirements:
@classmethod
def from_result_path(cls, result_path: Path):
attr_mapping = {
- 'build_requirements': Path('setup_requires.txt'),
- 'test_requirements': Path('tests_requires.txt'),
- 'runtime_requirements': Path('install_requires.txt'),
+ "build_requirements": Path("setup_requires.txt"),
+ "test_requirements": Path("tests_requires.txt"),
+ "runtime_requirements": Path("install_requires.txt"),
}
kwargs = {}
- for (attr, filename) in attr_mapping.items():
+ for attr, filename in attr_mapping.items():
with (result_path / filename).open() as fp:
# Convert from Requirement.parse to Requirement
reqs = [Requirement(str(r)) for r in parse_requirements(fp)]
@@ -47,27 +49,27 @@ def from_result_path(cls, result_path: Path):
async def eval_path_requirements(path: Path) -> PackageRequirements:
nix_expression_path = Path(__file__).parent / "data" / "parse_setuppy_data.nix"
- if path.name.endswith('.whl'):
+ if path.name.endswith(".whl"):
# Some nixpkgs packages use a wheel as source, which don't have a
# setup.py file. For now, ignore them assume they have no dependencies
- print(f'{path} is a wheel file instead of a source distribution. '
- f'Assuming it has no dependencies.')
+ print(
+ f"{path} is a wheel file instead of a source distribution. "
+ f"Assuming it has no dependencies."
+ )
return PackageRequirements(
- build_requirements=[],
- test_requirements=[],
- runtime_requirements=[]
+ build_requirements=[], test_requirements=[], runtime_requirements=[]
)
assert nix_expression_path.exists()
nix_store_path = await run_nix_build(
str(nix_expression_path),
- '--no-out-link',
- '--no-build-output',
- '--arg',
- 'file',
- str(path.resolve())
+ "--no-out-link",
+ "--no-build-output",
+ "--arg",
+ "file",
+ str(path.resolve()),
)
- if (nix_store_path / 'failed').exists():
- print(f'Error parsing requirements of {path}. Assuming it has no dependencies.')
+ if (nix_store_path / "failed").exists():
+ print(f"Error parsing requirements of {path}. Assuming it has no dependencies.")
return PackageRequirements(
build_requirements=[],
test_requirements=[],
diff --git a/pynixify/pypi_api.py b/pynixify/pypi_api.py
index 3b2a960..c613172 100644
--- a/pynixify/pypi_api.py
+++ b/pynixify/pypi_api.py
@@ -14,26 +14,25 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-import os
-import sys
-import json
import asyncio
import hashlib
-import aiohttp
-import aiofiles
-from typing import Sequence, Optional, List
-from pathlib import Path
-from dataclasses import dataclass, field
-from urllib.parse import urlunparse
+import json
+import os
+import sys
from abc import ABCMeta, abstractmethod
-from urllib.parse import quote, urlparse
-from packaging.utils import canonicalize_name
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import List, Optional, Sequence
+from urllib.parse import quote, urlparse, urlunparse
+
+import aiofiles
+import aiohttp
from packaging.requirements import Requirement
+from packaging.utils import canonicalize_name
from packaging.version import Version, parse
+
from pynixify.base import Package, parse_version
-from pynixify.exceptions import (
- IntegrityError
-)
+from pynixify.exceptions import IntegrityError
class ABCPyPICache(metaclass=ABCMeta):
@@ -58,9 +57,10 @@ async def source(self, extra_args=[]) -> Path:
if self.local_source is not None:
return self.local_source
downloaded_file: Path = await self.pypi_cache.fetch_url(
- self.download_url, self.sha256)
+ self.download_url, self.sha256
+ )
h = hashlib.sha256()
- with downloaded_file.open('rb') as fp:
+ with downloaded_file.open("rb") as fp:
while True:
data = fp.read(65536)
if not data:
@@ -83,7 +83,7 @@ def attr(self):
return self.pypi_name
def __str__(self):
- return f'PyPIPackage(attr={self.attr}, version={self.version})'
+ return f"PyPIPackage(attr={self.attr}, version={self.version})"
class PyPIData:
@@ -93,31 +93,34 @@ def __init__(self, pypi_cache):
async def from_requirement(self, req: Requirement) -> Sequence[PyPIPackage]:
response = await self.pypi_cache.fetch(canonicalize_name(req.name))
matching = []
- for (version, version_dist) in response['releases'].items():
+ for version, version_dist in response["releases"].items():
try:
- data = next(e for e in version_dist if e['packagetype'] == 'sdist')
+ data = next(e for e in version_dist if e["packagetype"] == "sdist")
except StopIteration:
continue
if version in req.specifier:
- matching.append(PyPIPackage(
- sha256=data['digests']['sha256'],
- version=parse_version(version),
- download_url=data['url'],
- pypi_name=canonicalize_name(req.name),
- pypi_cache=self.pypi_cache,
- ))
+ matching.append(
+ PyPIPackage(
+ sha256=data["digests"]["sha256"],
+ version=parse_version(version),
+ download_url=data["url"],
+ pypi_name=canonicalize_name(req.name),
+ pypi_cache=self.pypi_cache,
+ )
+ )
return matching
class PyPICache:
async def fetch(self, package_name):
- url = f'https://pypi.org/pypi/{quote(package_name)}/json'
+ url = f"https://pypi.org/pypi/{quote(package_name)}/json"
async with aiohttp.ClientSession(raise_for_status=True) as session:
async with session.get(url) as response:
return await response.json()
async def fetch_url(self, url, sha256) -> Path:
from pynixify.expression_builder import escape_string
+
expr = f"""
builtins.fetchurl {{
url = {escape_string(url)};
@@ -132,12 +135,16 @@ async def fetch_url(self, url, sha256) -> Path:
async def nix_instantiate(expr: str, attr=None, **kwargs):
extra_args: List[str] = []
if attr is not None:
- extra_args += ['--attr', attr]
- for (k, v) in kwargs.items():
- extra_args += ['--arg', k, v]
+ extra_args += ["--attr", attr]
+ for k, v in kwargs.items():
+ extra_args += ["--arg", k, v]
proc = await asyncio.create_subprocess_exec(
- 'nix-instantiate', '--json', '--eval', '-', *extra_args,
+ "nix-instantiate",
+ "--json",
+ "--eval",
+ "-",
+ *extra_args,
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
@@ -150,16 +157,19 @@ async def nix_instantiate(expr: str, attr=None, **kwargs):
async def get_path_hash(path: Path) -> str:
- url = urlunparse((
- 'file',
- '',
- str(path.resolve()),
- '',
- '',
- '',
- ))
+ url = urlunparse(
+ (
+ "file",
+ "",
+ str(path.resolve()),
+ "",
+ "",
+ "",
+ )
+ )
proc = await asyncio.create_subprocess_exec(
- 'nix-prefetch-url', url,
+ "nix-prefetch-url",
+ url,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
)
@@ -167,5 +177,5 @@ async def get_path_hash(path: Path) -> str:
status = await proc.wait()
if status:
print(stderr.decode(), file=sys.stderr)
- raise RuntimeError(f'nix-prefetch-url failed with code {status}')
+ raise RuntimeError(f"nix-prefetch-url failed with code {status}")
return stdout.decode().strip()
diff --git a/pynixify/version_chooser.py b/pynixify/version_chooser.py
index debe2ee..866879e 100644
--- a/pynixify/version_chooser.py
+++ b/pynixify/version_chooser.py
@@ -16,29 +16,30 @@
import asyncio
import operator
-from pathlib import Path
from dataclasses import dataclass
-from typing import Any, Dict, Callable, Awaitable, Optional, List, Tuple
+from pathlib import Path
+from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple
+
from packaging.requirements import Requirement
-from packaging.utils import canonicalize_name
from packaging.specifiers import SpecifierSet
+from packaging.utils import canonicalize_name
+
from pynixify.base import Package, parse_version
-from pynixify.nixpkgs_sources import NixpkgsData, NixPackage
-from pynixify.pypi_api import PyPIData, PyPIPackage
-from pynixify.package_requirements import (
- PackageRequirements,
- eval_path_requirements,
-)
-from pynixify.exceptions import (
- NoMatchingVersionFound,
- PackageNotFound,
-)
+from pynixify.exceptions import NoMatchingVersionFound, PackageNotFound
+from pynixify.nixpkgs_sources import NixPackage, NixpkgsData
+from pynixify.package_requirements import PackageRequirements # noqa
+from pynixify.package_requirements import eval_path_requirements # noqa
+from pynixify.pypi_api import PyPIData, PyPIPackage # noqa
+
class VersionChooser:
- def __init__(self, nixpkgs_data: NixpkgsData, pypi_data: PyPIData,
- req_evaluate: Callable[[Package], Awaitable[PackageRequirements]],
- should_load_tests: Callable[[str], bool] = lambda _: False,
- ):
+ def __init__(
+ self,
+ nixpkgs_data: NixpkgsData,
+ pypi_data: PyPIData,
+ req_evaluate: Callable[[Package], Awaitable[PackageRequirements]],
+ should_load_tests: Callable[[str], bool] = lambda _: False,
+ ):
self.nixpkgs_data = nixpkgs_data
self.pypi_data = pypi_data
self._choosed_packages: Dict[str, Tuple[Package, SpecifierSet]] = {}
@@ -46,7 +47,7 @@ def __init__(self, nixpkgs_data: NixpkgsData, pypi_data: PyPIData,
self.evaluate_requirements = req_evaluate
self.should_load_tests = should_load_tests
- async def require(self, r: Requirement, coming_from: Optional[Package]=None):
+ async def require(self, r: Requirement, coming_from: Optional[Package] = None):
pkg: Package
if r.marker and not r.marker.evaluate():
@@ -58,16 +59,20 @@ async def require(self, r: Requirement, coming_from: Optional[Package]=None):
is_in_nixpkgs = False
else:
is_in_nixpkgs = True
- if (isinstance(coming_from, NixPackage) and
- is_in_nixpkgs and
- not self.nixpkgs_data.from_requirement(r)):
+ if (
+ isinstance(coming_from, NixPackage)
+ and is_in_nixpkgs
+ and not self.nixpkgs_data.from_requirement(r)
+ ):
# This shouldn't happen in an ideal world. Unfortunately,
# nixpkgs does some patching to packages to disable some
# requirements. Because we don't use these patches, the
# dependency resolution would fail if we don't ignore the
# requirement.
- print(f"warning: ignoring requirement {r} from {coming_from} "
- f"because there is no matching version in nixpkgs packages")
+ print(
+ f"warning: ignoring requirement {r} from {coming_from} "
+ f"because there is no matching version in nixpkgs packages"
+ )
return
print(f'Resolving {r}{f" (from {coming_from})" if coming_from else ""}')
@@ -81,9 +86,9 @@ async def require(self, r: Requirement, coming_from: Optional[Package]=None):
self._choosed_packages[canonicalize_name(r.name)] = (pkg, specifier)
if pkg.version not in specifier:
raise NoMatchingVersionFound(
- f'New requirement '
+ f"New requirement "
f'{r}{f" (from {coming_from})" if coming_from else ""} '
- f'does not match already installed {r.name}=={str(pkg.version)}'
+ f"does not match already installed {r.name}=={str(pkg.version)}"
)
return
@@ -112,32 +117,38 @@ async def require(self, r: Requirement, coming_from: Optional[Package]=None):
found_pypi = False
if not found_nixpkgs and not found_pypi:
- raise PackageNotFound(f'{r.name} not found in PyPI nor nixpkgs')
+ raise PackageNotFound(f"{r.name} not found in PyPI nor nixpkgs")
if not pkgs:
raise NoMatchingVersionFound(str(r))
- pkg = max(pkgs, key=operator.attrgetter('version'))
+ pkg = max(pkgs, key=operator.attrgetter("version"))
self._choosed_packages[canonicalize_name(r.name)] = (pkg, r.specifier)
reqs: PackageRequirements = await self.evaluate_requirements(pkg)
if isinstance(pkg, NixPackage) or (
- not self.should_load_tests(canonicalize_name(r.name))):
+ not self.should_load_tests(canonicalize_name(r.name))
+ ):
reqs.test_requirements = []
- await asyncio.gather(*(
- self.require(req, coming_from=pkg)
- for req in (reqs.runtime_requirements + reqs.test_requirements +
- reqs.build_requirements)
- ))
+ await asyncio.gather(
+ *(
+ self.require(req, coming_from=pkg)
+ for req in (
+ reqs.runtime_requirements
+ + reqs.test_requirements
+ + reqs.build_requirements
+ )
+ )
+ )
async def require_local(self, pypi_name: str, src: Path):
assert pypi_name not in self._choosed_packages
package = PyPIPackage(
pypi_name=pypi_name,
- download_url='',
- sha256='',
- version=parse_version('0.1dev'),
+ download_url="",
+ sha256="",
+ version=parse_version("0.1dev"),
pypi_cache=self.pypi_data.pypi_cache,
local_source=src,
)
@@ -153,13 +164,15 @@ def package_for(self, package_name: str) -> Optional[Package]:
def all_pypi_packages(self) -> List[PyPIPackage]:
return [
- v[0] for v in self._choosed_packages.values()
+ v[0]
+ for v in self._choosed_packages.values()
if isinstance(v[0], PyPIPackage)
]
async def evaluate_package_requirements(
- pkg: Package, extra_args=[]) -> PackageRequirements:
+ pkg: Package, extra_args=[]
+) -> PackageRequirements:
src = await pkg.source(extra_args)
return await eval_path_requirements(src)
@@ -172,22 +185,23 @@ class ChosenPackageRequirements:
@classmethod
def from_package_requirements(
- cls,
- package_requirements: PackageRequirements,
- version_chooser: VersionChooser,
- load_tests: bool):
+ cls,
+ package_requirements: PackageRequirements,
+ version_chooser: VersionChooser,
+ load_tests: bool,
+ ):
kwargs: Any = {}
- kwargs['build_requirements'] = []
+ kwargs["build_requirements"] = []
for req in package_requirements.build_requirements:
if req.marker and not req.marker.evaluate():
continue
package = version_chooser.package_for(req.name)
if package is None:
raise PackageNotFound(
- f'Package {req.name} not found in the version chooser'
+ f"Package {req.name} not found in the version chooser"
)
- kwargs['build_requirements'].append(package)
+ kwargs["build_requirements"].append(package)
# tests_requirements uses the packages in the version chooser
packages: List[Package] = []
@@ -198,10 +212,10 @@ def from_package_requirements(
package = version_chooser.package_for(req.name)
if package is None:
raise PackageNotFound(
- f'Package {req.name} not found in the version chooser'
+ f"Package {req.name} not found in the version chooser"
)
packages.append(package)
- kwargs['test_requirements'] = packages
+ kwargs["test_requirements"] = packages
# runtime_requirements uses the packages in the version chooser
packages = []
@@ -211,9 +225,9 @@ def from_package_requirements(
package = version_chooser.package_for(req.name)
if package is None:
raise PackageNotFound(
- f'Package {req.name} not found in the version chooser'
+ f"Package {req.name} not found in the version chooser"
)
packages.append(package)
- kwargs['runtime_requirements'] = packages
+ kwargs["runtime_requirements"] = packages
return cls(**kwargs)