Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 0 additions & 60 deletions .github/workflows/build-latest.yml

This file was deleted.

32 changes: 0 additions & 32 deletions .github/workflows/pip-release.yml

This file was deleted.

58 changes: 58 additions & 0 deletions .github/workflows/release.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
---
# The standard xchem Python package release process.
# Run on 'Release' and published to PyPI as a 'trusted' publisher
# (OIDC — no long-lived API token is stored in the repository).
#
# See https://docs.pypi.org/trusted-publishers/creating-a-project-through-oidc/
# See https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
name: Release

on:
  release:
    types:
      - published

jobs:
  build:
    name: Build distribution
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          # Credentials are not needed after checkout; don't leave them
          # in the git config for later steps.
          persist-credentials: false
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: '3.x'
      - name: Install build package
        run: |
          python3 -m pip install --upgrade pip
          python3 -m pip install uv==0.10.6 --user
      - name: Build
        run: uv build
        env:
          # The package version is taken from the Release tag name.
          FRAG_VERSION: ${{ github.ref_name }}
      - name: Store the distribution
        uses: actions/upload-artifact@v5
        with:
          name: python-package-distribution
          path: dist/

  publish:
    name: Publish to PyPI
    needs:
      - build
    runs-on: ubuntu-latest
    environment:
      name: pypi
      # NOTE(review): this URL names 'xchem-hippo' but the README badges
      # reference 'xchem-frag' — confirm which PyPI project this
      # repository actually publishes to.
      url: https://pypi.org/p/xchem-hippo
    permissions:
      # Required for PyPI trusted (OIDC) publishing.
      id-token: write
    steps:
      - name: Download distribution
        uses: actions/download-artifact@v6
        with:
          name: python-package-distribution
          path: dist/
      - name: Publish
        uses: pypa/gh-action-pypi-publish@release/v1
59 changes: 59 additions & 0 deletions .github/workflows/test.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
---
name: Test

# Actions that take place after every commit.
# Here every commit is built, tagged as 'latest' and tested.
#
# Actions also run on a schedule - then the container is built, tested
# and pushed (if the relevant secrets are set) based on
# a defined schedule.

on:
  push:
    branches:
      - '*'
    # Tag pushes are handled by the release workflow, not here.
    tags-ignore:
      - '**'
  schedule:
    # Build every Sunday (0) at 4:45pm (UTC).
    - cron: '45 16 * * 0'

jobs:
  source:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version:
          # Quoted so '3.10' is not parsed as the float 3.1.
          - "3.10"
          - "3.11"
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install requirements
        run: |
          pip install -r build-requirements.txt
          pip install -e .
          pip install rdkit==2023.3.2
      - name: Lint
        run: pre-commit run --all-files
      - name: Test
        run: python -m unittest

  image:
    runs-on: ubuntu-latest
    needs:
      - source
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Build
        uses: docker/build-push-action@v6
        with:
          context: .
          tags: xchem/frag:latest
      - name: Test
        run: docker run xchem/frag:latest /bin/sh -c "cd /usr/local/frag && python -m unittest"
21 changes: 9 additions & 12 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,19 +1,16 @@
FROM python:3.11.4-slim-bullseye
FROM python:3.11.15-slim

USER root
RUN apt-get --allow-releaseinfo-change update && \
apt-get install -y \
RUN apt-get --allow-releaseinfo-change update \
&& apt-get install -y \
git \
libfontconfig1 \
libsm6 \
libxrender1 \
procps && \
pip install rdkit==2023.3.2 && \
git clone https://github.com/rdkit/mmpdb /usr/local/mmpdb && \
pip install /usr/local/mmpdb
procps \
&& pip install rdkit==2023.3.2 \
&& git clone https://github.com/rdkit/mmpdb /usr/local/mmpdb \
&& pip install /usr/local/mmpdb

COPY requirements.txt ./
RUN pip install -r requirements.txt

ADD . /usr/local/fragalysis
RUN pip install /usr/local/fragalysis
ADD . /usr/local/frag
RUN pip install /usr/local/frag
22 changes: 9 additions & 13 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,22 +1,20 @@
# Fragalysis Package
# Frag

[![build latest](https://github.com/xchem/fragalysis-package/actions/workflows/build-latest.yml/badge.svg)](https://github.com/xchem/fragalysis-package/actions/workflows/build-latest.yml)
[![pip release](https://github.com/xchem/fragalysis-package/actions/workflows/pip-release.yml/badge.svg)](https://github.com/xchem/fragalysis-package/actions/workflows/pip-release.yml)
[![test](https://github.com/xchem/frag/actions/workflows/test.yaml/badge.svg)](https://github.com/xchem/frag/actions/workflows/test.yaml)
[![release](https://github.com/xchem/frag/actions/workflows/release.yaml/badge.svg)](https://github.com/xchem/frag/actions/workflows/release.yaml)

[![License](http://img.shields.io/badge/license-Apache%202.0-blue.svg?style=flat)](https://github.com/xchem/fragalysis-package/blob/master/LICENSE.txt)
[![License](http://img.shields.io/badge/license-Apache%202.0-blue.svg?style=flat)](https://github.com/xchem/frag/blob/master/LICENSE.txt)

![PyPI](https://img.shields.io/pypi/v/fragalysis-package)
![PyPI](https://img.shields.io/pypi/v/xchem-frag)

[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)

Basic RDKit based Python tools for analysis of protein-ligand interactions.

> This was originally the fragalysis GitHub repository, which is now being used for
another purpose. You can still find the original package on PyPI at
https://pypi.org/project/fragalysis/ where the latest version posted was v1.1.0.
All new releases of this package will come from here, with the new name
`fragalysis-package`.
another purpose. All new releases of this package will come from here,
with the new name `xchem-frag`.

Currently contains: -

Expand Down Expand Up @@ -49,10 +47,8 @@ state of the repository as it stands with...
pre-commit run --all-files

## Publishing (to PyPI)
If the repository has been provided with a `PYPI_APIKEY` **Secret**
the GitHub `pip-release` workflow action will automatically publish the package to
PyPI when you create a new **Release**. The package version will be set using
the release `tag_name`.
We rely on our **release** GitHub workflow to publish to PyPI, something that
is done automatically when a new **Release** is published for the repository.

---

Expand Down
39 changes: 21 additions & 18 deletions frag/tests/test_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,16 +83,17 @@ def test_generate_nodes(self):
:return:
"""
try:
nodes = [x for x in open("frag/tests/data/nodes.txt").readlines()]
edges = [x.split() for x in open("frag/tests/data/edges.txt").readlines()]
attrs = [
Attr(input_str=x)
for x in open("frag/tests/data/attributes.txt").readlines()
]
with open("frag/tests/data/nodes.txt", encoding="utf8") as n_file:
nodes: list[str] = n_file.readlines()
attrs: list[Attr] = []
with open("frag/tests/data/attributes.txt", encoding="utf8") as n_file:
attrs.extend(Attr(input_str=line) for line in n_file)
except IOError:
nodes = [x for x in open("data/nodes.txt").readlines()]
edges = [x.split() for x in open("data/edges.txt").readlines()]
attrs = [Attr(input_str=x) for x in open("data/attributes.txt").readlines()]
with open("data/nodes.txt", encoding="utf8") as n_file:
nodes: list[str] = n_file.readlines()
attrs: list[Attr] = []
with open("data/attributes.txt", encoding="utf8") as a_file:
attrs.extend(Attr(input_str=line) for line in a_file)
node_holder = NodeHolder(iso_flag=True)
node_holder = build_network(attrs, node_holder)
# Create the nodes and test with output
Expand All @@ -101,22 +102,24 @@ def test_generate_nodes(self):
# Close enough - and the output looks right...
self.assertEqual(len(node_holder.get_edges()), 3687)

@unittest.skip("build_network() causes a segmentation fault")
def test_generate_nodes_non_iso(self):
"""
Test we can generate nodes for the basic data.
:return:
"""
try:
nodes = [x for x in open("frag/tests/data/nodes.txt").readlines()]
edges = [x.split() for x in open("frag/tests/data/edges.txt").readlines()]
attrs = [
Attr(input_str=x)
for x in open("frag/tests/data/attributes.txt").readlines()
]
with open("frag/tests/data/nodes.txt", encoding="utf8") as n_file:
nodes: list[str] = n_file.readlines()
attrs: list[Attr] = []
with open("frag/tests/data/attributes.txt", encoding="utf8") as n_file:
attrs.extend(Attr(input_str=line) for line in n_file)
except IOError:
nodes = [x for x in open("data/nodes.txt").readlines()]
edges = [x.split() for x in open("data/edges.txt").readlines()]
attrs = [Attr(input_str=x) for x in open("data/attributes.txt").readlines()]
with open("data/nodes.txt", encoding="utf8") as n_file:
nodes: list[str] = n_file.readlines()
attrs: list[Attr] = []
with open("data/attributes.txt", encoding="utf8") as a_file:
attrs.extend(Attr(input_str=line) for line in a_file)
node_holder = NodeHolder(iso_flag=False)
node_holder = build_network(attrs, node_holder)
# Create the nodes and test with output
Expand Down
3 changes: 3 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Minimal PEP 517/518 build configuration: project metadata is supplied
# elsewhere (e.g. setup.py/setup.cfg); this only pins the build backend.
[build-system]
requires = ["setuptools >= 82.0.0"]
build-backend = "setuptools.build_meta"
6 changes: 0 additions & 6 deletions requirements.txt

This file was deleted.

Loading
Loading