diff --git a/.coveragerc b/.coveragerc
index f4c14a7..8019ee9 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,5 +2,5 @@
[run]
branch = True
-source = pyikt
-include = */pyikt/*
+source = ikpykit
+include = */ikpykit/*
diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml
index ecc07d8..2c5ad14 100644
--- a/.github/workflows/pypi.yml
+++ b/.github/workflows/pypi.yml
@@ -1,119 +1,110 @@
name: Publish Python 🐍 distribution 📦 to PyPI and TestPyPI
-on: push
+on:
+ push:
+ branches: [main]
+ tags: ['v*'] # Only run on version tags
+ pull_request:
+ branches: [main]
jobs:
- build:
- name: Build distribution 📦
- runs-on: ubuntu-latest
+ build:
+ name: Build distribution 📦
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.x"
+ cache: 'pip' # Enable pip caching
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install build wheel
+ - name: Build a binary wheel and a source tarball
+ run: python -m build
+ - name: Store the distribution packages
+ uses: actions/upload-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+ retention-days: 7 # Set a reasonable retention period
- steps:
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- - name: Set up Python
- uses: actions/setup-python@v5
- with:
- python-version: "3.x"
- - name: Install pypa/build
- run: >-
- python3 -m
- pip install
- build
- --user
- - name: Build a binary wheel and a source tarball
- run: python3 -m build
- - name: Store the distribution packages
- uses: actions/upload-artifact@v4
- with:
- name: python-package-distributions
- path: dist/
+ publish-to-testpypi:
+ name: Publish to TestPyPI
+ needs: [build]
+ runs-on: ubuntu-latest
+ if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/'))
+ environment:
+ name: testpypi
+ url: https://test.pypi.org/p/ikpykit # Update to your actual package name
+ permissions:
+ id-token: write
+ steps:
+ - name: Download artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+ - name: Publish to TestPyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ repository-url: https://test.pypi.org/legacy/
- publish-to-pypi:
- name: >-
- Publish Python 🐍 distribution 📦 to PyPI
- if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes
- needs:
- - build
- runs-on: ubuntu-latest
- environment:
- name: pypi
- url: https://pypi.org/p/pyikt # Replace pyikt with your PyPI project name
- permissions:
- id-token: write # IMPORTANT: mandatory for trusted publishing
+ publish-to-pypi:
+ name: Publish to PyPI
+ needs: [build]
+ runs-on: ubuntu-latest
+ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
+ environment:
+ name: pypi
+ url: https://pypi.org/p/ikpykit # Update to your actual package name
+ permissions:
+ id-token: write
+ steps:
+ - name: Download artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+ - name: Publish 📦 to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
- steps:
- - name: Download all the dists
- uses: actions/download-artifact@v4
- with:
- name: python-package-distributions
- path: dist/
- - name: Publish distribution 📦 to PyPI
- uses: pypa/gh-action-pypi-publish@release/v1
-
- github-release:
- name: >-
- Sign the Python 🐍 distribution 📦 with Sigstore
- and upload them to GitHub Release
- needs:
- - publish-to-pypi
- runs-on: ubuntu-latest
-
- permissions:
- contents: write # IMPORTANT: mandatory for making GitHub Releases
- id-token: write # IMPORTANT: mandatory for sigstore
-
- steps:
- - name: Download all the dists
- uses: actions/download-artifact@v4
- with:
- name: python-package-distributions
- path: dist/
- - name: Sign the dists with Sigstore
- uses: sigstore/gh-action-sigstore-python@v3.0.0
- with:
- inputs: >-
- ./dist/*.tar.gz
- ./dist/*.whl
- - name: Create GitHub Release
- env:
- GITHUB_TOKEN: ${{ github.token }}
- run: >-
- gh release create
- "$GITHUB_REF_NAME"
- --repo "$GITHUB_REPOSITORY"
- --notes ""
- - name: Upload artifact signatures to GitHub Release
- env:
- GITHUB_TOKEN: ${{ github.token }}
- # Upload to GitHub Release using the `gh` CLI.
- # `dist/` contains the built packages, and the
- # sigstore-produced signatures and certificates.
- run: >-
- gh release upload
- "$GITHUB_REF_NAME" dist/**
- --repo "$GITHUB_REPOSITORY"
-
- publish-to-testpypi:
- name: Publish Python 🐍 distribution 📦 to TestPyPI
- needs:
- - build
- runs-on: ubuntu-latest
-
- environment:
- name: testpypi
- url: https://test.pypi.org/p/pyikt
-
- permissions:
- id-token: write # IMPORTANT: mandatory for trusted publishing
-
- steps:
- - name: Download all the dists
- uses: actions/download-artifact@v4
- with:
- name: python-package-distributions
- path: dist/
- - name: Publish distribution 📦 to TestPyPI
- uses: pypa/gh-action-pypi-publish@release/v1
- with:
- repository-url: https://test.pypi.org/legacy/
\ No newline at end of file
+ github-release:
+ name: Create GitHub Release
+ needs: [publish-to-pypi]
+ runs-on: ubuntu-latest
+ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
+ permissions:
+ contents: write
+ id-token: write
+ steps:
+ - name: Download artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+ - name: Sign the dists with Sigstore
+ uses: sigstore/gh-action-sigstore-python@v3.0.0
+ with:
+ inputs: >-
+ ./dist/*.tar.gz
+ ./dist/*.whl
+ - name: Extract release notes
+ id: extract-release-notes
+ run: |
+ version=${GITHUB_REF_NAME#v}
+ echo "version=$version" >> $GITHUB_OUTPUT
+ - name: Create GitHub Release
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: |
+ gh release create "$GITHUB_REF_NAME" \
+ --repo "$GITHUB_REPOSITORY" \
+ --title "Release $GITHUB_REF_NAME" \
+ --notes "Release $GITHUB_REF_NAME"
+ - name: Upload artifacts to GitHub Release
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ run: gh release upload "$GITHUB_REF_NAME" dist/** --repo "$GITHUB_REPOSITORY"
diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml
index aa549fc..a6d0fe4 100644
--- a/.github/workflows/python-app.yml
+++ b/.github/workflows/python-app.yml
@@ -30,4 +30,4 @@ jobs:
uses: codecov/codecov-action@v4.5.0
with:
token: ${{ secrets.CODECOV_TOKEN }}
- slug: IsolationKernel/pyikt
+ slug: IsolationKernel/ikpykit
diff --git a/.gitignore b/.gitignore
index a89af71..fa737ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -190,4 +190,4 @@ myx_test/
# pixi environments
.pixi
*.egg-info
-pyikt/_version.py
+ikpykit/_version.py
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 64c11db..346262d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,16 +1,16 @@
 repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.3.0
-  hooks:
-  - id: check-yaml
-  - id: end-of-file-fixer
-  - id: trailing-whitespace
-- repo: https://github.com/psf/black
-  rev: 23.3.0
-  hooks:
-  - id: black
-- repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.0.272
-  hooks:
-  - id: ruff
-    args: ["--fix", "--show-source"]
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.3.0
+    hooks:
+      - id: check-yaml
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+  - repo: https://github.com/psf/black
+    rev: 24.8.0
+    hooks:
+      - id: black
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.9.9
+    hooks:
+      - id: ruff
+        args: ["--fix"]
diff --git a/LICENSE b/LICENSE
index bb33d7d..fd6c0a3 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
Attribution-NonCommercial-NoDerivatives 4.0 International
diff --git a/README.md b/README.md
index 92cd346..35da24f 100644
--- a/README.md
+++ b/README.md
@@ -1,26 +1,25 @@
-

-[](https://pypi.org/project/pyikt/)
-[](https://codecov.io/gh/IsolationKernel/pyikt)
-[](https://github.com/IsolationKernel/pyikt/actions/workflows/python-app.yml/badge.svg)
+[](https://pypi.org/project/ikpykit/)
+[](https://codecov.io/gh/IsolationKernel/ikpykit)
+[](https://github.com/IsolationKernel/ikpykit/actions/workflows/python-app.yml/badge.svg)
[](https://www.repostatus.org/#active)
-[](https://github.com/IsolationKernel/pyikt/graphs/commit-activity)
-[](https://pepy.tech/project/pyikt)
-[](https://pepy.tech/project/pyikt)
-[](https://github.com/IsolationKernel/pyikt/blob/master/LICENSE)
+[](https://github.com/IsolationKernel/ikpykit/graphs/commit-activity)
+[](https://pepy.tech/project/ikpykit)
+[](https://pepy.tech/project/ikpykit)
+[](https://github.com/IsolationKernel/ikpykit/blob/master/LICENSE)
## About The Project
-**PyIKT** (Python for Isolation Kernel Toolkit) is an intuitive Python library designed for a variety of machine learning tasks including kernel similarity calculation, anomaly detection, clustering, and change detection—all powered by the innovative **Isolation Kernel (IK)** . Isolation Kernel is a data-dependent kernel that measures similarity by isolating data points using an isolation mechanism. It uniquely adapts to the data distribution, with the property that points in sparse regions are more similar than those in dense regions. Notably, it requires no learning or closed-form expression, making it efficient and scalable.
+**IKPyKit** (Isolation Kernel Python toolKit) is an intuitive Python library designed for a variety of machine learning tasks including kernel similarity calculation, anomaly detection, clustering, and change detection—all powered by the innovative **Isolation Kernel (IK)** . Isolation Kernel is a data-dependent kernel that measures similarity by isolating data points using an isolation mechanism. It uniquely adapts to the data distribution, with the property that points in sparse regions are more similar than those in dense regions. Notably, it requires no learning or closed-form expression, making it efficient and scalable.
---
@@ -36,27 +35,27 @@ Learn more about its history and development on the [IsolationKernel GitHub page
---
-### Why use PyIKT?
+### Why use IKPyKit?
-PyIKT is specifically built to harness the power of Isolation Kernel, providing specialized algorithms for a wide range of data types and tasks. Its seamless integration with the scikit-learn API allows easy adoption and compatibility with scikit-learn tools.
+IKPyKit is specifically built to harness the power of Isolation Kernel, providing specialized algorithms for a wide range of data types and tasks. Its seamless integration with the scikit-learn API allows easy adoption and compatibility with scikit-learn tools.
-- **Tailored for Isolation Kernel**: PyIKT directly leverages the unique properties of Isolation Kernel for efficient and effective machine learning solutions.
-- **Efficient and User-Friendly**: Designed for simplicity and performance, PyIKT offers an intuitive interface built on the scikit-learn API.
+- **Tailored for Isolation Kernel**: IKPyKit directly leverages the unique properties of Isolation Kernel for efficient and effective machine learning solutions.
+- **Efficient and User-Friendly**: Designed for simplicity and performance, IKPyKit offers an intuitive interface built on the scikit-learn API.
- **Support for Diverse Data Types**: It supports graph data, group data, stream data, time series, and trajectory data, making it versatile for various domains.
- **Comprehensive Resources**: Users benefit from rich documentation and examples to quickly understand and apply the library’s features.
-- **Ideal for Research and Industry**: PyIKT is suitable for both academic research and industrial applications, providing scalable and cutting-edge tools for modern machine learning challenges.
+- **Ideal for Research and Industry**: IKPyKit is suitable for both academic research and industrial applications, providing scalable and cutting-edge tools for modern machine learning challenges.
---
## Installation & Dependencies
-To install the basic version of `pyikt` with core dependencies, run the following:
+To install the basic version of `ikpykit` with core dependencies, run the following:
```bash
-pip install pyikt
+pip install ikpykit
```
-For more installation options, including dependencies and additional features, check out our [Installation Guide](https://isolationkernel.github.io/pyikt/quick-start/how-to-install.html).
+For more installation options, including dependencies and additional features, check out our [Installation Guide](https://isolationkernel.github.io/ikpykit/quick-start/how-to-install.html).
---
@@ -65,7 +64,7 @@ For more installation options, including dependencies and additional features, c
```py
# Anomaly Detection using inne.
import numpy as np
-from pyikt.anomaly import INNE
+from ikpykit.anomaly import INNE
X = np.array([[-1.1, 0.2], [0.3, 0.5], [0.5, 1.1], [100, 90]])
clf = INNE(contamination=0.25).fit(X)
clf.predict([[0.1, 0.3], [0, 0.7], [90, 85]])
@@ -92,104 +91,104 @@ clf.predict([[0.1, 0.3], [0, 0.7], [90, 85]])
| Abbr | Algorithm | Utilization | Published |
| -------------------------------------------------------------------------------------------- | ----------------------------- | --------------------------------------------- | -------------------- |
-| [IsoKernel](https://isolationkernel.github.io/pyikt/api/kernel/isolation_kernel.html) | Isolation Kernel | IK feature mapping and similarity calculating | AAAI2019, SIGKDD2018 |
-| [IsodisKernel](https://isolationkernel.github.io/pyikt/api/kernel/isolation_dis_kernel.html) | Isolation Distribution Kernel | Distribution similarity calculating | SIGKDD2022 |
+| [IsoKernel](https://isolationkernel.github.io/ikpykit/api/kernel/isolation_kernel.html) | Isolation Kernel | IK feature mapping and similarity calculating | AAAI2019, SIGKDD2018 |
+| [IsodisKernel](https://isolationkernel.github.io/ikpykit/api/kernel/isolation_dis_kernel.html) | Isolation Distribution Kernel | Distribution similarity calculating | SIGKDD2022 |
**(ii) Point Anomaly detection** :
| Abbr | Algorithm | Utiliztion | Published |
| --------------------------------------------------------------------------- | ------------------------------------------------------------------ | ----------------- | ------------------ |
-| [IForest](https://isolationkernel.github.io/pyikt/api/anomaly/iforest.html) | Isolation forest | Anomaly Detection | ICDM2008, TKDD2022 |
-| [INNE](https://isolationkernel.github.io/pyikt/api/anomaly/inne.html) | Isolation-based anomaly detection using nearest-neighbor ensembles | Anomaly Detection | CIJ2018 |
-| [IDKD](https://isolationkernel.github.io/pyikt/api/anomaly/idkd.html) | Isolation Distributional Kernel for point anomaly detections | Anomaly Detection | TKDE2022 |
+| [IForest](https://isolationkernel.github.io/ikpykit/api/anomaly/iforest.html) | Isolation forest | Anomaly Detection | ICDM2008, TKDD2022 |
+| [INNE](https://isolationkernel.github.io/ikpykit/api/anomaly/inne.html) | Isolation-based anomaly detection using nearest-neighbor ensembles | Anomaly Detection | CIJ2018 |
+| [IDKD](https://isolationkernel.github.io/ikpykit/api/anomaly/idkd.html) | Isolation Distributional Kernel for point anomaly detections | Anomaly Detection | TKDE2022 |
**(iii) Point Clustering** :
| Abbr | Algorithm | Utiliztion | Published |
| ----------------------------------------------------------------------- | ------------------------------------------------------------ | ----------------------- | --------- |
-| [IDKC](https://isolationkernel.github.io/pyikt/api/cluster/idkc.html) | Kernel-based Clustering via Isolation Distributional Kernel. | Point Clustering | IS2023 |
-| [PSKC](https://isolationkernel.github.io/pyikt/api/cluster/pskc.html) | Point-set Kernel Clustering | Point Clustering | TKDE2023 |
-| [IKAHC](https://isolationkernel.github.io/pyikt/api/cluster/ikahc.html) | Isolation Kernel for Agglomerative Hierarchical Clustering | Hierarchical Clustering | PR2023 |
+| [IDKC](https://isolationkernel.github.io/ikpykit/api/cluster/idkc.html) | Kernel-based Clustering via Isolation Distributional Kernel. | Point Clustering | IS2023 |
+| [PSKC](https://isolationkernel.github.io/ikpykit/api/cluster/pskc.html) | Point-set Kernel Clustering | Point Clustering | TKDE2023 |
+| [IKAHC](https://isolationkernel.github.io/ikpykit/api/cluster/ikahc.html) | Isolation Kernel for Agglomerative Hierarchical Clustering | Hierarchical Clustering | PR2023 |
**(IV) Graph Data** :
| Abbr | Algorithm | Utiliztion | Published |
| --------------------------------------------------------------------------------------- | ---------------------------------------------------------------------- | --------------------------------------------- | --------- |
-| [IKGOD](https://isolationkernel.github.io/pyikt/api/graph/ikgod.html) | Subgraph Centralization: A Necessary Step for Graph Anomaly Detection. | Graph Anomaly Detection | SIAM2023 |
-| [IsoGraphKernel](https://isolationkernel.github.io/pyikt/api/graph/IsoGraphKernel.html) | Isolation Graph Kernel | Graph IK embedding and similarity calculating | AAAI2021 |
+| [IKGOD](https://isolationkernel.github.io/ikpykit/api/graph/ikgod.html) | Subgraph Centralization: A Necessary Step for Graph Anomaly Detection. | Graph Anomaly Detection | SIAM2023 |
+| [IsoGraphKernel](https://isolationkernel.github.io/ikpykit/api/graph/IsoGraphKernel.html) | Isolation Graph Kernel | Graph IK embedding and similarity calculating | AAAI2021 |
**(V) Group Data** :
| Abbr | Algorithm | Utiliztion | Published |
| --------------------------------------------------------------------- | ------------------------------------------------------------ | ----------------------- | --------- |
-| [IKGAD](https://isolationkernel.github.io/pyikt/api/group/ikgad.html) | Isolation Distributional Kernel for group anomaly detections | Group Anomaly Detection | TKDE2022 |
+| [IKGAD](https://isolationkernel.github.io/ikpykit/api/group/ikgad.html) | Isolation Distributional Kernel for group anomaly detections | Group Anomaly Detection | TKDE2022 |
**(VI) Stream Data** :
| Abbr | Algorithm | Utiliztion | Published |
| ---------------------------------------------------------------------------- | --------------------------------------------------------------- | ------------------------------ | ---------- |
-| [StreaKHC](https://isolationkernel.github.io/pyikt/api/stream/streakhc.html) | Isolation Distribution Kernel for Trajectory Anomaly Detections | Online Hierarchical Clustering | SIGKDD2022 |
-| [ICID](https://isolationkernel.github.io/pyikt/api/stream/icid.html) | Detecting change intervals with isolation distributional kernel | Change Intervals Detection | JAIR2024 |
+| [StreaKHC](https://isolationkernel.github.io/ikpykit/api/stream/streakhc.html) | Streaming Hierarchical Clustering Based on Point-Set Kernel | Online Hierarchical Clustering | SIGKDD2022 |
+| [ICID](https://isolationkernel.github.io/ikpykit/api/stream/icid.html) | Detecting change intervals with isolation distributional kernel | Change Intervals Detection | JAIR2024 |
**(VII) Trajectory Data** :
| Abbr | Algorithm | Utiliztion | Published |
| -------------------------------------------------------------------------- | --------------------------------------------------------------- | ---------------------------- | --------- |
-| [TIDKC](https://isolationkernel.github.io/pyikt/api/trajectory/tidkc.html) | Distribution-based Tajectory Clustering | Trajectory Clustering | ICDM2023 |
-| [IKAT](https://isolationkernel.github.io/pyikt/api/trajectory/ikat.html) | Isolation Distribution Kernel for Trajectory Anomaly Detections | Trajectory Anomaly Detection | JAIR2024 |
+| [TIDKC](https://isolationkernel.github.io/ikpykit/api/trajectory/tidkc.html) | Distribution-based Trajectory Clustering | Trajectory Clustering | ICDM2023 |
+| [IKAT](https://isolationkernel.github.io/ikpykit/api/trajectory/ikat.html) | Isolation Distribution Kernel for Trajectory Anomaly Detections | Trajectory Anomaly Detection | JAIR2024 |
**(VIII) Time Series**
| Abbr | Algorithm | Utiliztion | Published |
| --------------------------------------------------------------------------- | --------------------------------------------------------------- | ----------------- | --------- |
-| [IKTOD](https://isolationkernel.github.io/pyikt/api/time_series/iktod.html) | Isolation distribution kernel for Time Series Anomaly Detection | Anomaly detection | VLDB2022 |
+| [IKTOD](https://isolationkernel.github.io/ikpykit/api/time_series/iktod.html) | Isolation distribution kernel for Time Series Anomaly Detection | Anomaly detection | VLDB2022 |
---
## Features
-pyikt provides a set of key features designed to make time series forecasting with machine learning easy and efficient. For a detailed overview, see the [User Guides](https://isolationkernel.github.io/pyikt/user_guides/table-of-contents.html).
+ikpykit provides a set of key features designed to make time series forecasting with machine learning easy and efficient. For a detailed overview, see the [User Guides](https://isolationkernel.github.io/ikpykit/user_guides/table-of-contents.html).
---
## Examples and tutorials
-Explore our extensive list of examples and tutorials (English and Spanish) to get you started with PyIKT. You can find them [here](https://isolationkernel.github.io/pyikt/examples/examples_english.html).
+Explore our extensive list of examples and tutorials (English and Spanish) to get you started with ikpykit. You can find them [here](https://isolationkernel.github.io/ikpykit/examples/examples_english.html).
---
## How to contribute
-Primarily, PyIKT development consists of adding and creating new *Forecasters*, new validation strategies, or improving the performance of the current code. However, there are many other ways to contribute:
+Primarily, ikpykit development consists of adding and creating new *Forecasters*, new validation strategies, or improving the performance of the current code. However, there are many other ways to contribute:
-- Submit a bug report or feature request on [GitHub Issues](https://github.com/IsolationKernel/pyikt/issues).
-- Contribute a Jupyter notebook to our [examples](https://isolationkernel.github.io/pyikt/examples/examples_english.html).
+- Submit a bug report or feature request on [GitHub Issues](https://github.com/IsolationKernel/ikpykit/issues).
+- Contribute a Jupyter notebook to our [examples](https://isolationkernel.github.io/ikpykit/examples/examples_english.html).
- Write [unit or integration tests](https://docs.pytest.org/en/latest/) for our project.
- Answer questions on our issues, Stack Overflow, and elsewhere.
- Translate our documentation into another language.
- Write a blog post, tweet, or share our project with others.
-For more information on how to contribute to pyikt, see our [Contribution Guide](https://isolationkernel.github.io/pyikt/contributing/contribution.html).
+For more information on how to contribute to ikpykit, see our [Contribution Guide](https://isolationkernel.github.io/ikpykit/contributing/contribution.html).
-Visit our [authors section](https://isolationkernel.github.io/pyikt/authors/authors.html) to meet all the contributors to pyikt.
+Visit our [authors section](https://isolationkernel.github.io/ikpykit/authors/authors.html) to meet all the contributors to ikpykit.
---
## Citation
-If you use pyikt for a scientific publication, we would appreciate citations to the published software.
+If you use ikpykit for a scientific publication, we would appreciate citations to the published software.
**BibTeX**:
```
-@software{PyIKT,
+@software{IKPyKit,
author = {Xin Han, Yixiao Ma, Ye Zhu, and Kaiming Ting},
-title = {PyIKT:A Python Library for Isolation Kernel Toolkit},
+title = {IKPyKit: A Python Library for Isolation Kernel Toolkit},
version = {0.1.0},
month = {3},
year = {2025},
license = {BSD-3-Clause},
-url = {https://github.com/IsolationKernel/pyikt}
+url = {https://github.com/IsolationKernel/ikpykit}
}
```
@@ -197,4 +196,4 @@ url = {https://github.com/IsolationKernel/pyikt}
## License
-[BSD-3-Clause License](https://github.com/IsolationKernel/pyikt/blob/master/LICENSE)
+[BSD-3-Clause License](https://github.com/IsolationKernel/ikpykit/blob/master/LICENSE)
diff --git a/docs/README.md b/docs/README.md
index 001bf20..0dbb5e6 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -1,27 +1,25 @@

-[](https://pypi.org/project/pyikt/)
-[](https://codecov.io/gh/IsolationKernel/pyikt)
-[](https://github.com/IsolationKernel/pyikt/actions/workflows/python-app.yml/badge.svg)
+[](https://pypi.org/project/ikpykit/)
+[](https://codecov.io/gh/IsolationKernel/ikpykit)
+[](https://github.com/IsolationKernel/ikpykit/actions/workflows/python-app.yml/badge.svg)
[](https://www.repostatus.org/#active)
-[](https://github.com/IsolationKernel/pyikt/graphs/commit-activity)
-[](https://pepy.tech/project/pyikt)
-[](https://pepy.tech/project/pyikt)
-[](https://github.com/IsolationKernel/pyikt/blob/master/LICENSE)
-
-
+[](https://github.com/IsolationKernel/ikpykit/graphs/commit-activity)
+[](https://pepy.tech/project/ikpykit)
+[](https://pepy.tech/project/ikpykit)
+[](https://github.com/IsolationKernel/ikpykit/blob/master/LICENSE)
## About The Project
-**PyIKT** (Python for Isolation Kernel Toolkit) is an intuitive Python library designed for a variety of machine learning tasks including kernel similarity calculation, anomaly detection, clustering, and change detection—all powered by the innovative **Isolation Kernel (IK)** . Isolation Kernel is a data-dependent kernel that measures similarity by isolating data points using an isolation mechanism. It uniquely adapts to the data distribution, with the property that points in sparse regions are more similar than those in dense regions. Notably, it requires no learning or closed-form expression, making it efficient and scalable.
+**IKPyKit** (Isolation Kernel Python toolKit) is an intuitive Python library designed for a variety of machine learning tasks including kernel similarity calculation, anomaly detection, clustering, and change detection—all powered by the innovative **Isolation Kernel (IK)** . Isolation Kernel is a data-dependent kernel that measures similarity by isolating data points using an isolation mechanism. It uniquely adapts to the data distribution, with the property that points in sparse regions are more similar than those in dense regions. Notably, it requires no learning or closed-form expression, making it efficient and scalable.
---
@@ -37,24 +35,24 @@ Learn more about its history and development on the [IsolationKernel GitHub page
---
-### Why use PyIKT?
+### Why use IKPyKit?
-PyIKT is specifically built to harness the power of Isolation Kernel, providing specialized algorithms for a wide range of data types and tasks. Its seamless integration with the scikit-learn API allows easy adoption and compatibility with scikit-learn tools.
+IKPyKit is specifically built to harness the power of Isolation Kernel, providing specialized algorithms for a wide range of data types and tasks. Its seamless integration with the scikit-learn API allows easy adoption and compatibility with scikit-learn tools.
-- **Tailored for Isolation Kernel**: PyIKT directly leverages the unique properties of Isolation Kernel for efficient and effective machine learning solutions.
-- **Efficient and User-Friendly**: Designed for simplicity and performance, PyIKT offers an intuitive interface built on the scikit-learn API.
+- **Tailored for Isolation Kernel**: IKPyKit directly leverages the unique properties of Isolation Kernel for efficient and effective machine learning solutions.
+- **Efficient and User-Friendly**: Designed for simplicity and performance, IKPyKit offers an intuitive interface built on the scikit-learn API.
- **Support for Diverse Data Types**: It supports graph data, group data, stream data, time series, and trajectory data, making it versatile for various domains.
- **Comprehensive Resources**: Users benefit from rich documentation and examples to quickly understand and apply the library’s features.
-- **Ideal for Research and Industry**: PyIKT is suitable for both academic research and industrial applications, providing scalable and cutting-edge tools for modern machine learning challenges.
+- **Ideal for Research and Industry**: IKPyKit is suitable for both academic research and industrial applications, providing scalable and cutting-edge tools for modern machine learning challenges.
---
## Installation & Dependencies
-To install the basic version of `pyikt` with core dependencies, run the following:
+To install the basic version of `ikpykit` with core dependencies, run the following:
```bash
-pip install pyikt
+pip install ikpykit
```
For more installation options, including dependencies and additional features, check out our [Installation Guide](./quick-start/how-to-install.html).
@@ -66,7 +64,7 @@ For more installation options, including dependencies and additional features, c
```py
# Anomaly Detection using inne.
import numpy as np
-from pyikt.anomaly import INNE
+from ikpykit.anomaly import INNE
X = np.array([[-1.1, 0.2], [0.3, 0.5], [0.5, 1.1], [100, 90]])
clf = INNE(contamination=0.25).fit(X)
clf.predict([[0.1, 0.3], [0, 0.7], [90, 85]])
@@ -149,48 +147,48 @@ clf.predict([[0.1, 0.3], [0, 0.7], [90, 85]])
## Features
-pyikt provides a set of key features designed to make time series forecasting with machine learning easy and efficient. For a detailed overview, see the [User Guides](./user_guides/table-of-contents.html).
+ikpykit provides a set of key features designed to make time series forecasting with machine learning easy and efficient. For a detailed overview, see the [User Guides](./user_guides/table-of-contents.html).
---
## Examples and tutorials
-Explore our extensive list of examples and tutorials (English and Spanish) to get you started with PyIKT. You can find them [here](./examples/examples_english.html).
+Explore our extensive list of examples and tutorials (English and Spanish) to get you started with ikpykit. You can find them [here](./examples/examples_english.html).
---
## How to contribute
-Primarily, PyIKT development consists of adding and creating new *Forecasters*, new validation strategies, or improving the performance of the current code. However, there are many other ways to contribute:
+Primarily, ikpykit development consists of adding and creating new *Forecasters*, new validation strategies, or improving the performance of the current code. However, there are many other ways to contribute:
-- Submit a bug report or feature request on [GitHub Issues](https://github.com/IsolationKernel/pyikt/issues).
+- Submit a bug report or feature request on [GitHub Issues](https://github.com/IsolationKernel/ikpykit/issues).
- Contribute a Jupyter notebook to our [examples](./examples/examples_english.html).
- Write [unit or integration tests](https://docs.pytest.org/en/latest/) for our project.
- Answer questions on our issues, Stack Overflow, and elsewhere.
- Translate our documentation into another language.
- Write a blog post, tweet, or share our project with others.
-For more information on how to contribute to pyikt, see our [Contribution Guide](./contributing/contribution.html).
+For more information on how to contribute to ikpykit, see our [Contribution Guide](./contributing/contribution.html).
-Visit our [authors section](./authors/authors.html) to meet all the contributors to pyikt.
+Visit our [authors section](./authors/authors.html) to meet all the contributors to ikpykit.
---
## Citation
-If you use pyikt for a scientific publication, we would appreciate citations to the published software.
+If you use ikpykit for a scientific publication, we would appreciate citations to the published software.
**BibTeX**:
```
-@software{PyIKT,
+@software{IKPyKit,
author = {Xin Han, Yixiao Ma, Ye Zhu, and Kaiming Ting},
-title = {PyIKT:A Python Library for Isolation Kernel Toolkit},
+title = {IKPyKit: A Python Library for Isolation Kernel Toolkit},
version = {0.1.0},
month = {3},
year = {2025},
license = {BSD-3-Clause},
-url = {https://github.com/IsolationKernel/pyikt}
+url = {https://github.com/IsolationKernel/ikpykit}
}
```
@@ -198,4 +196,4 @@ url = {https://github.com/IsolationKernel/pyikt}
## License
-[BSD-3-Clause License](https://github.com/IsolationKernel/pyikt/blob/master/LICENSE)
+[BSD-3-Clause License](https://github.com/IsolationKernel/ikpykit/blob/master/LICENSE)
diff --git a/docs/api/anomaly/idkd.md b/docs/api/anomaly/idkd.md
index b5ebe7b..c041d25 100644
--- a/docs/api/anomaly/idkd.md
+++ b/docs/api/anomaly/idkd.md
@@ -1 +1 @@
-::: pyikt.anomaly.IDKD
+::: ikpykit.anomaly.IDKD
diff --git a/docs/api/anomaly/iforest.md b/docs/api/anomaly/iforest.md
index df79339..3a98d1c 100644
--- a/docs/api/anomaly/iforest.md
+++ b/docs/api/anomaly/iforest.md
@@ -1 +1 @@
-::: pyikt.anomaly.IForest
+::: ikpykit.anomaly.IForest
diff --git a/docs/api/anomaly/inne.md b/docs/api/anomaly/inne.md
index 467bb9c..676a018 100644
--- a/docs/api/anomaly/inne.md
+++ b/docs/api/anomaly/inne.md
@@ -1 +1 @@
-::: pyikt.anomaly.INNE
+::: ikpykit.anomaly.INNE
diff --git a/docs/api/cluster/idkc.md b/docs/api/cluster/idkc.md
index b004254..b074c36 100644
--- a/docs/api/cluster/idkc.md
+++ b/docs/api/cluster/idkc.md
@@ -1 +1 @@
-::: pyikt.cluster.IDKC
+::: ikpykit.cluster.IDKC
diff --git a/docs/api/cluster/ikahc.md b/docs/api/cluster/ikahc.md
index 5c44e1b..ee37b21 100644
--- a/docs/api/cluster/ikahc.md
+++ b/docs/api/cluster/ikahc.md
@@ -1 +1 @@
-::: pyikt.cluster.IKAHC
+::: ikpykit.cluster.IKAHC
diff --git a/docs/api/cluster/pskc.md b/docs/api/cluster/pskc.md
index 5ee8403..61f1df6 100644
--- a/docs/api/cluster/pskc.md
+++ b/docs/api/cluster/pskc.md
@@ -1 +1 @@
-::: pyikt.cluster.PSKC
+::: ikpykit.cluster.PSKC
diff --git a/docs/api/graph/IsoGraphKernel.md b/docs/api/graph/IsoGraphKernel.md
index f4941f0..1680e6c 100644
--- a/docs/api/graph/IsoGraphKernel.md
+++ b/docs/api/graph/IsoGraphKernel.md
@@ -1 +1 @@
-::: pyikt.graph.IsoGraphKernel
+::: ikpykit.graph.IsoGraphKernel
diff --git a/docs/api/graph/ikgod.md b/docs/api/graph/ikgod.md
index d0ccb4c..a5e2c64 100644
--- a/docs/api/graph/ikgod.md
+++ b/docs/api/graph/ikgod.md
@@ -1 +1 @@
-::: pyikt.graph.IKGOD
+::: ikpykit.graph.IKGOD
diff --git a/docs/api/group/ikgad.md b/docs/api/group/ikgad.md
index 762c979..93ef321 100644
--- a/docs/api/group/ikgad.md
+++ b/docs/api/group/ikgad.md
@@ -1 +1 @@
-::: pyikt.group.IKGAD
+::: ikpykit.group.IKGAD
diff --git a/docs/api/kernel/isolation_dis_kernel.md b/docs/api/kernel/isolation_dis_kernel.md
index e421fd4..a9a4809 100644
--- a/docs/api/kernel/isolation_dis_kernel.md
+++ b/docs/api/kernel/isolation_dis_kernel.md
@@ -1 +1 @@
-::: pyikt.kernel.IsoDisKernel
+::: ikpykit.kernel.IsoDisKernel
diff --git a/docs/api/kernel/isolation_kernel.md b/docs/api/kernel/isolation_kernel.md
index 5ae911a..0edb0ca 100644
--- a/docs/api/kernel/isolation_kernel.md
+++ b/docs/api/kernel/isolation_kernel.md
@@ -1 +1 @@
-::: pyikt.kernel.IsoKernel
+::: ikpykit.kernel.IsoKernel
diff --git a/docs/api/stream/icid.md b/docs/api/stream/icid.md
index d309e15..4271a04 100644
--- a/docs/api/stream/icid.md
+++ b/docs/api/stream/icid.md
@@ -1 +1 @@
-::: pyikt.stream.ICID
+::: ikpykit.stream.ICID
diff --git a/docs/api/stream/streakhc.md b/docs/api/stream/streakhc.md
index e224183..bdb9f2d 100644
--- a/docs/api/stream/streakhc.md
+++ b/docs/api/stream/streakhc.md
@@ -1 +1 @@
-::: pyikt.stream.STREAMKHC
+::: ikpykit.stream.STREAMKHC
diff --git a/docs/api/time_series/iktod.md b/docs/api/time_series/iktod.md
index 77c8a4c..ea3e5fd 100644
--- a/docs/api/time_series/iktod.md
+++ b/docs/api/time_series/iktod.md
@@ -1 +1 @@
-::: pyikt.timeseries.IKTOD
+::: ikpykit.timeseries.IKTOD
diff --git a/docs/api/trajectory/data_loader/sheep_dogs.md b/docs/api/trajectory/data_loader/sheep_dogs.md
index 91eb50b..bfd92b9 100644
--- a/docs/api/trajectory/data_loader/sheep_dogs.md
+++ b/docs/api/trajectory/data_loader/sheep_dogs.md
@@ -1 +1 @@
-::: pyikt.trajectory.dataloader.SheepDogs
+::: ikpykit.trajectory.dataloader.SheepDogs
diff --git a/docs/api/trajectory/ikat.md b/docs/api/trajectory/ikat.md
index c39b181..cde2a24 100644
--- a/docs/api/trajectory/ikat.md
+++ b/docs/api/trajectory/ikat.md
@@ -1 +1 @@
-::: pyikt.trajectory.IKAT
+::: ikpykit.trajectory.IKAT
diff --git a/docs/api/trajectory/tidkc.md b/docs/api/trajectory/tidkc.md
index 869a6fa..1fe4ebe 100644
--- a/docs/api/trajectory/tidkc.md
+++ b/docs/api/trajectory/tidkc.md
@@ -1 +1 @@
-::: pyikt.trajectory.TIDKC
+::: ikpykit.trajectory.TIDKC
diff --git a/docs/contributing/contribution.md b/docs/contributing/contribution.md
index 456f7fc..23c362d 100644
--- a/docs/contributing/contribution.md
+++ b/docs/contributing/contribution.md
@@ -1,15 +1,15 @@
-Contributing to PyIKT
+Contributing to ikpykit
=====================
-Hi! Thanks for your interest in contributing to PyIKT :D .
+Hi! Thanks for your interest in contributing to ikpykit :D .
In this document we'll try to summarize everything that you need to know to do a good job.
Code and Issues
---------------
-We use [Github](https://github.com/IsolationKernel/pyikt) to host our code repositories
-and issues.You can look at [issues](https://github.com/IsolationKernel/pyikt/issues) to report any
-issues related to pyikt. Here is a [guide](https://guides.github.com/features/issues/)
+We use [Github](https://github.com/IsolationKernel/ikpykit) to host our code repositories
+and issues. You can look at [issues](https://github.com/IsolationKernel/ikpykit/issues) to report any
+issues related to ikpykit. Here is a [guide](https://guides.github.com/features/issues/)
on how to report better issues.
Git and our Branching model
diff --git a/docs/examples/examples_english.md b/docs/examples/examples_english.md
index b3d961c..b525342 100644
--- a/docs/examples/examples_english.md
+++ b/docs/examples/examples_english.md
@@ -2,7 +2,7 @@
# Examples and Tutorials
-Practical examples and tutorials to help you understand and apply PyIKT.
+Practical examples and tutorials to help you understand and apply ikpykit.
Anomaly Detection
diff --git a/docs/quick-start/how-to-install.md b/docs/quick-start/how-to-install.md
index 604b2d2..7b2e3c6 100644
--- a/docs/quick-start/how-to-install.md
+++ b/docs/quick-start/how-to-install.md
@@ -1,29 +1,46 @@
# Installation Guide
-This guide will help you install `pyikt`. The default installation of `pyikt` includes only the essential dependencies required for basic functionality. Additional optional dependencies can be installed for extended features.
+This guide will help you install `ikpykit`. The default installation of `ikpykit` includes only the essential dependencies required for basic functionality. Additional optional dependencies can be installed for extended features.
- [](https://pypi.org/project/pyikt/)
+ [![PyPI version](https://img.shields.io/pypi/v/ikpykit)](https://pypi.org/project/ikpykit/)
## **Basic installation**
-To install the basic version of `pyikt` with its core dependencies, run:
+To install the basic version of `ikpykit` with its core dependencies, run:
```bash
-pip install pyikt
+pip install ikpykit
```
-Specific version:
+If you're feeling brave, you can install the bleeding-edge version. NOTE: do so at your own risk; no guarantees are given!
+Latest (unstable):
```bash
-pip install pyikt==0.1.0
+pip install git+https://github.com/IsolationKernel/ikpykit.git@main --upgrade
```
-Latest (unstable):
+Alternatively download the package, install requirements, and manually run the installer:
```bash
-pip install git+https://github.com/pyikt/pyikt@master
+wget https://github.com/IsolationKernel/ikpykit/archive/refs/heads/main.zip -O ikpykit-main.zip
+unzip ikpykit-main.zip
+rm ikpykit-main.zip
+cd ikpykit-main
+
+pip install -r requirements.txt
+
+python setup.py install
```
+Once the installation is completed, you can check whether the installation was successful through:
+
+```py
+import ikpykit
+print(ikpykit.__version__)
+```
+
+## **Dependencies**
+
The following dependencies are installed with the default installation:
+ numpy>=1.22
diff --git a/docs/releases/releases.md b/docs/releases/releases.md
index 92427d1..1630c98 100644
--- a/docs/releases/releases.md
+++ b/docs/releases/releases.md
@@ -3,8 +3,8 @@
All significant changes to this project are documented in this release file.
| Legend | |
-|:-----------------------------------------------------------|:--------------------------------------|
+| :--------------------------------------------------------- | :------------------------------------ |
| Feature | New feature |
| Enhancement | Improvement in existing functionality |
| API Change | Changes in the API |
-| Fix | Bug fix |
\ No newline at end of file
+| Fix | Bug fix |
diff --git a/docs/user_guides/inne.ipynb b/docs/user_guides/inne.ipynb
index adb9362..ec8a9fd 100644
--- a/docs/user_guides/inne.ipynb
+++ b/docs/user_guides/inne.ipynb
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "\n# IsolationNNE example\n\nAn example using :class:`pyikt.anomaly.IsolationNNE` for anomaly\ndetection.\n"
+ "\n# IsolationNNE example\n\nAn example using :class:`ikpykit.anomaly.IsolationNNE` for anomaly\ndetection.\n"
]
},
{
@@ -15,7 +15,7 @@
},
"outputs": [],
"source": [
- "import numpy as np\nimport matplotlib.pyplot as plt\nfrom pyikt.anomaly import IsolationNNE\n\nrng = np.random.RandomState(42)\n\n# Generate train data\nX = 0.3 * rng.randn(100, 2)\nX_train = np.r_[X + 2, X - 2]\n# Generate some regular novel observations\nX = 0.3 * rng.randn(20, 2)\nX_test = np.r_[X + 2, X - 2]\n# Generate some abnormal novel observations\nX_outliers = rng.uniform(low=-4, high=4, size=(20, 2))\n\n# fit the model\nclf = IsolationNNE()\nclf.fit(X_train)\ny_pred_train = clf.predict(X_train)\ny_pred_test = clf.predict(X_test)\ny_pred_outliers = clf.predict(X_outliers)\n\n# plot the line, the samples, and the nearest vectors to the plane\nxx, yy = np.meshgrid(np.linspace(-5, 5, 50), np.linspace(-5, 5, 50))\nZ = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\nZ = Z.reshape(xx.shape)\n\nplt.title(\"IsolationNNE\")\nplt.contourf(xx, yy, Z, cmap=plt.cm.Blues_r)\n\nb1 = plt.scatter(X_train[:, 0], X_train[:, 1], c=\"white\", s=20, edgecolor=\"k\")\nb2 = plt.scatter(X_test[:, 0], X_test[:, 1], c=\"green\", s=20, edgecolor=\"k\")\nc = plt.scatter(X_outliers[:, 0], X_outliers[:, 1], c=\"red\", s=20, edgecolor=\"k\")\nplt.axis(\"tight\")\nplt.xlim((-5, 5))\nplt.ylim((-5, 5))\nplt.legend(\n [b1, b2, c],\n [\"training observations\", \"new regular observations\", \"new abnormal observations\"],\n loc=\"upper left\",\n)\nplt.show()"
+ "import numpy as np\nimport matplotlib.pyplot as plt\nfrom ikpykit.anomaly import IsolationNNE\n\nrng = np.random.RandomState(42)\n\n# Generate train data\nX = 0.3 * rng.randn(100, 2)\nX_train = np.r_[X + 2, X - 2]\n# Generate some regular novel observations\nX = 0.3 * rng.randn(20, 2)\nX_test = np.r_[X + 2, X - 2]\n# Generate some abnormal novel observations\nX_outliers = rng.uniform(low=-4, high=4, size=(20, 2))\n\n# fit the model\nclf = IsolationNNE()\nclf.fit(X_train)\ny_pred_train = clf.predict(X_train)\ny_pred_test = clf.predict(X_test)\ny_pred_outliers = clf.predict(X_outliers)\n\n# plot the line, the samples, and the nearest vectors to the plane\nxx, yy = np.meshgrid(np.linspace(-5, 5, 50), np.linspace(-5, 5, 50))\nZ = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])\nZ = Z.reshape(xx.shape)\n\nplt.title(\"IsolationNNE\")\nplt.contourf(xx, yy, Z, cmap=plt.cm.Blues_r)\n\nb1 = plt.scatter(X_train[:, 0], X_train[:, 1], c=\"white\", s=20, edgecolor=\"k\")\nb2 = plt.scatter(X_test[:, 0], X_test[:, 1], c=\"green\", s=20, edgecolor=\"k\")\nc = plt.scatter(X_outliers[:, 0], X_outliers[:, 1], c=\"red\", s=20, edgecolor=\"k\")\nplt.axis(\"tight\")\nplt.xlim((-5, 5))\nplt.ylim((-5, 5))\nplt.legend(\n [b1, b2, c],\n [\"training observations\", \"new regular observations\", \"new abnormal observations\"],\n loc=\"upper left\",\n)\nplt.show()"
]
}
],
diff --git a/docs/user_guides/table-of-contents.md b/docs/user_guides/table-of-contents.md
index 94b0ed1..78c61fa 100644
--- a/docs/user_guides/table-of-contents.md
+++ b/docs/user_guides/table-of-contents.md
@@ -1,9 +1,9 @@
# Table of Contents
-Welcome to the PyIKT user guides! This comprehensive collection of guides is designed to help you navigate through the various features and functionalities of PyIKT. Whether you are a beginner or an advanced user, you will find the necessary resources to master data with PyIKT. Below, you will find the user guides categorized by topic for easier navigation.
+Welcome to the ikpykit user guides! This comprehensive collection of guides is designed to help you navigate through the various features and functionalities of ikpykit. Whether you are a beginner or an advanced user, you will find the necessary resources to master data with ikpykit. Below, you will find the user guides categorized by topic for easier navigation.
Anomaly Detection
- [INNE](../user_guides/inne.html)
-We hope you find these guides helpful. If you have any questions or need further assistance, please don't hesitate to reach out to the PyIKT community.
+We hope you find these guides helpful. If you have any questions or need further assistance, please don't hesitate to reach out to the ikpykit community.
diff --git a/examples/plot_inne.py b/examples/plot_inne.py
index 4e9d306..e189d60 100644
--- a/examples/plot_inne.py
+++ b/examples/plot_inne.py
@@ -7,14 +7,14 @@
IsolationNNE example
==========================================
-An example using :class:`pyikt.anomaly.IsolationNNE` for anomaly
+An example using :class:`ikpykit.anomaly.IsolationNNE` for anomaly
detection.
"""
import numpy as np
import matplotlib.pyplot as plt
-from pyikt.anomaly import INNE
+from ikpykit.anomaly import INNE
rng = np.random.RandomState(42)
diff --git a/pyikt/__init__.py b/ikpykit/__init__.py
similarity index 57%
rename from pyikt/__init__.py
rename to ikpykit/__init__.py
index e04b09a..9c1f1a4 100644
--- a/pyikt/__init__.py
+++ b/ikpykit/__init__.py
@@ -1,38 +1,22 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
-from .kernel import IsoKernel
-from .kernel import IsoDisKernel
-
-from .anomaly import IDKD
-from .anomaly import INNE
-
-from .cluster import IDKC
-from .cluster import PSKC
-from .cluster import IKAHC
-
-from .graph import IsoGraphKernel
-from .graph import IKGOD
-
+from ._version import __version__
+from .anomaly import IDKD, INNE
+from .cluster import IDKC, IKAHC, PSKC
+from .graph import IKGOD, IsoGraphKernel
from .group import IKGAD
-
-
-from .trajectory import IKAT
-from .trajectory import TIDKC
-
-from .stream import ICID
-from .stream import STREAMKHC
-
+from .kernel import IsoDisKernel, IsoKernel
+from .stream import ICID, STREAMKHC
from .timeseries import IKTOD
-
-from ._version import __version__
+from .trajectory import IKAT, TIDKC
__all__ = [
"IsoDisKernel",
diff --git a/pyikt/anomaly/__init__.py b/ikpykit/anomaly/__init__.py
similarity index 87%
rename from pyikt/anomaly/__init__.py
rename to ikpykit/anomaly/__init__.py
index eb83102..45e2bf5 100644
--- a/pyikt/anomaly/__init__.py
+++ b/ikpykit/anomaly/__init__.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,7 +9,7 @@
"""
from ._idkd import IDKD
-from ._inne import INNE
from ._iforest import IForest
+from ._inne import INNE
__all__ = ["IDKD", "INNE", "IForest"]
diff --git a/pyikt/anomaly/_idkd.py b/ikpykit/anomaly/_idkd.py
similarity index 98%
rename from pyikt/anomaly/_idkd.py
rename to ikpykit/anomaly/_idkd.py
index d7a8cd1..856d8e2 100644
--- a/pyikt/anomaly/_idkd.py
+++ b/ikpykit/anomaly/_idkd.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,12 +10,14 @@
import numbers
from warnings import warn
+
import numpy as np
from sklearn.base import BaseEstimator, OutlierMixin
-from sklearn.utils.validation import check_is_fitted
from sklearn.utils import check_array
from sklearn.utils.extmath import safe_sparse_dot
-from pyikt.kernel import IsoKernel
+from sklearn.utils.validation import check_is_fitted
+
+from ikpykit.kernel import IsoKernel
class IDKD(OutlierMixin, BaseEstimator):
@@ -74,7 +76,7 @@ class IDKD(OutlierMixin, BaseEstimator):
Examples
--------
- >>> from pyikt.anomaly import IDKD
+ >>> from ikpykit.anomaly import IDKD
>>> import numpy as np
>>> X = np.array([[-1.1, 0.2], [0.3, 0.5], [0.5, 1.1], [100, 90]])
>>> clf = IDKD(max_samples=2, contamination=0.25).fit(X)
diff --git a/pyikt/anomaly/_iforest.py b/ikpykit/anomaly/_iforest.py
similarity index 98%
rename from pyikt/anomaly/_iforest.py
rename to ikpykit/anomaly/_iforest.py
index 067a4cf..7b1491d 100644
--- a/pyikt/anomaly/_iforest.py
+++ b/ikpykit/anomaly/_iforest.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,9 +10,9 @@
import numpy as np
from sklearn.base import BaseEstimator, OutlierMixin
-from sklearn.utils.validation import check_is_fitted
-from sklearn.utils import check_array
from sklearn.ensemble import IsolationForest
+from sklearn.utils import check_array
+from sklearn.utils.validation import check_is_fitted
MAX_INT = np.iinfo(np.int32).max
MIN_FLOAT = np.finfo(float).eps
@@ -93,7 +93,7 @@ class IForest(OutlierMixin, BaseEstimator):
Examples
--------
- >>> from pyikt.anomaly import IForest
+ >>> from ikpykit.anomaly import IForest
>>> import numpy as np
>>> X = np.array([[-1.1, 0.2], [0.3, 0.5], [0.5, 1.1], [100, 90]])
>>> clf = IForest(contamination=0.25).fit(X)
@@ -112,7 +112,7 @@ def __init__(
random_state=None,
verbose=0,
):
- self.contamination=contamination
+ self.contamination = contamination
self.n_estimators = n_estimators
self.max_samples = max_samples
self.max_features = max_features
diff --git a/pyikt/anomaly/_inne.py b/ikpykit/anomaly/_inne.py
similarity index 99%
rename from pyikt/anomaly/_inne.py
rename to ikpykit/anomaly/_inne.py
index 1feeca4..90b386d 100644
--- a/pyikt/anomaly/_inne.py
+++ b/ikpykit/anomaly/_inne.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,12 +10,13 @@
import numbers
from warnings import warn
+
import numpy as np
from sklearn.base import BaseEstimator, OutlierMixin
from sklearn.metrics import euclidean_distances
-from sklearn.utils.validation import check_is_fitted, check_random_state
-from sklearn.utils import check_array
from sklearn.metrics._pairwise_distances_reduction import ArgKmin
+from sklearn.utils import check_array
+from sklearn.utils.validation import check_is_fitted, check_random_state
MAX_INT = np.iinfo(np.int32).max
MIN_FLOAT = np.finfo(float).eps
@@ -67,7 +68,7 @@ class INNE(OutlierMixin, BaseEstimator):
Examples
--------
- >>> from pyikt.anomaly import INNE
+ >>> from ikpykit.anomaly import INNE
>>> import numpy as np
>>> X = np.array([[-1.1, 0.2], [0.3, 0.5], [0.5, 1.1], [100, 90]])
>>> clf = INNE(contamination=0.25).fit(X)
@@ -161,7 +162,6 @@ def fit(self, X, y=None):
return self
def _fit(self, X):
-
n_samples, n_features = X.shape
self._centroids = np.empty([self.n_estimators, self.max_samples_, n_features])
self._ratio = np.empty([self.n_estimators, self.max_samples_])
diff --git a/pyikt/anomaly/tests/__init__.py b/ikpykit/anomaly/tests/__init__.py
similarity index 100%
rename from pyikt/anomaly/tests/__init__.py
rename to ikpykit/anomaly/tests/__init__.py
diff --git a/pyikt/anomaly/tests/test_idkd.py b/ikpykit/anomaly/tests/test_idkd.py
similarity index 98%
rename from pyikt/anomaly/tests/test_idkd.py
rename to ikpykit/anomaly/tests/test_idkd.py
index 32f3938..9131364 100644
--- a/pyikt/anomaly/tests/test_idkd.py
+++ b/ikpykit/anomaly/tests/test_idkd.py
@@ -4,10 +4,11 @@
license that can be found in the LICENSE file.
"""
-from sklearn.datasets import load_iris
import numpy as np
import pytest
-from pyikt.anomaly import IDKD
+from sklearn.datasets import load_iris
+
+from ikpykit.anomaly import IDKD
method = ["inne", "anne"]
diff --git a/pyikt/anomaly/tests/test_inne.py b/ikpykit/anomaly/tests/test_inne.py
similarity index 99%
rename from pyikt/anomaly/tests/test_inne.py
rename to ikpykit/anomaly/tests/test_inne.py
index 243ff9d..f6b09e4 100644
--- a/pyikt/anomaly/tests/test_inne.py
+++ b/ikpykit/anomaly/tests/test_inne.py
@@ -6,11 +6,11 @@
import numpy as np
import pytest
-from pyikt.anomaly import INNE
from sklearn.datasets import (
load_diabetes,
load_iris,
)
+from sklearn.ensemble import IsolationForest
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import ParameterGrid
from sklearn.utils import check_random_state
@@ -19,7 +19,7 @@
ignore_warnings,
)
-from sklearn.ensemble import IsolationForest
+from ikpykit.anomaly import INNE
rng = check_random_state(0)
diff --git a/pyikt/cluster/__init__.py b/ikpykit/cluster/__init__.py
similarity index 100%
rename from pyikt/cluster/__init__.py
rename to ikpykit/cluster/__init__.py
index 020f895..183df1b 100644
--- a/pyikt/cluster/__init__.py
+++ b/ikpykit/cluster/__init__.py
@@ -2,8 +2,8 @@
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
-from ._pskc import PSKC
-from ._ikahc import IKAHC
from ._idkc import IDKC
+from ._ikahc import IKAHC
+from ._pskc import PSKC
__all__ = ["PSKC", "IKAHC", "IDKC"]
diff --git a/pyikt/cluster/_idkc.py b/ikpykit/cluster/_idkc.py
similarity index 98%
rename from pyikt/cluster/_idkc.py
rename to ikpykit/cluster/_idkc.py
index 4633341..4ccff69 100644
--- a/pyikt/cluster/_idkc.py
+++ b/ikpykit/cluster/_idkc.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,11 +10,13 @@
import numpy as np
import scipy.sparse as sp
-from sklearn.utils.extmath import safe_sparse_dot
from sklearn.base import BaseEstimator, ClusterMixin
-from sklearn.utils.validation import check_random_state, check_array
from sklearn.metrics._pairwise_distances_reduction import ArgKmin
-from pyikt.kernel import IsoKernel
+from sklearn.utils.extmath import safe_sparse_dot
+from sklearn.utils.validation import check_array, check_random_state
+
+from ikpykit.kernel import IsoKernel
+
from ._kcluster import KCluster
@@ -86,7 +88,7 @@ class IDKC(BaseEstimator, ClusterMixin):
Examples
--------
- >>> from pyikt.cluster import IDKC
+ >>> from ikpykit.cluster import IDKC
>>> import numpy as np
>>> X = np.array([[1, 2], [1, 4], [5, 2], [5, 5], [1, 0], [5, 0]])
>>> clustering = IDKC(
diff --git a/pyikt/cluster/_ikahc.py b/ikpykit/cluster/_ikahc.py
similarity index 97%
rename from pyikt/cluster/_ikahc.py
rename to ikpykit/cluster/_ikahc.py
index 9c4f30b..91317c2 100644
--- a/pyikt/cluster/_ikahc.py
+++ b/ikpykit/cluster/_ikahc.py
@@ -1,22 +1,22 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
-from typing import Optional, Union, Literal, Any
+from typing import Any, Literal, Optional, Union
import numpy as np
-from scipy.cluster.hierarchy import linkage, fcluster
+from scipy.cluster.hierarchy import fcluster, linkage
from sklearn.base import BaseEstimator, ClusterMixin
-from sklearn.utils.validation import check_is_fitted
from sklearn.utils import check_array
+from sklearn.utils.validation import check_is_fitted
-from pyikt.kernel import IsoKernel
+from ikpykit.kernel import IsoKernel
class IKAHC(BaseEstimator, ClusterMixin):
@@ -80,7 +80,7 @@ class IKAHC(BaseEstimator, ClusterMixin):
Examples
--------
- >>> from pyikt.cluster import IKAHC
+ >>> from ikpykit.cluster import IKAHC
>>> import numpy as np
>>> X = [[0.4,0.3], [0.3,0.8], [0.5, 0.4], [0.5, 0.1]]
>>> clf = IKAHC(n_estimators=200, max_samples=2, lk_method='single', n_clusters=2, return_flat=True)
@@ -233,7 +233,6 @@ def _extract_flat_cluster(
else:
return fcluster(self.dendrogram_, t=n_clusters, criterion="maxclust")
-
def fit_transform(self, X: np.ndarray, y: Any = None) -> np.ndarray:
"""Fit algorithm to data and return the dendrogram.
diff --git a/pyikt/cluster/_kcluster.py b/ikpykit/cluster/_kcluster.py
similarity index 97%
rename from pyikt/cluster/_kcluster.py
rename to ikpykit/cluster/_kcluster.py
index 9673a6e..d41450b 100644
--- a/pyikt/cluster/_kcluster.py
+++ b/ikpykit/cluster/_kcluster.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,8 +9,9 @@
"""
from collections.abc import Iterable
-from scipy import sparse as sp
+
import numpy as np
+from scipy import sparse as sp
class KCluster(object):
diff --git a/pyikt/cluster/_pskc.py b/ikpykit/cluster/_pskc.py
similarity index 97%
rename from pyikt/cluster/_pskc.py
rename to ikpykit/cluster/_pskc.py
index ede2f27..c43c988 100644
--- a/pyikt/cluster/_pskc.py
+++ b/ikpykit/cluster/_pskc.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,12 +9,13 @@
"""
import numpy as np
-from pyikt.kernel import IsoKernel
-from sklearn.utils.extmath import safe_sparse_dot
from sklearn.base import BaseEstimator, ClusterMixin
-from sklearn.utils.validation import check_is_fitted
from sklearn.utils import check_array
-from pyikt.cluster._kcluster import KCluster
+from sklearn.utils.extmath import safe_sparse_dot
+from sklearn.utils.validation import check_is_fitted
+
+from ikpykit.cluster._kcluster import KCluster
+from ikpykit.kernel import IsoKernel
class PSKC(BaseEstimator, ClusterMixin):
@@ -67,7 +68,7 @@ class PSKC(BaseEstimator, ClusterMixin):
Examples
--------
- >>> from pyikt.cluster import PSKC
+ >>> from ikpykit.cluster import PSKC
>>> import numpy as np
>>> X = np.array([[1, 2], [1, 4], [10, 2], [10, 10], [1, 0], [1, 1]])
>>> pskc = PSKC(n_estimators=100, max_samples=2, tau=0.3, v=0.1, random_state=24)
@@ -192,7 +193,6 @@ def _update_cluster(
point_indices = np.delete(point_indices, x_id)
return c_k, point_indices
-
def _get_labels(self, X):
"""Get cluster labels for all points in the dataset."""
n_samples = X.shape[0]
diff --git a/pyikt/cluster/_utils.py b/ikpykit/cluster/_utils.py
similarity index 96%
rename from pyikt/cluster/_utils.py
rename to ikpykit/cluster/_utils.py
index 7cae759..b8240ac 100644
--- a/pyikt/cluster/_utils.py
+++ b/ikpykit/cluster/_utils.py
@@ -1,16 +1,16 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
+import numpy as np
import scipy
from scipy import sparse
-import numpy as np
def delete_row_csr(mat, i):
diff --git a/pyikt/cluster/tests/__init__.py b/ikpykit/cluster/tests/__init__.py
similarity index 100%
rename from pyikt/cluster/tests/__init__.py
rename to ikpykit/cluster/tests/__init__.py
diff --git a/pyikt/cluster/tests/test_idkc.py b/ikpykit/cluster/tests/test_idkc.py
similarity index 95%
rename from pyikt/cluster/tests/test_idkc.py
rename to ikpykit/cluster/tests/test_idkc.py
index 19a7b94..2a186cb 100644
--- a/pyikt/cluster/tests/test_idkc.py
+++ b/ikpykit/cluster/tests/test_idkc.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,9 +9,10 @@
"""
import numpy as np
-from sklearn.datasets import make_blobs
-from pyikt.cluster import IDKC
from sklearn import metrics
+from sklearn.datasets import make_blobs
+
+from ikpykit.cluster import IDKC
def test_IDKC():
diff --git a/pyikt/cluster/tests/test_ikahc.py b/ikpykit/cluster/tests/test_ikahc.py
similarity index 98%
rename from pyikt/cluster/tests/test_ikahc.py
rename to ikpykit/cluster/tests/test_ikahc.py
index 92efbf4..4ae2f67 100644
--- a/pyikt/cluster/tests/test_ikahc.py
+++ b/ikpykit/cluster/tests/test_ikahc.py
@@ -1,6 +1,6 @@
-"""pyikt (c) by Xin Han
+"""ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,7 +9,8 @@
import numpy as np
import pytest
-from pyikt.cluster import IKAHC
+
+from ikpykit.cluster import IKAHC
def test_ikahc_initialization():
diff --git a/pyikt/cluster/tests/test_pskc.py b/ikpykit/cluster/tests/test_pskc.py
similarity index 99%
rename from pyikt/cluster/tests/test_pskc.py
rename to ikpykit/cluster/tests/test_pskc.py
index 4fbbee1..c9c6047 100644
--- a/pyikt/cluster/tests/test_pskc.py
+++ b/ikpykit/cluster/tests/test_pskc.py
@@ -7,7 +7,8 @@
import numpy as np
import pytest
from sklearn.datasets import make_blobs
-from pyikt.cluster import PSKC
+
+from ikpykit.cluster import PSKC
@pytest.fixture
diff --git a/pyikt/graph/__init__.py b/ikpykit/graph/__init__.py
similarity index 87%
rename from pyikt/graph/__init__.py
rename to ikpykit/graph/__init__.py
index e80aa95..6763ea6 100644
--- a/pyikt/graph/__init__.py
+++ b/ikpykit/graph/__init__.py
@@ -1,15 +1,15 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
-from ._isographkernel import IsoGraphKernel
from ._ikgod import IKGOD
+from ._isographkernel import IsoGraphKernel
__all__ = [
"IsoGraphKernel",
diff --git a/pyikt/graph/_ikgod.py b/ikpykit/graph/_ikgod.py
similarity index 96%
rename from pyikt/graph/_ikgod.py
rename to ikpykit/graph/_ikgod.py
index 3cbb970..9a1b5d1 100644
--- a/pyikt/graph/_ikgod.py
+++ b/ikpykit/graph/_ikgod.py
@@ -1,25 +1,26 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
-import numbers
import copy
+import numbers
from warnings import warn
+
import numpy as np
import scipy.sparse as sp
from sklearn.base import BaseEstimator
-from sklearn.utils.validation import check_is_fitted
from sklearn.utils import check_array
from sklearn.utils.extmath import safe_sparse_dot
-from pyikt.graph.utils import get_degrees, get_neighbors, check_format
+from sklearn.utils.validation import check_is_fitted
-from pyikt.kernel import IsoKernel
+from ikpykit.graph.utils import check_format, get_degrees, get_neighbors
+from ikpykit.kernel import IsoKernel
class IKGOD(BaseEstimator):
@@ -75,7 +76,7 @@ class IKGOD(BaseEstimator):
Examples
--------
- >>> from pyikt.graph import IKGOD
+ >>> from ikpykit.graph import IKGOD
>>> import scipy.sparse as sp
>>> import numpy as np
>>> # Create adjacency matrix and features
@@ -300,12 +301,9 @@ def _wlembedding(self, adjacency, X):
neighbors = get_neighbors(adjacency, i)
if degrees[i] > 0: # Avoid division by zero
updated_embedding[i] = (
- (
- tmp_embedding[neighbors].sum(axis=0) / degrees[i]
- + tmp_embedding[i]
- )
- / 2
- )
+ tmp_embedding[neighbors].sum(axis=0) / degrees[i]
+ + tmp_embedding[i]
+ ) / 2
else:
updated_embedding[i] = tmp_embedding[i]
diff --git a/pyikt/graph/_isographkernel.py b/ikpykit/graph/_isographkernel.py
similarity index 97%
rename from pyikt/graph/_isographkernel.py
rename to ikpykit/graph/_isographkernel.py
index 28d3940..4aef9a6 100644
--- a/pyikt/graph/_isographkernel.py
+++ b/ikpykit/graph/_isographkernel.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,16 +9,18 @@
"""
import copy
+from typing import Optional, Union
from warnings import warn
-from typing import Union, Optional
+
import numpy as np
import scipy.sparse as sp
from sklearn.base import BaseEstimator
from sklearn.utils import check_array
-from sklearn.utils.validation import check_is_fitted
from sklearn.utils.extmath import safe_sparse_dot
-from pyikt.kernel import IsoKernel
-from pyikt.graph.utils import get_degrees, get_neighbors, check_format
+from sklearn.utils.validation import check_is_fitted
+
+from ikpykit.graph.utils import check_format, get_degrees, get_neighbors
+from ikpykit.kernel import IsoKernel
class IsoGraphKernel(BaseEstimator):
@@ -57,7 +59,7 @@ class IsoGraphKernel(BaseEstimator):
Examples
--------
- >>> from pyikt.graph import IsoGraphKernel
+ >>> from ikpykit.graph import IsoGraphKernel
>>> import numpy as np
>>> X = np.array([[0.4, 0.3], [0.3, 0.8], [0.5, 0.4], [0.5, 0.1]])
>>> adjacency = np.array([[0, 1, 1, 0], [1, 0, 0, 1], [1, 0, 0, 1], [0, 1, 1, 0]])
diff --git a/pyikt/graph/tests/test_ikgod.py b/ikpykit/graph/tests/test_ikgod.py
similarity index 97%
rename from pyikt/graph/tests/test_ikgod.py
rename to ikpykit/graph/tests/test_ikgod.py
index 73b6a29..952573e 100644
--- a/pyikt/graph/tests/test_ikgod.py
+++ b/ikpykit/graph/tests/test_ikgod.py
@@ -1,10 +1,9 @@
-import pytest
import numpy as np
+import pytest
+import scipy.sparse as sp
from sklearn.datasets import make_blobs
-from sklearn.utils.estimator_checks import check_estimator
-from pyikt.graph import IKGOD
-import scipy.sparse as sp
+from ikpykit.graph import IKGOD
def test_ikgod_initialization():
diff --git a/pyikt/graph/tests/test_isographkernel.py b/ikpykit/graph/tests/test_isographkernel.py
similarity index 98%
rename from pyikt/graph/tests/test_isographkernel.py
rename to ikpykit/graph/tests/test_isographkernel.py
index d14a16b..ce114ae 100644
--- a/pyikt/graph/tests/test_isographkernel.py
+++ b/ikpykit/graph/tests/test_isographkernel.py
@@ -1,9 +1,8 @@
-import pytest
import numpy as np
-from pyikt.graph import IsoGraphKernel
-
import scipy.sparse as sp
+from ikpykit.graph import IsoGraphKernel
+
def test_isographkernel_initialization():
"""Test IsoGraphKernel initialization with default and custom parameters."""
diff --git a/pyikt/graph/utils.py b/ikpykit/graph/utils.py
similarity index 98%
rename from pyikt/graph/utils.py
rename to ikpykit/graph/utils.py
index 2eb5efd..cb4c3d8 100644
--- a/pyikt/graph/utils.py
+++ b/ikpykit/graph/utils.py
@@ -1,16 +1,17 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
+from typing import Union
+
import numpy as np
import scipy.sparse as sp
-from typing import Union
def get_degrees(input_matrix: sp.csr_matrix, transpose: bool = False) -> np.ndarray:
diff --git a/pyikt/group/__init__.py b/ikpykit/group/__init__.py
similarity index 85%
rename from pyikt/group/__init__.py
rename to ikpykit/group/__init__.py
index e4635d8..ff66604 100644
--- a/pyikt/group/__init__.py
+++ b/ikpykit/group/__init__.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
diff --git a/pyikt/group/anomaly/__init__.py b/ikpykit/group/anomaly/__init__.py
similarity index 100%
rename from pyikt/group/anomaly/__init__.py
rename to ikpykit/group/anomaly/__init__.py
diff --git a/pyikt/group/anomaly/_ikgad.py b/ikpykit/group/anomaly/_ikgad.py
similarity index 98%
rename from pyikt/group/anomaly/_ikgad.py
rename to ikpykit/group/anomaly/_ikgad.py
index ea41300..61ca665 100644
--- a/pyikt/group/anomaly/_ikgad.py
+++ b/ikpykit/group/anomaly/_ikgad.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,10 +10,11 @@
import numpy as np
from sklearn.base import BaseEstimator, OutlierMixin
-from sklearn.utils.validation import check_is_fitted
from sklearn.utils.extmath import safe_sparse_dot
-from pyikt.kernel import IsoKernel
-from pyikt.group.utils import check_format
+from sklearn.utils.validation import check_is_fitted
+
+from ikpykit.group.utils import check_format
+from ikpykit.kernel import IsoKernel
class IKGAD(OutlierMixin, BaseEstimator):
@@ -75,7 +76,7 @@ class IKGAD(OutlierMixin, BaseEstimator):
Examples
--------
- >>> from pyikt.group import IKGAD
+ >>> from ikpykit.group import IKGAD
>>> import numpy as np
>>> X =[[[1.0, 1.1], [1.2, 1.3]], [[1.3, 1.2], [1.1, 1.0]], [[1.0, 1.2], [1.4, 1.3]], [[5.0, 5.1], [5.2, 5.3]]]
>>> clf = IKGAD(max_samples_1=2, max_samples_2=2, contamination=0.25, random_state=42)
diff --git a/pyikt/group/tests/test_Ikgad.py b/ikpykit/group/tests/test_Ikgad.py
similarity index 97%
rename from pyikt/group/tests/test_Ikgad.py
rename to ikpykit/group/tests/test_Ikgad.py
index ccc7085..faec846 100644
--- a/pyikt/group/tests/test_Ikgad.py
+++ b/ikpykit/group/tests/test_Ikgad.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,7 +10,8 @@
import numpy as np
import pytest
-from pyikt.group import IKGAD
+
+from ikpykit.group import IKGAD
def test_IKGAD_initialization():
diff --git a/pyikt/group/utils.py b/ikpykit/group/utils.py
similarity index 95%
rename from pyikt/group/utils.py
rename to ikpykit/group/utils.py
index 2957c0c..bd7bf7c 100644
--- a/pyikt/group/utils.py
+++ b/ikpykit/group/utils.py
@@ -1,15 +1,16 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
+from typing import Any, List, Optional, Union
+
import numpy as np
-from typing import Union, List, Any, Optional
from sklearn.utils.validation import check_array
diff --git a/pyikt/kernel/__init__.py b/ikpykit/kernel/__init__.py
similarity index 87%
rename from pyikt/kernel/__init__.py
rename to ikpykit/kernel/__init__.py
index 8e37672..3d6cb40 100644
--- a/pyikt/kernel/__init__.py
+++ b/ikpykit/kernel/__init__.py
@@ -1,15 +1,15 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
-from ._isokernel import IsoKernel
from ._isodiskernel import IsoDisKernel
+from ._isokernel import IsoKernel
__all__ = [
"IsoDisKernel",
diff --git a/pyikt/kernel/_ik_anne.py b/ikpykit/kernel/_ik_anne.py
similarity index 99%
rename from pyikt/kernel/_ik_anne.py
rename to ikpykit/kernel/_ik_anne.py
index e281b10..d331065 100644
--- a/pyikt/kernel/_ik_anne.py
+++ b/ikpykit/kernel/_ik_anne.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
diff --git a/pyikt/kernel/_ik_iforest.py b/ikpykit/kernel/_ik_iforest.py
similarity index 90%
rename from pyikt/kernel/_ik_iforest.py
rename to ikpykit/kernel/_ik_iforest.py
index 91c3c47..462c3d6 100644
--- a/pyikt/kernel/_ik_iforest.py
+++ b/ikpykit/kernel/_ik_iforest.py
@@ -1,21 +1,15 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
-import numbers
-from warnings import warn
-
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
-from sklearn.metrics import euclidean_distances
-from sklearn.utils import check_array
-from sklearn.utils.validation import check_is_fitted, check_random_state
MAX_INT = np.iinfo(np.int32).max
MIN_FLOAT = np.finfo(float).eps
@@ -65,5 +59,4 @@ class IK_IForest(TransformerMixin, BaseEstimator):
"""
def __init__(self):
-
pass
diff --git a/pyikt/kernel/_ik_inne.py b/ikpykit/kernel/_ik_inne.py
similarity index 99%
rename from pyikt/kernel/_ik_inne.py
rename to ikpykit/kernel/_ik_inne.py
index 57d6209..135dd36 100644
--- a/pyikt/kernel/_ik_inne.py
+++ b/ikpykit/kernel/_ik_inne.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
diff --git a/pyikt/kernel/_isodiskernel.py b/ikpykit/kernel/_isodiskernel.py
similarity index 97%
rename from pyikt/kernel/_isodiskernel.py
rename to ikpykit/kernel/_isodiskernel.py
index c6ee633..3e44419 100644
--- a/pyikt/kernel/_isodiskernel.py
+++ b/ikpykit/kernel/_isodiskernel.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,12 +9,14 @@
"""
import math
+
import numpy as np
import scipy.sparse as sp
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils import check_array
from sklearn.utils.validation import check_is_fitted
-from pyikt.kernel import IsoKernel
+
+from ._isokernel import IsoKernel
class IsoDisKernel(BaseEstimator, TransformerMixin):
@@ -55,7 +57,7 @@ class IsoDisKernel(BaseEstimator, TransformerMixin):
Examples
--------
- >>> from pyikt.kernel import IsoDisKernel
+ >>> from ikpykit.kernel import IsoDisKernel
>>> import numpy as np
>>> X = [[0.4,0.3], [0.3,0.8], [0.5,0.4], [0.5,0.1]]
>>> idk = IsoDisKernel(max_samples=3,).fit(X)
diff --git a/pyikt/kernel/_isokernel.py b/ikpykit/kernel/_isokernel.py
similarity index 96%
rename from pyikt/kernel/_isokernel.py
rename to ikpykit/kernel/_isokernel.py
index 05cb78a..74e4d2c 100644
--- a/pyikt/kernel/_isokernel.py
+++ b/ikpykit/kernel/_isokernel.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,14 +10,16 @@
import numbers
from warnings import warn
+
import scipy.sparse as sp
+from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils import check_array
-from sklearn.utils.validation import check_is_fitted
from sklearn.utils.extmath import safe_sparse_dot
-from sklearn.base import BaseEstimator, TransformerMixin
-from pyikt.kernel._ik_anne import IK_ANNE
-from pyikt.kernel._ik_iforest import IK_IForest
-from pyikt.kernel._ik_inne import IK_INNE
+from sklearn.utils.validation import check_is_fitted
+
+from ikpykit.kernel._ik_anne import IK_ANNE
+from ikpykit.kernel._ik_iforest import IK_IForest
+from ikpykit.kernel._ik_inne import IK_INNE
class IsoKernel(TransformerMixin, BaseEstimator):
@@ -62,7 +64,7 @@ class IsoKernel(TransformerMixin, BaseEstimator):
Examples
--------
- >>> from pyikt.kernel import IsoKernel
+ >>> from ikpykit.kernel import IsoKernel
>>> import numpy as np
>>> X = [[0.4,0.3], [0.3,0.8], [0.5, 0.4], [0.5, 0.1]]
>>> ik = IsoKernel().fit(X)
diff --git a/pyikt/kernel/tests/__init__.py b/ikpykit/kernel/tests/__init__.py
similarity index 100%
rename from pyikt/kernel/tests/__init__.py
rename to ikpykit/kernel/tests/__init__.py
diff --git a/pyikt/kernel/tests/test_isodiskernel.py b/ikpykit/kernel/tests/test_isodiskernel.py
similarity index 96%
rename from pyikt/kernel/tests/test_isodiskernel.py
rename to ikpykit/kernel/tests/test_isodiskernel.py
index 69d9df9..3c62664 100644
--- a/pyikt/kernel/tests/test_isodiskernel.py
+++ b/ikpykit/kernel/tests/test_isodiskernel.py
@@ -4,9 +4,10 @@
license that can be found in the LICENSE file.
"""
-from sklearn.datasets import load_iris
-from pyikt.kernel import IsoDisKernel
import pytest
+from sklearn.datasets import load_iris
+
+from ikpykit.kernel import IsoDisKernel
method = ["inne", "anne"]
diff --git a/pyikt/kernel/tests/test_isokernel.py b/ikpykit/kernel/tests/test_isokernel.py
similarity index 97%
rename from pyikt/kernel/tests/test_isokernel.py
rename to ikpykit/kernel/tests/test_isokernel.py
index 0854154..8df0a1b 100644
--- a/pyikt/kernel/tests/test_isokernel.py
+++ b/ikpykit/kernel/tests/test_isokernel.py
@@ -4,9 +4,10 @@
license that can be found in the LICENSE file.
"""
-from sklearn.datasets import load_iris
-from pyikt import IsoKernel
import pytest
+from sklearn.datasets import load_iris
+
+from ikpykit import IsoKernel
method = ["inne", "anne"]
diff --git a/pyikt/stream/__init__.py b/ikpykit/stream/__init__.py
similarity index 88%
rename from pyikt/stream/__init__.py
rename to ikpykit/stream/__init__.py
index f8f4122..dc31ff4 100644
--- a/pyikt/stream/__init__.py
+++ b/ikpykit/stream/__init__.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
diff --git a/pyikt/stream/changedetect/__init__.py b/ikpykit/stream/changedetect/__init__.py
similarity index 100%
rename from pyikt/stream/changedetect/__init__.py
rename to ikpykit/stream/changedetect/__init__.py
diff --git a/pyikt/stream/changedetect/_icid.py b/ikpykit/stream/changedetect/_icid.py
similarity index 93%
rename from pyikt/stream/changedetect/_icid.py
rename to ikpykit/stream/changedetect/_icid.py
index 0df2c9e..4074a89 100644
--- a/pyikt/stream/changedetect/_icid.py
+++ b/ikpykit/stream/changedetect/_icid.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,21 +9,22 @@
"""
import numpy as np
+from scipy.stats import entropy
from sklearn.base import BaseEstimator
-from sklearn.utils.validation import check_is_fitted
from sklearn.utils import check_array
-from pyikt.kernel import IsoDisKernel
-from scipy.stats import entropy
+from sklearn.utils.validation import check_is_fitted
+
+from ikpykit.kernel import IsoDisKernel
class ICID(BaseEstimator):
"""Isolate Change Interval Detection for monitoring data stream distribution changes.
- ICID (Isolate Change Interval Detection) is designed to detect intervals in a data stream
- where significant distribution changes occur. It leverages isolation-based methods to
- measure similarity between consecutive data windows, identifying points where the
- underlying distribution shifts. The algorithm adaptively selects the best sampling
- parameters for isolation kernels based on stability metrics.
+ ICID (Isolate Change Interval Detection) is designed to detect intervals in a data
+ stream where significant distribution changes occur. It leverages isolation-based
+ methods to measure similarity between consecutive data windows, identifying points
+ where the underlying distribution shifts. The algorithm adaptively selects the best
+ sampling parameters for isolation kernels based on stability metrics.
Parameters
----------
@@ -86,14 +87,14 @@ class ICID(BaseEstimator):
Examples
--------
- >>> from pyikt.stream import ICID
+ >>> from ikpykit.stream import ICID
>>> import numpy as np
>>> np.random.seed(42)
>>> X_normal1 = np.random.randn(50, 2)
>>> X_anomaly = np.random.randn(10, 2) * 5 + 10 # Different distribution
>>> X_normal2 = np.random.randn(20, 2)
>>> X = np.vstack([X_normal1, X_anomaly, X_normal2])
- >>> icid = ICID( n_estimators=50, max_samples_list=[4, 8], window_size=10, random_state=42)
+ >>> icid = ICID(n_estimators=50, max_samples_list=[4, 8], window_size=10, random_state=42)
>>> # Batch predictions
>>> icid.fit_predict_batch(X)
array([ 1, 1, 1, 1, -1, -1, 1])
@@ -177,9 +178,9 @@ def fit_predict_batch(self, X):
self.fit(X)
is_inlier = np.ones(len(self.interval_score_), dtype=int)
threshold = self._determine_anomaly_bounds()
- is_inlier[self.interval_score_ > threshold] = (
- -1
- ) # Higher scores indicate change
+ is_inlier[
+ self.interval_score_ > threshold
+ ] = -1 # Higher scores indicate change
return is_inlier
def predict_online(self, X):
diff --git a/pyikt/stream/cluster/__init__.py b/ikpykit/stream/cluster/__init__.py
similarity index 100%
rename from pyikt/stream/cluster/__init__.py
rename to ikpykit/stream/cluster/__init__.py
diff --git a/pyikt/stream/cluster/_inode.py b/ikpykit/stream/cluster/_inode.py
similarity index 99%
rename from pyikt/stream/cluster/_inode.py
rename to ikpykit/stream/cluster/_inode.py
index 328185d..c75c816 100644
--- a/pyikt/stream/cluster/_inode.py
+++ b/ikpykit/stream/cluster/_inode.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -12,7 +12,7 @@
import random
import string
from collections import defaultdict, deque
-from typing import List, Tuple, Dict, Optional, Any, Union, Set
+from typing import Dict, List, Optional, Tuple
import numpy as np
from numba import jit
diff --git a/pyikt/stream/cluster/_streakhc.py b/ikpykit/stream/cluster/_streakhc.py
similarity index 97%
rename from pyikt/stream/cluster/_streakhc.py
rename to ikpykit/stream/cluster/_streakhc.py
index ce9e00d..6b2aa19 100644
--- a/pyikt/stream/cluster/_streakhc.py
+++ b/ikpykit/stream/cluster/_streakhc.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,13 +10,15 @@
from __future__ import annotations
-from sklearn.base import BaseEstimator, ClusterMixin
-from sklearn.utils.validation import check_is_fitted, check_X_y, check_array
-from typing import Optional, Union, Any, Literal
+from typing import Literal, Optional, Union
+
import numpy as np
+from sklearn.base import BaseEstimator, ClusterMixin
+from sklearn.utils.validation import check_array, check_is_fitted, check_X_y
+
+from ikpykit.kernel import IsoKernel
from ._inode import INODE
-from pyikt.kernel import IsoKernel
from .utils.dendrogram_purity import dendrogram_purity
from .utils.Graphviz import Graphviz
from .utils.serialize_trees import serliaze_tree_to_file
@@ -66,7 +68,7 @@ class STREAMKHC(BaseEstimator, ClusterMixin):
Examples
--------
- >>> from pyikt.stream import STREAMKHC
+ >>> from ikpykit.stream import STREAMKHC
>>> import numpy as np
>>> # Generate sample data
>>> X = np.random.rand(100, 10) # 100 samples with 10 features
diff --git a/ikpykit/stream/cluster/utils/Graphviz.py b/ikpykit/stream/cluster/utils/Graphviz.py
new file mode 100644
index 0000000..01dedf3
--- /dev/null
+++ b/ikpykit/stream/cluster/utils/Graphviz.py
@@ -0,0 +1,253 @@
+class Graphviz(object):
+ def __init__(self):
+ self.internal_color = "lavenderblush4"
+ self.colors = [
+ "aquamarine",
+ "bisque",
+ "blue",
+ "blueviolet",
+ "brown",
+ "cadetblue",
+ "chartreuse",
+ "coral",
+ "cornflowerblue",
+ "crimson",
+ "darkgoldenrod",
+ "darkgreen",
+ "darkkhaki",
+ "darkmagenta",
+ "darkorange",
+ "darkred",
+ "darksalmon",
+ "darkseagreen",
+ "darkslateblue",
+ "darkslategrey",
+ "darkviolet",
+ "deepskyblue",
+ "dodgerblue",
+ "firebrick",
+ "forestgreen",
+ "gainsboro",
+ "ghostwhite",
+ "gold",
+ "goldenrod",
+ "gray",
+ "grey",
+ "green",
+ "greenyellow",
+ "honeydew",
+ "hotpink",
+ "indianred",
+ "indigo",
+ "ivory",
+ "khaki",
+ "lavender",
+ "lavenderblush",
+ "lawngreen",
+ "lemonchiffon",
+ "lightblue",
+ "lightcoral",
+ "lightcyan",
+ "lightgoldenrodyellow",
+ "lightgray",
+ "lightgreen",
+ "lightgrey",
+ "lightpink",
+ "lightsalmon",
+ "lightseagreen",
+ "lightskyblue",
+ "lightslategray",
+ "lightslategrey",
+ "lightsteelblue",
+ "lightyellow",
+ "limegreen",
+ "linen",
+ "magenta",
+ "maroon",
+ "mediumaquamarine",
+ "mediumblue",
+ "mediumorchid",
+ "mediumpurple",
+ "mediumseagreen",
+ "mediumslateblue",
+ "mediumturquoise",
+ "midnightblue",
+ "mintcream",
+ "mistyrose",
+ "moccasin",
+ "navajowhite",
+ "navy",
+ "oldlace",
+ "olive",
+ "olivedrab",
+ "orange",
+ "orangered",
+ "orchid",
+ "palegoldenrod",
+ "palegreen",
+ "paleturquoise",
+ "palevioletred",
+ "papayawhip",
+ "peachpuff",
+ "peru",
+ "pink",
+ "powderblue",
+ "purple",
+ "red",
+ "rosybrown",
+ "royalblue",
+ "saddlebrown",
+ "salmon",
+ "sandybrown",
+ "seagreen",
+ "seashell",
+ "sienna",
+ "silver",
+ "skyblue",
+ "slateblue",
+ "slategray",
+ "slategrey",
+ "snow",
+ "springgreen",
+ "steelblue",
+ "tan",
+ "teal",
+ "thistle",
+ "tomato",
+ "violet",
+ "wheat",
+ "burlywood",
+ "chocolate",
+ ]
+ self.color_map = {}
+ self.color_counter = 0
+
+ def format_id(self, ID):
+ if not ID.startswith("id"):
+ return (
+ ("id%s" % ID)
+ .replace("-", "")
+ .replace("#", "_HASH_")
+ .replace(".", "_DOT_")
+ )
+ else:
+ return (
+ ("%s" % ID)
+ .replace("-", "")
+ .replace("#", "_HASH_")
+ .replace(".", "_DOT_")
+ )
+
+ def clean_label(self, s):
+ return s.replace("[/:.]", "_")
+
+ def get_node_label(self, node):
+ lbl = []
+ lbl.append(self.format_id(node.id))
+        lbl.append("<br/>")
+        lbl.append("num pts: %d" % len(node.leaves()))
+        lbl.append("<br/>")
+        try:
+            lbl.append("purity: %f" % node.purity())
+        except Exception:
+            pass
+        try:
+            lbl.append("<br/>")
+            lbl.append("across: %s" % node.best_across_debug)
+        except Exception:
+            pass
+        return "".join(lbl)
+
+ def get_color(self, lbl):
+ if lbl in self.color_map:
+ return self.color_map[lbl]
+ else:
+ self.color_map[lbl] = self.colors[self.color_counter]
+ self.color_counter = (self.color_counter + 1) % len(self.colors)
+ return self.color_map[lbl]
+
+ def format_graphiz_node(self, node):
+ """Format a graphviz node for printing."""
+ s = []
+ color = self.internal_color
+ try:
+ if node.purity() == 1.0:
+ if hasattr(node, "pts"):
+ curr_node = node
+ while curr_node.pts == None:
+ curr_node = curr_node.children[0]
+ if len(curr_node.pts) > 0:
+ w_gt = [x for x in curr_node.pts if x[1] and x[1] != "None"]
+ if w_gt:
+ color = self.get_color(w_gt[0][0])
+ else:
+ color = self.get_color("None")
+ except Exception:
+ pass
+ shape = "point"
+
+ if node.parent is None:
+ s.append(
+                "\n%s[shape=%s;style=filled;width=1;color=%s;label=<%s<br/>%s<br/>>]"
+ % (
+ self.format_id(node.id),
+ shape,
+ color,
+ self.get_node_label(node),
+ color,
+ )
+ )
+        s.append("\nROOTNODE[shape=star;style=filled;color=gold;label=<ROOT>]")
+ s.append("\nROOTNODE->%s" % self.format_id(node.id))
+ else:
+ leaf_m = ""
+ if hasattr(node, "pts") and node.pts and len(node.pts) > 0:
+ if hasattr(node.pts[0][0], "mid"):
+ leaf_m = (
+ "%s|%s" % (node.pts[0][0].mid, node.pts[0][0].gt)
+ if node.is_leaf()
+ else ""
+ )
+ else:
+ leaf_m = (
+ "%s|%s" % (node.pts[0][1], node.pts[0][0])
+ if node.is_leaf()
+ else ""
+ )
+ s.append(
+ "\n%s[shape=%s;style=filled;width=1;color=%s;label=<%s
"
+ "%s
%s
>]"
+ % (
+ self.format_id(node.id),
+ shape,
+ color,
+ self.get_node_label(node),
+ color,
+ leaf_m,
+ )
+ )
+ s.append(
+ "\n%s->%s" % (self.format_id(node.parent.id), self.format_id(node.id))
+ )
+ return "".join(s)
+
+ def graphviz_tree(
+ self,
+ root,
+ ):
+ """Return a graphviz tree as a string."""
+ s = []
+ s.append("digraph TreeStructure {\n")
+ s.append(self.format_graphiz_node(root))
+ for d in root.descendants():
+ s.append(self.format_graphiz_node(d))
+ s.append("\n}")
+ return "".join(s)
+
+ @staticmethod
+ def write_tree(root, filename):
+ """Write a graphviz tree to a file."""
+ gv = Graphviz()
+ tree = gv.graphviz_tree(root)
+ with open(filename, "w") as fout:
+ fout.write(tree)
diff --git a/pyikt/stream/cluster/utils/__init__.py b/ikpykit/stream/cluster/utils/__init__.py
similarity index 98%
rename from pyikt/stream/cluster/utils/__init__.py
rename to ikpykit/stream/cluster/utils/__init__.py
index 9dbb6fa..a6323bd 100644
--- a/pyikt/stream/cluster/utils/__init__.py
+++ b/ikpykit/stream/cluster/utils/__init__.py
@@ -1,14 +1,13 @@
# Copyright 2021 hanxin
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
diff --git a/ikpykit/stream/cluster/utils/deltasep_utils.py b/ikpykit/stream/cluster/utils/deltasep_utils.py
new file mode 100644
index 0000000..7beb7b7
--- /dev/null
+++ b/ikpykit/stream/cluster/utils/deltasep_utils.py
@@ -0,0 +1,331 @@
+"""
+Copyright (C) 2017 University of Massachusetts Amherst.
+This file is part of "xcluster"
+http://github.com/iesl/xcluster
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+"""
+Utilities for creating delta separated data sets.
+"""
+import numpy as np
+
+
+def gen_k_centers(k, dim):
+ """Create a k cluster data set with required separation.
+
+ For the purposes of validating a proof, generate each cluster center such
+ that it is at least 4 * delta away from any other cluster for some value of
+ delta > 0.
+
+ Args:
+ k - the number of clusters.
+ dim - (optional) the dimension of the points.
+
+ Returns:
+        A list of k cluster centers and a value of delta such that the cluster
+        centers are 4 * delta away from each other.
+ """
+ delta = abs(np.random.normal(0.0, 5.0))
+ eps = 0.001
+ centers = []
+ for i in range(k):
+ c = np.random.multivariate_normal(np.zeros(dim), np.identity(dim))
+ if len(centers):
+ c1 = centers[0]
+ x = np.random.multivariate_normal(c1, np.identity(c1.size)) - c1
+ direction = x / np.linalg.norm(x)
+ centers.append(c1 + 2.0 * i * delta * direction + eps)
+ else:
+ centers.append(c)
+ return centers, delta
+
+
+def create_dataset(dims, size, num_clusters=20):
+ """Create a delta separated data set.
+
+ Generate a set of centers for the clusters and from each center draw size
+ number of points that constitute the points in that cluster. Then return
+ a dataset of all points.
+
+ Args:
+        dims - (int) the dimension of all data points.
+ size - (int) the number of points to generate for each cluster.
+ num_clusters - (int) the number of clusters.
+ """
+ clusters, delta = gen_k_centers(num_clusters, dims)
+ return _create_constrained_dataset(clusters, delta, size)
+
+
+def _create_constrained_dataset(centers, delta, size):
+ """Create a delta-separated dataset.
+
+ For each of the centers draw size number of points. No two points may be
+    farther than delta away from each other. Thus, to generate each point,
+    choose a random direction and a random distance from the center (of up to 0.5
+ delta).
+
+ Args:
+ centers - a list of cluster centers.
+ delta - the maximum distance between two points in the same cluster.
+ size - the number of points to draw per cluster.
+
+ Returns:
+ A list of points that represents the dataset.
+ """
+ dataset = []
+ count = 0
+ for i, c in enumerate(centers):
+ for j in range(size):
+ x = np.random.multivariate_normal(c, np.identity(np.size(c))) - c
+ direction = x / np.linalg.norm(x)
+ magnitude = np.random.uniform(0.0, 0.5 * delta)
+ # magnitude = np.random.uniform(0.0, delta) # NOT DEL-SEPARATED
+ vec = c + magnitude * direction
+ vec = np.append(vec, i)
+ vec = np.append(vec, count)
+ dataset.append(vec)
+ count += 1
+ return np.array(dataset)
+
+
+def gen_4_normal():
+ """Create 4 cluster centers.
+
+ Create gaussians centered at (1,1), (1,-1), (-1,-1) and (-1,1). Each has
+ standard covariance.
+
+ Args:
+ None
+
+ Returns:
+ A list of the four cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([1.0, 1.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ ),
+ np.random.multivariate_normal(
+ mean=np.array([1.0, -1.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ ),
+ np.random.multivariate_normal(
+ mean=np.array([-1.0, -1.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ ),
+ np.random.multivariate_normal(
+ mean=np.array([-1.0, 1.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ ),
+ ]
+
+
+def _4_normal_spread():
+ """Create 4 cluster centers.
+
+ Create gaussians centered at (10,10), (10,-10), (-10,-10) and (-10,10).
+ Each has standard covariance.
+
+ Args:
+ None
+
+ Returns:
+ A list of the four cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([10.0, 10.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ ),
+ np.random.multivariate_normal(
+ mean=np.array([10.0, -10.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ ),
+ np.random.multivariate_normal(
+ mean=np.array([-10.0, -10.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ ),
+ np.random.multivariate_normal(
+ mean=np.array([-10.0, 10.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ ),
+ ]
+
+
+def _5x5_grid_clusters():
+ """Create a 5x5 grid of cluster centers.
+
+ Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
+ gaussian with standard covariance
+
+ Args:
+ None
+
+ Returns:
+ A list of cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([i, j]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ )
+ for i in range(5)
+ for j in range(5)
+ ]
+
+
+def _5x5_grid_clusters_spread():
+ """Create a 5x5 grid of cluster centers.
+
+ Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
+ gaussian with standard covariance
+
+ Args:
+ None
+
+ Returns:
+ A list of cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ )
+ for i in range(5)
+ for j in range(5)
+ ]
+
+
+def _5x5_grid_clusters_close():
+ """Create a 5x5 grid of cluster centers.
+
+ Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
+ gaussian with standard covariance
+
+ Args:
+ None
+
+ Returns:
+ A list of cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ )
+ for i in range(5)
+ for j in range(5)
+ ]
+
+
+def _2x3_grid_clusters_close():
+    """Create a 2x3 grid of cluster centers.
+
+    Create 6 cluster centers on a 2x3 grid with spacing 5. Each center is a
+    gaussian with standard covariance
+
+ Args:
+ None
+
+ Returns:
+ A list of cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ )
+ for i in range(2)
+ for j in range(3)
+ ]
+
+
+def _2x3_grid_clusters_spread():
+    """Create a 2x3 grid of cluster centers.
+
+    Create 6 cluster centers on a 2x3 grid with spacing 25. Each center is a
+    gaussian with standard covariance
+
+ Args:
+ None
+
+ Returns:
+ A list of cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ )
+ for i in range(2)
+ for j in range(3)
+ ]
+
+
+def _10x10_grid_clusters_close():
+    """Create a 10x10 grid of cluster centers.
+
+    Create 100 cluster centers on a 10x10 grid with spacing 5. Each center is a
+    gaussian with standard covariance
+
+ Args:
+ None
+
+ Returns:
+ A list of cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ )
+ for i in range(10)
+ for j in range(10)
+ ]
+
+
+def _10x10_grid_clusters_spread():
+    """Create a 10x10 grid of cluster centers.
+
+    Create 100 cluster centers on a 10x10 grid with spacing 25. Each center is a
+    gaussian with standard covariance
+
+ Args:
+ None
+
+ Returns:
+ A list of cluster centers.
+ """
+ return [
+ np.random.multivariate_normal(
+ mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ )
+ for i in range(10)
+ for j in range(10)
+ ]
+
+
+def _random_standard_centers(n=100):
+ """Create random cluster centers.
+
+ Create n cluster centers randomly. Each cluster center is a draw from a
+ gaussian distribution centered at (0,0) with standard covariance.
+
+ Args:
+ n - optional; the number of centers to draw (default 100).
+
+ Returns:
+ A list of cluster centers.
+ """
+ generator = np.random.multivariate_normal(
+ mean=np.array([0, 0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])
+ )
+ return [
+ np.random.multivariate_normal(mean=pt, cov=np.array([[1.0, 0.0], [0.0, 1.0]]))
+ for pt in generator.rvs(size=n)
+ ]
+
+
+def _from_file(filename):
+ with open(filename, "r") as f:
+ clustering = []
+ for line in f:
+ splits = line.split("\t")
+ l, vec = int(splits[0]), np.array([float(x) for x in splits[1:]])
+ clustering.append((vec, l))
+ return clustering
diff --git a/pyikt/stream/cluster/utils/dendrogram_purity.py b/ikpykit/stream/cluster/utils/dendrogram_purity.py
similarity index 79%
rename from pyikt/stream/cluster/utils/dendrogram_purity.py
rename to ikpykit/stream/cluster/utils/dendrogram_purity.py
index a70032f..0d6cd25 100644
--- a/pyikt/stream/cluster/utils/dendrogram_purity.py
+++ b/ikpykit/stream/cluster/utils/dendrogram_purity.py
@@ -16,7 +16,6 @@
from itertools import combinations, groupby
import numpy as np
-from tqdm import tqdm
from tqdm._tqdm import trange
@@ -38,16 +37,22 @@ def expected_dendrogram_purity(root):
# Construct a map from leaf to cluster and from cluster to a list of leaves.
# Filter out the singletons in the leaf to cluster map.
leaves = root.leaves()
- def get_cluster(x): return x.pts[0][0]
- cluster_to_leaves = {c: list(ls)
- for c, ls in groupby(sorted(leaves, key=get_cluster),
- get_cluster)}
+
+ def get_cluster(x):
+ return x.pts[0][0]
+
+ cluster_to_leaves = {
+ c: list(ls) for c, ls in groupby(sorted(leaves, key=get_cluster), get_cluster)
+ }
leaf_to_cluster = {l: l.pts[0][0] for l in leaves}
- non_singleton_leaves = [l for l in leaf_to_cluster.keys()
- if len(cluster_to_leaves[leaf_to_cluster[l]]) > 1]
+ non_singleton_leaves = [
+ l
+ for l in leaf_to_cluster.keys()
+ if len(cluster_to_leaves[leaf_to_cluster[l]]) > 1
+ ]
if len(non_singleton_leaves) == 0.0:
return 1.0
- assert(len(non_singleton_leaves) > 0)
+ assert len(non_singleton_leaves) > 0
# For n samples, sample a leaf uniformly at random then select another leaf
# from the same class unformly at random.
@@ -59,13 +64,11 @@ def get_cluster(x): return x.pts[0][0]
rand_cluster_member = np.random.choice(cluster_to_leaves[cluster])
# Make sure we get two distinct leaves
while rand_cluster_member == rand_leaf:
- assert(leaf_to_cluster[rand_leaf] ==
- leaf_to_cluster[rand_cluster_member])
+ assert leaf_to_cluster[rand_leaf] == leaf_to_cluster[rand_cluster_member]
rand_cluster_member = np.random.choice(cluster_to_leaves[cluster])
# Find their lowest common ancestor and compute cluster purity.
- assert(leaf_to_cluster[rand_leaf] ==
- leaf_to_cluster[rand_cluster_member])
+ assert leaf_to_cluster[rand_leaf] == leaf_to_cluster[rand_cluster_member]
lca = rand_leaf.lca(rand_cluster_member)
unnormalized_purity += lca.purity(cluster=cluster)
return unnormalized_purity / samps
@@ -81,8 +84,9 @@ def get_cluster(x):
return x.pts[0][0]
sorted_lvs = sorted(leaves, key=get_cluster)
- leaves_by_true_class = {c: list(ls) for c, ls in groupby(sorted_lvs,
- key=get_cluster)}
+ leaves_by_true_class = {
+ c: list(ls) for c, ls in groupby(sorted_lvs, key=get_cluster)
+ }
leaf_pairs_by_true_class = {}
for class_lbl, lvs in leaves_by_true_class.items():
# leaf_pairs_by_true_class[class_lbl] = combinations(leaves_by_true_class[class_lbl], 2)
@@ -93,7 +97,7 @@ def get_cluster(x):
for pair in leaf_pairs_by_true_class[class_lbl]:
lca = pair[0].lca(pair[1])
sum_purity += lca.purity(get_cluster(pair[0]))
- assert(get_cluster(pair[0]) == get_cluster(pair[1]))
+ assert get_cluster(pair[0]) == get_cluster(pair[1])
count += 1.0
if count == 0.0:
return 1.0
diff --git a/pyikt/stream/cluster/utils/dendrogram_purity_pool.py b/ikpykit/stream/cluster/utils/dendrogram_purity_pool.py
similarity index 79%
rename from pyikt/stream/cluster/utils/dendrogram_purity_pool.py
rename to ikpykit/stream/cluster/utils/dendrogram_purity_pool.py
index 19e1a7b..8db22b9 100644
--- a/pyikt/stream/cluster/utils/dendrogram_purity_pool.py
+++ b/ikpykit/stream/cluster/utils/dendrogram_purity_pool.py
@@ -11,22 +11,28 @@
limitations under the License.
"""
-import time
-from functools import partial
-from itertools import combinations, groupby
-from multiprocessing import Pool, Manager
-
+import sys
import threading
-from queue import Queue
+from itertools import combinations, groupby
+from multiprocessing import Pool
import numpy as np
-import sys
+
sys.setrecursionlimit(50000)
queueLock = threading.Lock()
class Producer(threading.Thread):
- def __init__(self, samp_queue, cluser_to_leaves, leaf_to_cluster, result_queue, non_singleton_leaves, *args, **kwargs):
+ def __init__(
+ self,
+ samp_queue,
+ cluser_to_leaves,
+ leaf_to_cluster,
+ result_queue,
+ non_singleton_leaves,
+ *args,
+ **kwargs,
+ ):
super(Producer, self).__init__(*args, **kwargs)
self.samp_queue = samp_queue
self.result_queue = result_queue
@@ -38,10 +44,10 @@ def run(self):
while True:
queueLock.acquire()
if self.samp_queue.empty():
- print('bye')
+ print("bye")
queueLock.release()
break
- print('剩余采样数:', self.samp_queue.qsize())
+ print("剩余采样数:", self.samp_queue.qsize())
samp = self.samp_queue.get()
queueLock.release()
self.get_purity()
@@ -52,9 +58,8 @@ def get_purity(self):
rand_cluster_member = np.random.choice(self.cluster_to_leaves[cluster])
# Make sure we get two distinct leaves
while rand_cluster_member == rand_leaf:
- #assert(leaf_to_cluster[rand_leaf] == leaf_to_cluster[rand_cluster_member])
- rand_cluster_member = np.random.choice(
- self.cluster_to_leaves[cluster])
+ # assert(leaf_to_cluster[rand_leaf] == leaf_to_cluster[rand_cluster_member])
+ rand_cluster_member = np.random.choice(self.cluster_to_leaves[cluster])
lca = rand_leaf.lca(rand_cluster_member)
purity = lca.purity(cluster=cluster)
# print(purity)
@@ -86,12 +91,15 @@ def expected_dendrogram_purity(root):
def get_cluster(x):
return x.pts[0][0]
- cluster_to_leaves = {c: list(ls)
- for c, ls in groupby(sorted(leaves, key=get_cluster),
- get_cluster)}
+ cluster_to_leaves = {
+ c: list(ls) for c, ls in groupby(sorted(leaves, key=get_cluster), get_cluster)
+ }
leaf_to_cluster = {l: l.pts[0][0] for l in leaves}
- non_singleton_leaves = [l for l in leaf_to_cluster.keys()
- if len(cluster_to_leaves[leaf_to_cluster[l]]) > 1]
+ non_singleton_leaves = [
+ l
+ for l in leaf_to_cluster.keys()
+ if len(cluster_to_leaves[leaf_to_cluster[l]]) > 1
+ ]
if len(non_singleton_leaves) == 0.0:
return 1.0
@@ -100,12 +108,13 @@ def get_cluster(x):
samps = len(non_singleton_leaves) * 5 # TODO (AK): pick 5 in a better way.
with Pool(processes=6) as pool:
res = pool.starmap(
- process, [(non_singleton_leaves, leaf_to_cluster, cluster_to_leaves)]*samps)
+ process,
+ [(non_singleton_leaves, leaf_to_cluster, cluster_to_leaves)] * samps,
+ )
return sum(res) / samps
def process(non_singleton_leaves, leaf_to_cluster, cluster_to_leaves):
-
rand_leaf = np.random.choice(non_singleton_leaves)
cluster = leaf_to_cluster[rand_leaf]
rand_cluster_member = np.random.choice(cluster_to_leaves[cluster])
@@ -127,8 +136,9 @@ def get_cluster(x):
return x.pts[0][0]
sorted_lvs = sorted(leaves, key=get_cluster)
- leaves_by_true_class = {c: list(ls) for c, ls in groupby(sorted_lvs,
- key=get_cluster)}
+ leaves_by_true_class = {
+ c: list(ls) for c, ls in groupby(sorted_lvs, key=get_cluster)
+ }
leaf_pairs_by_true_class = {}
for class_lbl, lvs in leaves_by_true_class.items():
# leaf_pairs_by_true_class[class_lbl] = combinations(leaves_by_true_class[class_lbl], 2)
@@ -139,7 +149,7 @@ def get_cluster(x):
for pair in leaf_pairs_by_true_class[class_lbl]:
lca = pair[0].lca(pair[1])
sum_purity += lca.purity(get_cluster(pair[0]))
- assert(get_cluster(pair[0]) == get_cluster(pair[1]))
+ assert get_cluster(pair[0]) == get_cluster(pair[1])
count += 1.0
if count == 0.0:
return 1.0
diff --git a/pyikt/stream/cluster/utils/file_utils.py b/ikpykit/stream/cluster/utils/file_utils.py
similarity index 86%
rename from pyikt/stream/cluster/utils/file_utils.py
rename to ikpykit/stream/cluster/utils/file_utils.py
index 981b532..e850c8e 100644
--- a/pyikt/stream/cluster/utils/file_utils.py
+++ b/ikpykit/stream/cluster/utils/file_utils.py
@@ -1,11 +1,11 @@
# Copyright 2021 Xin Han
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -38,10 +38,9 @@ def load_data(filename):
if filename.endswith(".csv"):
split_sep = ","
elif filename.endswith(".tsv"):
- split_sep = '\t'
- with open(filename, 'r') as f:
+ split_sep = "\t"
+ with open(filename, "r") as f:
for line in f:
splits = line.strip().split(sep=split_sep)
- pid, l, vec = splits[0], splits[1], np.array([float(x)
- for x in splits[2:]])
- yield ((l, pid, vec))
\ No newline at end of file
+ pid, l, vec = splits[0], splits[1], np.array([float(x) for x in splits[2:]])
+ yield ((l, pid, vec))
diff --git a/ikpykit/stream/cluster/utils/logger.py b/ikpykit/stream/cluster/utils/logger.py
new file mode 100644
index 0000000..6b509c7
--- /dev/null
+++ b/ikpykit/stream/cluster/utils/logger.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+"""
+Copyright (c) 2021 Xin Han
+
+This Source Code Form is subject to the terms of the Mozilla Public
+License, v. 2.0. If a copy of the MPL was not distributed with this
+file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+# coding: utf-8
+
+import logging
+from logging import handlers
+
+
+class Logger(object):
+ level_relations = {
+ "debug": logging.DEBUG,
+ "info": logging.INFO,
+ "warning": logging.WARNING,
+ "error": logging.ERROR,
+ "crit": logging.CRITICAL,
+ }
+
+ def __init__(
+ self,
+ filename,
+ level="info",
+ when="D",
+ backCount=25,
+ fmt="%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s",
+ ):
+ self.logger = logging.getLogger(filename)
+ format_str = logging.Formatter(fmt)
+ self.logger.setLevel(self.level_relations.get(level))
+ console_handler = logging.StreamHandler()
+ console_handler.setFormatter(format_str)
+ file_handler = handlers.TimedRotatingFileHandler(
+ filename=filename, when=when, backupCount=backCount, encoding="utf-8"
+ )
+ file_handler.setFormatter(format_str)
+ self.logger.addHandler(console_handler)
+ self.logger.addHandler(file_handler)
+
+
+if __name__ == "__main__":
+ log = Logger("all.log", level="debug")
+ log.logger.debug("debug")
+ log.logger.info("info")
+ log.logger.warning("warning")
+ log.logger.error("error")
+ Logger("error.log", level="error").logger.error("error")
diff --git a/pyikt/stream/cluster/utils/serialize_trees.py b/ikpykit/stream/cluster/utils/serialize_trees.py
similarity index 57%
rename from pyikt/stream/cluster/utils/serialize_trees.py
rename to ikpykit/stream/cluster/utils/serialize_trees.py
index d380bc3..9d3d2da 100644
--- a/pyikt/stream/cluster/utils/serialize_trees.py
+++ b/ikpykit/stream/cluster/utils/serialize_trees.py
@@ -12,23 +12,25 @@
See the License for the specific language governing permissions and
limitations under the License.
"""
+
import math
import numpy as np
from numba import jit
+from queue import Queue
@jit(nopython=True)
def _fast_dot(x, y):
"""Compute the dot product of x and y using numba.
- Args:
- x - a numpy vector (or list).
- y - a numpy vector (or list).
+ Args:
+ x - a numpy vector (or list).
+ y - a numpy vector (or list).
- Returns:
- x_T.y
- """
+ Returns:
+ x_T.y
+ """
return np.dot(x, y)
@@ -63,32 +65,31 @@ def _fast_norm_diff(x, y):
return _fast_norm(x - y)
-try:
- from Queue import Queue
-except:
- pass
-
-try:
- from queue import Queue
-except:
- pass
-
-
def serliaze_tree_to_file_with_point_ids(root, fn):
- with open(fn, 'w') as fout:
+ with open(fn, "w") as fout:
queue = Queue()
queue.put(root)
while not queue.empty():
curr_node = queue.get()
- curr_node_id = curr_node.pts[0][1] if curr_node.is_leaf(
- ) else curr_node.id
+ curr_node_id = curr_node.pts[0][1] if curr_node.is_leaf() else curr_node.id
sibling_node_id = "None"
if curr_node.parent:
- sibling_node_id = curr_node.siblings()[0].pts[0][1] if curr_node.siblings()[
- 0].is_leaf() else curr_node.siblings()[0].id
+ sibling_node_id = (
+ curr_node.siblings()[0].pts[0][1]
+ if curr_node.siblings()[0].is_leaf()
+ else curr_node.siblings()[0].id
+ )
dis = getDistance(curr_node) if curr_node.parent else "None"
- fout.write("%s\t%s\t%s\t%s\t%s\n" % (curr_node_id, sibling_node_id,
- curr_node.parent.id if curr_node.parent else "None", dis, curr_node.pts[0][0] if curr_node.is_leaf() else "None"))
+ fout.write(
+ "%s\t%s\t%s\t%s\t%s\n"
+ % (
+ curr_node_id,
+ sibling_node_id,
+ curr_node.parent.id if curr_node.parent else "None",
+ dis,
+ curr_node.pts[0][0] if curr_node.is_leaf() else "None",
+ )
+ )
for c in curr_node.children:
queue.put(c)
@@ -98,34 +99,59 @@ def getDistance(curr_node):
curr_point_count = curr_node.point_counter
sibling_point_count = sibling.point_counter
- distance = 2*(200 - (_fast_dot(sibling.ikv, curr_node.ikv) /
- (curr_point_count*sibling_point_count)))
+ distance = 2 * (
+ 200
+ - (
+ _fast_dot(sibling.ikv, curr_node.ikv)
+ / (curr_point_count * sibling_point_count)
+ )
+ )
return distance
def serliaze_tree_to_file(root, fn):
- with open(fn, 'w') as fout:
+ with open(fn, "w") as fout:
queue = Queue()
queue.put(root)
while not queue.empty():
curr_node = queue.get()
- fout.write("%s\t%s\t%s\t%s\n" % (curr_node.id, curr_node.parent.id if curr_node.parent else "None",
- curr_node.pts[0][1] if curr_node.is_leaf() else "None", len(curr_node.pts)))
+ fout.write(
+ "%s\t%s\t%s\t%s\n"
+ % (
+ curr_node.id,
+ curr_node.parent.id if curr_node.parent else "None",
+ curr_node.pts[0][1] if curr_node.is_leaf() else "None",
+ len(curr_node.pts),
+ )
+ )
for c in curr_node.children:
queue.put(c)
def serliaze_collapsed_tree_to_file_with_point_ids(root, fn):
- with open(fn, 'w') as fout:
+ with open(fn, "w") as fout:
queue = Queue()
queue.put(root)
while not queue.empty():
curr_node = queue.get()
- curr_node_id = curr_node.pts[0][2] if curr_node.is_leaf(
- ) and not curr_node.is_collapsed else curr_node.id
- fout.write("%s\t%s\t%s\n" % (curr_node_id, curr_node.parent.id if curr_node.parent else "None",
- curr_node.pts[0][1] if curr_node.is_leaf() and not curr_node.is_collapsed else "None"))
+ curr_node_id = (
+ curr_node.pts[0][2]
+ if curr_node.is_leaf() and not curr_node.is_collapsed
+ else curr_node.id
+ )
+ fout.write(
+ "%s\t%s\t%s\n"
+ % (
+ curr_node_id,
+ curr_node.parent.id if curr_node.parent else "None",
+ (
+ curr_node.pts[0][1]
+ if curr_node.is_leaf() and not curr_node.is_collapsed
+ else "None"
+ ),
+ )
+ )
for c in curr_node.children:
queue.put(c)
if curr_node.collapsed_leaves is not None:
diff --git a/pyikt/stream/tests/test_icid.py b/ikpykit/stream/tests/test_icid.py
similarity index 99%
rename from pyikt/stream/tests/test_icid.py
rename to ikpykit/stream/tests/test_icid.py
index ddc79c8..6b573e4 100644
--- a/pyikt/stream/tests/test_icid.py
+++ b/ikpykit/stream/tests/test_icid.py
@@ -2,10 +2,11 @@
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
-import pytest
import numpy as np
+import pytest
from sklearn.utils import check_random_state
-from pyikt.stream import ICID
+
+from ikpykit.stream import ICID
rng = check_random_state(42)
diff --git a/pyikt/stream/tests/test_streakhc.py b/ikpykit/stream/tests/test_streakhc.py
similarity index 98%
rename from pyikt/stream/tests/test_streakhc.py
rename to ikpykit/stream/tests/test_streakhc.py
index bf70dc5..fb98c87 100644
--- a/pyikt/stream/tests/test_streakhc.py
+++ b/ikpykit/stream/tests/test_streakhc.py
@@ -1,8 +1,7 @@
-import pytest
import numpy as np
-from pyikt.stream import STREAMKHC
-import tempfile
-import os
+import pytest
+
+from ikpykit.stream import STREAMKHC
def test_streamkhc_init():
diff --git a/pyikt/timeseries/__init__.py b/ikpykit/timeseries/__init__.py
similarity index 85%
rename from pyikt/timeseries/__init__.py
rename to ikpykit/timeseries/__init__.py
index 18cae9a..5df0c28 100644
--- a/pyikt/timeseries/__init__.py
+++ b/ikpykit/timeseries/__init__.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -10,7 +10,6 @@
from .anomaly._iktod import IKTOD
-
__all__ = [
"IKTOD",
]
diff --git a/pyikt/timeseries/anomaly/__init__.py b/ikpykit/timeseries/anomaly/__init__.py
similarity index 100%
rename from pyikt/timeseries/anomaly/__init__.py
rename to ikpykit/timeseries/anomaly/__init__.py
diff --git a/pyikt/timeseries/anomaly/_iktod.py b/ikpykit/timeseries/anomaly/_iktod.py
similarity index 97%
rename from pyikt/timeseries/anomaly/_iktod.py
rename to ikpykit/timeseries/anomaly/_iktod.py
index d9bcf7f..f6c8170 100644
--- a/pyikt/timeseries/anomaly/_iktod.py
+++ b/ikpykit/timeseries/anomaly/_iktod.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -9,11 +9,13 @@
"""
import warnings
+from typing import Optional, Union
+
import numpy as np
-from typing import Union, Optional
from sklearn.base import BaseEstimator, OutlierMixin
-from sklearn.utils.validation import check_is_fitted, check_array
-from pyikt.group import IKGAD
+from sklearn.utils.validation import check_array, check_is_fitted
+
+from ikpykit.group import IKGAD
class IKTOD(OutlierMixin, BaseEstimator):
@@ -83,7 +85,7 @@ class IKTOD(OutlierMixin, BaseEstimator):
Examples
--------
- >>> from pyikt.timeseries import IKTOD
+ >>> from ikpykit.timeseries import IKTOD
>>> import numpy as np
>>> # Time series with length 40 (4 periods of length 10)
>>> X = np.sin(np.linspace(0, 8*np.pi, 40)).reshape(-1, 1)
@@ -147,7 +149,6 @@ def fit(self, X) -> "IKTOD":
# Check if time series length is compatible with period_length
rest_samples = X.shape[0] % self.period_length
if rest_samples != 0:
-
warnings.warn(
f"The last sequence of series has {rest_samples} samples, "
f"which are less than other sequence."
diff --git a/pyikt/timeseries/tests/test_iktod.py b/ikpykit/timeseries/tests/test_iktod.py
similarity index 98%
rename from pyikt/timeseries/tests/test_iktod.py
rename to ikpykit/timeseries/tests/test_iktod.py
index 40e17c1..3c1dae8 100644
--- a/pyikt/timeseries/tests/test_iktod.py
+++ b/ikpykit/timeseries/tests/test_iktod.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
-from pyikt.timeseries import IKTOD
+
+from ikpykit.timeseries import IKTOD
def test_iktod_fit_and_predict():
diff --git a/pyikt/trajectory/__init__.py b/ikpykit/trajectory/__init__.py
similarity index 86%
rename from pyikt/trajectory/__init__.py
rename to ikpykit/trajectory/__init__.py
index 1d04e46..b6c3ef4 100644
--- a/pyikt/trajectory/__init__.py
+++ b/ikpykit/trajectory/__init__.py
@@ -1,7 +1,7 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
@@ -11,7 +11,6 @@
from .anomaly._ikat import IKAT
from .cluster._tidkc import TIDKC
-
__all__ = [
"IKAT",
"TIDKC",
diff --git a/pyikt/trajectory/anomaly/__init__.py b/ikpykit/trajectory/anomaly/__init__.py
similarity index 100%
rename from pyikt/trajectory/anomaly/__init__.py
rename to ikpykit/trajectory/anomaly/__init__.py
diff --git a/pyikt/trajectory/anomaly/_ikat.py b/ikpykit/trajectory/anomaly/_ikat.py
similarity index 97%
rename from pyikt/trajectory/anomaly/_ikat.py
rename to ikpykit/trajectory/anomaly/_ikat.py
index d38b31b..f785f19 100644
--- a/pyikt/trajectory/anomaly/_ikat.py
+++ b/ikpykit/trajectory/anomaly/_ikat.py
@@ -1,9 +1,11 @@
+from typing import Any, Literal, Optional, Union
+
import numpy as np
-from typing import Optional, Union, Literal, Any
from sklearn.base import BaseEstimator, OutlierMixin
from sklearn.utils.validation import check_is_fitted
-from pyikt.group import IKGAD
-from pyikt.group.utils import check_format
+
+from ikpykit.group import IKGAD
+from ikpykit.group.utils import check_format
class IKAT(OutlierMixin, BaseEstimator):
@@ -65,8 +67,8 @@ class IKAT(OutlierMixin, BaseEstimator):
Examples
--------
- >>> from pyikt.trajectory import IKAT
- >>> from pyikt.trajectory.dataloader import SheepDogs
+ >>> from ikpykit.trajectory import IKAT
+ >>> from ikpykit.trajectory.dataloader import SheepDogs
>>> sheepdogs = SheepDogs()
>>> X, y = sheepdogs.load(return_X_y=True)
>>> clf = IKAT().fit(X)
diff --git a/pyikt/trajectory/cluster/__init__.py b/ikpykit/trajectory/cluster/__init__.py
similarity index 100%
rename from pyikt/trajectory/cluster/__init__.py
rename to ikpykit/trajectory/cluster/__init__.py
diff --git a/pyikt/trajectory/cluster/_tidkc.py b/ikpykit/trajectory/cluster/_tidkc.py
similarity index 95%
rename from pyikt/trajectory/cluster/_tidkc.py
rename to ikpykit/trajectory/cluster/_tidkc.py
index bef2d8f..cfca118 100644
--- a/pyikt/trajectory/cluster/_tidkc.py
+++ b/ikpykit/trajectory/cluster/_tidkc.py
@@ -1,19 +1,21 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
+from typing import Any, Literal, Optional, Union
+
import numpy as np
from sklearn.base import BaseEstimator, ClusterMixin
-from pyikt.kernel import IsoKernel
-from pyikt.cluster import IDKC
-from pyikt.group.utils import check_format
-from typing import Optional, Union, Literal, Any
+
+from ikpykit.cluster import IDKC
+from ikpykit.group.utils import check_format
+from ikpykit.kernel import IsoKernel
class TIDKC(BaseEstimator, ClusterMixin):
@@ -83,8 +85,8 @@ class TIDKC(BaseEstimator, ClusterMixin):
Examples
--------
- >>> from pyikt.trajectory import TIDKC
- >>> from pyikt.trajectory.dataloader import SheepDogs
+ >>> from ikpykit.trajectory import TIDKC
+ >>> from ikpykit.trajectory.dataloader import SheepDogs
>>> sheepdogs = SheepDogs()
>>> X, y = sheepdogs.load(return_X_y=True)
>>> clf = TIDKC(k=2, kn=5, v=0.5, n_init_samples=10).fit(X)
diff --git a/pyikt/trajectory/dataloader/__init__.py b/ikpykit/trajectory/dataloader/__init__.py
similarity index 100%
rename from pyikt/trajectory/dataloader/__init__.py
rename to ikpykit/trajectory/dataloader/__init__.py
diff --git a/pyikt/trajectory/dataloader/_sheepdogs.py b/ikpykit/trajectory/dataloader/_sheepdogs.py
similarity index 98%
rename from pyikt/trajectory/dataloader/_sheepdogs.py
rename to ikpykit/trajectory/dataloader/_sheepdogs.py
index d14f7b0..d9c0b4c 100644
--- a/pyikt/trajectory/dataloader/_sheepdogs.py
+++ b/ikpykit/trajectory/dataloader/_sheepdogs.py
@@ -1,7 +1,9 @@
-from .base import FileDataset
-import pandas as pd
import datetime
+import pandas as pd
+
+from .base import FileDataset
+
class SheepDogs(FileDataset):
"""SheepDogs trajectory dataset.
@@ -33,7 +35,7 @@ class SheepDogs(FileDataset):
Examples
--------
- >>> from pyikt.trajectory.dataloader import SheepDogs
+ >>> from ikpykit.trajectory.dataloader import SheepDogs
>>> sheepdogs = SheepDogs()
>>> X, y = sheepdogs.load(return_X_y=True)
"""
diff --git a/pyikt/trajectory/dataloader/base.py b/ikpykit/trajectory/dataloader/base.py
similarity index 99%
rename from pyikt/trajectory/dataloader/base.py
rename to ikpykit/trajectory/dataloader/base.py
index 18de853..095db53 100644
--- a/pyikt/trajectory/dataloader/base.py
+++ b/ikpykit/trajectory/dataloader/base.py
@@ -2,7 +2,6 @@
import abc
import inspect
-import itertools
import os
import pathlib
import re
diff --git a/pyikt/trajectory/dataloader/datasets/flyingfox.zip b/ikpykit/trajectory/dataloader/datasets/flyingfox.zip
similarity index 100%
rename from pyikt/trajectory/dataloader/datasets/flyingfox.zip
rename to ikpykit/trajectory/dataloader/datasets/flyingfox.zip
diff --git a/pyikt/trajectory/dataloader/datasets/sheepdogs.zip b/ikpykit/trajectory/dataloader/datasets/sheepdogs.zip
similarity index 100%
rename from pyikt/trajectory/dataloader/datasets/sheepdogs.zip
rename to ikpykit/trajectory/dataloader/datasets/sheepdogs.zip
diff --git a/pyikt/trajectory/dataloader/datasets/turkey.zip b/ikpykit/trajectory/dataloader/datasets/turkey.zip
similarity index 100%
rename from pyikt/trajectory/dataloader/datasets/turkey.zip
rename to ikpykit/trajectory/dataloader/datasets/turkey.zip
diff --git a/pyikt/trajectory/dataloader/datasets/wildebeest.zip b/ikpykit/trajectory/dataloader/datasets/wildebeest.zip
similarity index 100%
rename from pyikt/trajectory/dataloader/datasets/wildebeest.zip
rename to ikpykit/trajectory/dataloader/datasets/wildebeest.zip
diff --git a/pyikt/trajectory/tests/test_ikat.py b/ikpykit/trajectory/tests/test_ikat.py
similarity index 99%
rename from pyikt/trajectory/tests/test_ikat.py
rename to ikpykit/trajectory/tests/test_ikat.py
index d1f3404..e446e49 100644
--- a/pyikt/trajectory/tests/test_ikat.py
+++ b/ikpykit/trajectory/tests/test_ikat.py
@@ -4,9 +4,10 @@
license that can be found in the LICENSE file.
"""
-import pytest
import numpy as np
-from pyikt.trajectory import IKAT
+import pytest
+
+from ikpykit.trajectory import IKAT
@pytest.fixture
diff --git a/pyikt/trajectory/tests/test_tidkc.py b/ikpykit/trajectory/tests/test_tidkc.py
similarity index 98%
rename from pyikt/trajectory/tests/test_tidkc.py
rename to ikpykit/trajectory/tests/test_tidkc.py
index 174acce..ca55853 100644
--- a/pyikt/trajectory/tests/test_tidkc.py
+++ b/ikpykit/trajectory/tests/test_tidkc.py
@@ -1,6 +1,7 @@
import numpy as np
import pytest
-from pyikt.trajectory import TIDKC
+
+from ikpykit.trajectory import TIDKC
def test_tidkc_initialization():
diff --git a/pyikt/trajectory/utils.py b/ikpykit/trajectory/utils.py
similarity index 96%
rename from pyikt/trajectory/utils.py
rename to ikpykit/trajectory/utils.py
index e7b51bb..7986e12 100644
--- a/pyikt/trajectory/utils.py
+++ b/ikpykit/trajectory/utils.py
@@ -1,15 +1,16 @@
"""
-pyikt (c) by Xin Han
+ikpykit (c) by Xin Han
-pyikt is licensed under a
+ikpykit is licensed under a
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see .
"""
+from typing import Any, List, Optional, Union
+
import numpy as np
-from typing import Union, List, Any, Optional
def check_format(
diff --git a/mkdocs.yml b/mkdocs.yml
index 9ded03d..5d821ba 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,8 +1,8 @@
-site_name: PyIKT Docs
+site_name: IKPyKit Docs
extra_css:
- stylesheets/extra.css
-repo_url: https://github.com/IsolationKernel/pyikt
-site_url: https://isolationkernel.github.io/pyikt
+repo_url: https://github.com/IsolationKernel/ikpykit
+site_url: https://isolationkernel.github.io/ikpykit
remote_branch: docs_dev
site_description: Python library for Isolation Kernel Toolkit.
site_author: Isolation Kernel Team
@@ -11,7 +11,7 @@ copyright: Copyright © 2024 - 2025 Xin Han
nav:
- Home:
- - Welcome to PyIKT: README.md
+ - Welcome to IKPyKit: README.md
- Quick start:
- How to install: quick-start/how-to-install.md
@@ -186,5 +186,5 @@ extra:
make our documentation better.
seo:
description: >-
- **PyIKT** (Python for Isolation Kernel Similarity) is an intuitive Python library designed for a variety of machine learning tasks including kernel similarity calculation, anomaly detection, clustering, and change detection—all powered by the innovative **Isolation Kernel (IK)** . Isolation Kernel is a data-dependent kernel that measures similarity by isolating data points using an isolation mechanism. It uniquely adapts to the data distribution, with the property that points in sparse regions are more similar than those in dense regions. Notably, it requires no learning or closed-form expression, making it efficient and scalable.
+ **IKPyKit** (Isolation Kernel Python toolKit) is an intuitive Python library designed for a variety of machine learning tasks including kernel similarity calculation, anomaly detection, clustering, and change detection—all powered by the innovative **Isolation Kernel (IK)** . Isolation Kernel is a data-dependent kernel that measures similarity by isolating data points using an isolation mechanism. It uniquely adapts to the data distribution, with the property that points in sparse regions are more similar than those in dense regions. Notably, it requires no learning or closed-form expression, making it efficient and scalable.
keywords: time series, forecasting, machine learning, python, data science, scikit-learn, anomaly detection, clustering, change detection, kernel similarity, isolation kernel
diff --git a/pixi.lock b/pixi.lock
index 2d73ec1..6f38826 100644
--- a/pixi.lock
+++ b/pixi.lock
@@ -281,7 +281,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda
- - conda: https://conda.anaconda.org/conda-forge/linux-64/black-23.3.0-py310hff52083_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/linux-64/black-24.8.0-py310hff52083_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hf71b8c6_2.conda
@@ -429,7 +429,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/linux-64/regex-2024.11.6-py310ha75aee5_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.23.1-py310hc1293b2_0.conda
- - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.4.2-py310h9065425_0.conda
+ - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.9.9-py310h8851ac2_0.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.6.1-py310h27f47ee_0.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py310ha4c1d20_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh0d859eb_1.conda
@@ -472,7 +472,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda
- - conda: https://conda.anaconda.org/conda-forge/osx-64/black-23.3.0-py310h2ec42d9_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-64/black-24.8.0-py310h2ec42d9_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py310h53e7c6a_2.conda
@@ -613,7 +613,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/osx-64/regex-2024.11.6-py310hbb8c376_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/rpds-py-0.23.1-py310h40a894c_0.conda
- - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.4.2-py310h4a464a8_0.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.9.9-py310h9dc1ea1_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.6.1-py310h6ed8a50_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.11.2-py310h3900cf1_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_1.conda
@@ -656,7 +656,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda
- - conda: https://conda.anaconda.org/conda-forge/osx-arm64/black-23.3.0-py310hbe9552e_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-arm64/black-24.8.0-py310hbe9552e_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py310hb4ad77e_2.conda
@@ -798,7 +798,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/regex-2024.11.6-py310h078409c_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.23.1-py310h31b3829_0.conda
- - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruff-0.4.2-py310h275c10e_0.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruff-0.9.9-py310hdcf0d46_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-learn-1.6.1-py310h48c93d9_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.11.2-py310h0975f3d_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh31c8845_1.conda
@@ -840,7 +840,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda
- - conda: https://conda.anaconda.org/conda-forge/win-64/black-23.3.0-py310h5588dad_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/win-64/black-24.8.0-py310h5588dad_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py310h9e98ed7_2.conda
@@ -981,7 +981,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/win-64/regex-2024.11.6-py310ha8f682b_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/rpds-py-0.23.1-py310h7c79e54_0.conda
- - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.4.2-py310h7f1804c_0.conda
+ - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.9.9-py310h090d742_0.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.6.1-py310hf2a6c47_0.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.11.2-py310h578b7cb_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.3-pyh5737063_1.conda
@@ -1038,7 +1038,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda
- - conda: https://conda.anaconda.org/conda-forge/linux-64/black-23.3.0-py310hff52083_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/linux-64/black-24.8.0-py310hff52083_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hf71b8c6_2.conda
@@ -1220,7 +1220,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda
- - conda: https://conda.anaconda.org/conda-forge/osx-64/black-23.3.0-py310h2ec42d9_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-64/black-24.8.0-py310h2ec42d9_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py310h53e7c6a_2.conda
@@ -1395,7 +1395,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda
- - conda: https://conda.anaconda.org/conda-forge/osx-arm64/black-23.3.0-py310hbe9552e_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-arm64/black-24.8.0-py310hbe9552e_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py310hb4ad77e_2.conda
@@ -1569,7 +1569,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/attrs-25.1.0-pyh71513ae_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.13.3-pyha770c72_0.conda
- - conda: https://conda.anaconda.org/conda-forge/win-64/black-23.3.0-py310h5588dad_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/win-64/black-24.8.0-py310h5588dad_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py310h9e98ed7_2.conda
@@ -1749,7 +1749,7 @@ environments:
linux-64:
- conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2
- conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2
- - conda: https://conda.anaconda.org/conda-forge/linux-64/black-23.3.0-py310hff52083_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/linux-64/black-24.8.0-py310hff52083_1.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py310hf71b8c6_2.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.12.14-hbcca054_0.conda
@@ -1807,13 +1807,14 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py310h89163eb_2.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda
- - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.4.2-py310h9065425_0.conda
+ - conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.9.9-py310h8851ac2_0.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.6.1-py310h27f47ee_0.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py310ha4c1d20_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda
- conda: https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310h3788b33_5.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda
@@ -1833,7 +1834,7 @@ environments:
- pypi: https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl
- pypi: .
osx-64:
- - conda: https://conda.anaconda.org/conda-forge/osx-64/black-23.3.0-py310h2ec42d9_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-64/black-24.8.0-py310h2ec42d9_1.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/brotli-python-1.1.0-py310h53e7c6a_2.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-hfdf4475_7.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2024.12.14-h8857fd0_0.conda
@@ -1883,13 +1884,14 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0.2-py310h8e2f543_2.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.2-h9e318b2_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda
- - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.4.2-py310h4a464a8_0.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.9.9-py310h9dc1ea1_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/scikit-learn-1.6.1-py310h6ed8a50_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/scipy-1.11.2-py310h3900cf1_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h1abcd95_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-64/ukkonen-1.0.1-py310hfa8da69_5.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda
@@ -1909,7 +1911,7 @@ environments:
- pypi: https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl
- pypi: .
osx-arm64:
- - conda: https://conda.anaconda.org/conda-forge/osx-arm64/black-23.3.0-py310hbe9552e_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-arm64/black-24.8.0-py310hbe9552e_1.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py310hb4ad77e_2.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-h99b78c6_7.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ca-certificates-2024.12.14-hf0a4a13_0.conda
@@ -1959,13 +1961,14 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pyyaml-6.0.2-py310hc74094e_2.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.2-h92ec313_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda
- - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruff-0.4.2-py310h275c10e_0.conda
+ - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruff-0.9.9-py310hdcf0d46_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-learn-1.6.1-py310h48c93d9_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/scipy-1.11.2-py310h0975f3d_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h5083fa2_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ukkonen-1.0.1-py310h7306fd8_5.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda
@@ -1985,7 +1988,7 @@ environments:
- pypi: https://files.pythonhosted.org/packages/0f/dd/84f10e23edd882c6f968c21c2434fe67bd4a528967067515feca9e611e5e/tzdata-2025.1-py2.py3-none-any.whl
- pypi: .
win-64:
- - conda: https://conda.anaconda.org/conda-forge/win-64/black-23.3.0-py310h5588dad_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/win-64/black-24.8.0-py310h5588dad_1.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/brotli-python-1.1.0-py310h9e98ed7_2.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h2466b09_7.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2024.12.14-h56e8100_0.conda
@@ -2040,7 +2043,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/win-64/python_abi-3.10-5_cp310.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0.2-py310h38315fa_2.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda
- - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.4.2-py310h7f1804c_0.conda
+ - conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.9.9-py310h090d742_0.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.6.1-py310hf2a6c47_0.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/scipy-1.11.2-py310h578b7cb_0.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda
@@ -2048,6 +2051,7 @@ environments:
- conda: https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/tk-8.6.13-h5226925_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda
+ - conda: https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda
- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_1.conda
- conda: https://conda.anaconda.org/conda-forge/win-64/ukkonen-1.0.1-py310hc19bc0b_5.conda
@@ -2428,8 +2432,6 @@ packages:
- libgcc >=13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls:
@@ -2444,8 +2446,6 @@ packages:
- cffi >=1.0.1
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -2461,8 +2461,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -2479,8 +2477,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls:
@@ -2536,9 +2532,9 @@ packages:
- pkg:pypi/beautifulsoup4?source=compressed-mapping
size: 145482
timestamp: 1738740460562
-- conda: https://conda.anaconda.org/conda-forge/linux-64/black-23.3.0-py310hff52083_1.conda
- sha256: 9ede541d2f84668f45dd02185d53547e84a5834f8a167d89b74b79b5d7f7a992
- md5: 22bc96cd2aed2482d207ab376fc00f18
+- conda: https://conda.anaconda.org/conda-forge/linux-64/black-24.8.0-py310hff52083_1.conda
+ sha256: 660306076a8128446c64c057ea2fd87be7b0b42b04d80d51f45c5006f44a7560
+ md5: 803feebc8451cf02fb415b603dcd3e36
depends:
- click >=8.0.0
- mypy_extensions >=0.4.3
@@ -2548,17 +2544,16 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- tomli >=1.1.0
- arch: x86_64
- platform: linux
+ - typing_extensions >=4.0.1
license: MIT
license_family: MIT
purls:
- pkg:pypi/black?source=hash-mapping
- size: 271936
- timestamp: 1682492054286
-- conda: https://conda.anaconda.org/conda-forge/osx-64/black-23.3.0-py310h2ec42d9_1.conda
- sha256: ccd6cb8877ef0a4246c3ad6893e927bc6ddb265b773f908c2188e5e52c2ae37a
- md5: dda28fe2408191e4ab3d48df4a331899
+ size: 304024
+ timestamp: 1726154973752
+- conda: https://conda.anaconda.org/conda-forge/osx-64/black-24.8.0-py310h2ec42d9_1.conda
+ sha256: 0829ccb0e526aec7825a4ef34d07dbed4d002a0ae9f02e87cff8666cdd9be4d5
+ md5: bec9c91e3676af9ca62c3d373f22637b
depends:
- click >=8.0.0
- mypy_extensions >=0.4.3
@@ -2568,17 +2563,16 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- tomli >=1.1.0
- arch: x86_64
- platform: osx
+ - typing_extensions >=4.0.1
license: MIT
license_family: MIT
purls:
- pkg:pypi/black?source=hash-mapping
- size: 271994
- timestamp: 1682492532222
-- conda: https://conda.anaconda.org/conda-forge/osx-arm64/black-23.3.0-py310hbe9552e_1.conda
- sha256: 26ee80c3d884eef62b6850085fdab4ecb83ba91d2a23179b9c2292ec55ba9f52
- md5: a81d69fef268a3a8fba818da27bab102
+ size: 305165
+ timestamp: 1726154983228
+- conda: https://conda.anaconda.org/conda-forge/osx-arm64/black-24.8.0-py310hbe9552e_1.conda
+ sha256: b403cf7ad2678b0682d86755168c7d3c28974c69caa059aa58e27b04048adc03
+ md5: 7e2e651c0c677216559ea424ce6daf67
depends:
- click >=8.0.0
- mypy_extensions >=0.4.3
@@ -2589,17 +2583,16 @@ packages:
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- tomli >=1.1.0
- arch: arm64
- platform: osx
+ - typing_extensions >=4.0.1
license: MIT
license_family: MIT
purls:
- pkg:pypi/black?source=hash-mapping
- size: 272162
- timestamp: 1682492378469
-- conda: https://conda.anaconda.org/conda-forge/win-64/black-23.3.0-py310h5588dad_1.conda
- sha256: 1ce1ddbddf3397fafa1e8cfa5df8add4c2184c09ebc8db29534d933085e8a869
- md5: 1d8f9fb35cc2668ee709dbbdb18864c0
+ size: 306017
+ timestamp: 1726155314251
+- conda: https://conda.anaconda.org/conda-forge/win-64/black-24.8.0-py310h5588dad_1.conda
+ sha256: f0497e7b8c9144ab2d53ff95f76f1fb15b2a69c2af5b93c0cc420356c8243c1d
+ md5: f1db58ddb07812e6b18d68440ed07d84
depends:
- click >=8.0.0
- mypy_extensions >=0.4.3
@@ -2609,14 +2602,13 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- tomli >=1.1.0
- arch: x86_64
- platform: win
+ - typing_extensions >=4.0.1
license: MIT
license_family: MIT
purls:
- pkg:pypi/black?source=hash-mapping
- size: 287586
- timestamp: 1682492402457
+ size: 330123
+ timestamp: 1726155398256
- conda: https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda
sha256: a05971bb80cca50ce9977aad3f7fc053e54ea7d5321523efc7b9a6e12901d3cd
md5: f0b4c8e370446ef89797608d60a564b3
@@ -2936,8 +2928,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- tomli
- arch: x86_64
- platform: linux
license: Apache-2.0
license_family: APACHE
purls:
@@ -2952,8 +2942,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- tomli
- arch: x86_64
- platform: osx
license: Apache-2.0
license_family: APACHE
purls:
@@ -2969,8 +2957,6 @@ packages:
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- tomli
- arch: arm64
- platform: osx
license: Apache-2.0
license_family: APACHE
purls:
@@ -2987,8 +2973,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: Apache-2.0
license_family: APACHE
purls:
@@ -3015,8 +2999,6 @@ packages:
- libstdcxx >=13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls:
@@ -3031,8 +3013,6 @@ packages:
- libcxx >=18
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -3048,8 +3028,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -3065,8 +3043,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls:
@@ -3201,8 +3177,6 @@ packages:
depends:
- libgcc-ng >=12
- libstdcxx-ng >=12
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls: []
@@ -3211,8 +3185,6 @@ packages:
- conda: https://conda.anaconda.org/conda-forge/osx-64/icu-73.2-hf5e326d_0.conda
sha256: f66362dc36178ac9b7c7a9b012948a9d2d050b3debec24bbd94aadbc44854185
md5: 5cc301d759ec03f28328428e28f65591
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -3221,8 +3193,6 @@ packages:
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-73.2-hc8870d7_0.conda
sha256: ff9cd0c6cd1349954c801fb443c94192b637e1b414514539f3c49c56a39f51b1
md5: 8521bd47c0e11c5902535bb1a17c565f
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -3233,8 +3203,6 @@ packages:
md5: 5eb22c1d7b3fc4abb50d92d621583137
depends:
- __osx >=11.0
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -3263,6 +3231,18 @@ packages:
- pkg:pypi/idna?source=hash-mapping
size: 49765
timestamp: 1733211921194
+- pypi: .
+ name: ikpykit
+ version: 0.1.dev153
+ sha256: 37164a92bdfea39a6e2109de81c87eb271e74e23200be8f715fa0a4a0da87ebd
+ requires_dist:
+ - scikit-learn>=1.2
+ - pandas>=1.5
+ - numpy<=1.22
+ - numba>=0.54
+ - tqdm>=4.62.3
+ requires_python: '>=3.9'
+ editable: true
- conda: https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda
sha256: 598951ebdb23e25e4cec4bbff0ae369cec65ead80b50bc08b441d8e54de5cf03
md5: f4b39bf00c69f56ac01e020ebfac066c
@@ -3608,8 +3588,6 @@ packages:
- python_abi 3.10.* *_cp310
- pywin32 >=300
- traitlets >=5.3
- arch: x86_64
- platform: win
license: BSD-3-Clause
license_family: BSD
purls:
@@ -3700,8 +3678,6 @@ packages:
md5: 30186d27e2c9fa62b45fb1476b7200e3
depends:
- libgcc-ng >=10.3.0
- arch: x86_64
- platform: linux
license: LGPL-2.1-or-later
purls: []
size: 117831
@@ -3716,8 +3692,6 @@ packages:
- libgcc-ng >=12
- libstdcxx-ng >=12
- openssl >=3.3.1,<4.0a0
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls: []
@@ -3732,8 +3706,6 @@ packages:
- libedit >=3.1.20191231,<3.2.0a0
- libedit >=3.1.20191231,<4.0a0
- openssl >=3.3.1,<4.0a0
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -3748,8 +3720,6 @@ packages:
- libedit >=3.1.20191231,<3.2.0a0
- libedit >=3.1.20191231,<4.0a0
- openssl >=3.3.1,<4.0a0
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -3763,8 +3733,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls: []
@@ -3939,8 +3907,6 @@ packages:
- __glibc >=2.17,<3.0.a0
- libgcc >=13
- ncurses >=6.5,<7.0a0
- arch: x86_64
- platform: linux
license: BSD-2-Clause
license_family: BSD
purls: []
@@ -3953,8 +3919,6 @@ packages:
- ncurses
- __osx >=10.13
- ncurses >=6.5,<7.0a0
- arch: x86_64
- platform: osx
license: BSD-2-Clause
license_family: BSD
purls: []
@@ -3967,8 +3931,6 @@ packages:
- ncurses
- __osx >=11.0
- ncurses >=6.5,<7.0a0
- arch: arm64
- platform: osx
license: BSD-2-Clause
license_family: BSD
purls: []
@@ -4144,8 +4106,6 @@ packages:
depends:
- __glibc >=2.17,<3.0.a0
- libgcc >=13
- arch: x86_64
- platform: linux
license: LGPL-2.1-only
purls: []
size: 713084
@@ -4155,8 +4115,6 @@ packages:
md5: 6283140d7b2b55b6b095af939b71b13f
depends:
- __osx >=10.13
- arch: x86_64
- platform: osx
license: LGPL-2.1-only
purls: []
size: 669052
@@ -4166,8 +4124,6 @@ packages:
md5: 450e6bdc0c7d986acf7b8443dce87111
depends:
- __osx >=11.0
- arch: arm64
- platform: osx
license: LGPL-2.1-only
purls: []
size: 681804
@@ -4490,8 +4446,6 @@ packages:
md5: a587892d3c13b6621a6091be690dbca2
depends:
- libgcc-ng >=12
- arch: x86_64
- platform: linux
license: ISC
purls: []
size: 205978
@@ -4501,8 +4455,6 @@ packages:
md5: 6af4b059e26492da6013e79cbcb4d069
depends:
- __osx >=10.13
- arch: x86_64
- platform: osx
license: ISC
purls: []
size: 210249
@@ -4512,8 +4464,6 @@ packages:
md5: a7ce36e284c5faaf93c220dfc39e3abd
depends:
- __osx >=11.0
- arch: arm64
- platform: osx
license: ISC
purls: []
size: 164972
@@ -4525,8 +4475,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: ISC
purls: []
size: 202344
@@ -4651,8 +4599,6 @@ packages:
- libiconv >=1.17,<2.0a0
- libzlib >=1.2.13,<2.0a0
- xz >=5.2.6,<6.0a0
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls: []
@@ -4669,8 +4615,6 @@ packages:
- libzlib >=1.3.1,<2.0a0
constrains:
- icu <0.0a0
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls: []
@@ -4685,8 +4629,6 @@ packages:
- libiconv >=1.17,<2.0a0
- libzlib >=1.2.13,<2.0a0
- xz >=5.2.6,<6.0a0
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -4702,8 +4644,6 @@ packages:
- libzlib >=1.3.1,<2.0a0
constrains:
- icu <0.0a0
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -4718,8 +4658,6 @@ packages:
- libiconv >=1.17,<2.0a0
- libzlib >=1.2.13,<2.0a0
- xz >=5.2.6,<6.0a0
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -4734,8 +4672,6 @@ packages:
- libiconv >=1.17,<2.0a0
- liblzma >=5.6.3,<6.0a0
- libzlib >=1.3.1,<2.0a0
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -4775,8 +4711,6 @@ packages:
depends:
- libgcc-ng >=12
- libxml2 >=2.12.1,<3.0.0a0
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls: []
@@ -4787,8 +4721,6 @@ packages:
md5: a6e0cec6b3517ffc6b5d36a920fc9312
depends:
- libxml2 >=2.12.1,<3.0.0a0
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -4799,8 +4731,6 @@ packages:
md5: 560c9cacc33e927f55b998eaa0cb1732
depends:
- libxml2 >=2.12.1,<3.0.0a0
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -4814,8 +4744,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls: []
@@ -5081,8 +5009,6 @@ packages:
- libzlib >=1.2.13,<2.0.0a0
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
license: BSD-3-Clause and MIT-CMU
purls:
- pkg:pypi/lxml?source=hash-mapping
@@ -5099,8 +5025,6 @@ packages:
- libzlib >=1.3.1,<2.0a0
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
license: BSD-3-Clause and MIT-CMU
purls:
- pkg:pypi/lxml?source=hash-mapping
@@ -5116,8 +5040,6 @@ packages:
- libzlib >=1.2.13,<2.0.0a0
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: BSD-3-Clause and MIT-CMU
purls:
- pkg:pypi/lxml?source=hash-mapping
@@ -5133,8 +5055,6 @@ packages:
- libzlib >=1.3.1,<2.0a0
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: BSD-3-Clause and MIT-CMU
purls:
- pkg:pypi/lxml?source=hash-mapping
@@ -5151,8 +5071,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: BSD-3-Clause and MIT-CMU
purls:
- pkg:pypi/lxml?source=hash-mapping
@@ -5169,8 +5087,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: BSD-3-Clause and MIT-CMU
purls:
- pkg:pypi/lxml?source=hash-mapping
@@ -5188,8 +5104,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: BSD-3-Clause and MIT-CMU
purls:
- pkg:pypi/lxml?source=hash-mapping
@@ -5207,8 +5121,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: BSD-3-Clause and MIT-CMU
purls:
- pkg:pypi/lxml?source=hash-mapping
@@ -5300,8 +5212,6 @@ packages:
- python_abi 3.10.* *_cp310
constrains:
- jinja2 >=3.0.0
- arch: x86_64
- platform: linux
license: BSD-3-Clause
license_family: BSD
purls:
@@ -5317,8 +5227,6 @@ packages:
- python_abi 3.10.* *_cp310
constrains:
- jinja2 >=3.0.0
- arch: x86_64
- platform: osx
license: BSD-3-Clause
license_family: BSD
purls:
@@ -5335,8 +5243,6 @@ packages:
- python_abi 3.10.* *_cp310
constrains:
- jinja2 >=3.0.0
- arch: arm64
- platform: osx
license: BSD-3-Clause
license_family: BSD
purls:
@@ -5354,8 +5260,6 @@ packages:
- vc14_runtime >=14.29.30139
constrains:
- jinja2 >=3.0.0
- arch: x86_64
- platform: win
license: BSD-3-Clause
license_family: BSD
purls:
@@ -6538,8 +6442,6 @@ packages:
- conda: https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.6.3-ha770c72_0.conda
sha256: a7392b0d5403676b0b3ab9ff09c1e65d8ab9e1c34349bba9be605d76cf622640
md5: 16ff7c679250dc09f9732aab14408d2c
- arch: x86_64
- platform: linux
license: GPL-2.0-or-later
license_family: GPL
purls: []
@@ -6548,8 +6450,6 @@ packages:
- conda: https://conda.anaconda.org/conda-forge/osx-64/pandoc-3.6.3-h694c41f_0.conda
sha256: 1133230f40d30e3eaa03e209800800751423360193fdc7e6523771125b72daa8
md5: eb1ef420766e7f9bd8f61a2bc7c2961d
- arch: x86_64
- platform: osx
license: GPL-2.0-or-later
license_family: GPL
purls: []
@@ -6558,8 +6458,6 @@ packages:
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/pandoc-3.6.3-hce30654_0.conda
sha256: 2013114d2405b4a4e9d9b62522eb09111e1b9e3cd2e902a42e7ccc8a88a2b05a
md5: 41603d280df06636257acd79ea326be9
- arch: arm64
- platform: osx
license: GPL-2.0-or-later
license_family: GPL
purls: []
@@ -6568,8 +6466,6 @@ packages:
- conda: https://conda.anaconda.org/conda-forge/win-64/pandoc-3.6.3-h57928b3_0.conda
sha256: ffdb8fd1da7419f6625c8b2339a12f9669a705ada4177b763cc796c60763f734
md5: 9b999036cccf0d5a94ed3c0b0edbb905
- arch: x86_64
- platform: win
license: GPL-2.0-or-later
license_family: GPL
purls: []
@@ -6725,8 +6621,6 @@ packages:
- libgcc >=13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
license: BSD-3-Clause
license_family: BSD
purls:
@@ -6740,8 +6634,6 @@ packages:
- __osx >=10.13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: BSD-3-Clause
license_family: BSD
purls:
@@ -6756,8 +6648,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: BSD-3-Clause
license_family: BSD
purls:
@@ -6773,8 +6663,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: BSD-3-Clause
license_family: BSD
purls:
@@ -6846,18 +6734,6 @@ packages:
- pkg:pypi/pygments?source=hash-mapping
size: 888600
timestamp: 1736243563082
-- pypi: .
- name: pyikt
- version: 0.0.0
- sha256: 44d7e5a7a3d3f511455889d8501dba6f9e58c8a36da7fb2806c84ec4fcb68883
- requires_dist:
- - numpy<=1.22
- - pandas>=1.5
- - scikit-learn>=1.2
- - numba>=0.54
- - tqdm>=4.62.3
- requires_python: '>=3.9'
- editable: true
- conda: https://conda.anaconda.org/conda-forge/noarch/pymdown-extensions-10.14.3-pyhd8ed1ab_0.conda
sha256: 83b638059eda5208e2e4acfeecf2ff53b5dcb1adc7d85fc92edf0e7a48b943eb
md5: 08bf3657d03e1ee964c66288f5b3d797
@@ -6880,8 +6756,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- setuptools
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -6898,8 +6772,6 @@ packages:
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- setuptools
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -6915,8 +6787,6 @@ packages:
- pyobjc-core 11.0.*
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -6933,8 +6803,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -7286,8 +7154,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: PSF-2.0
license_family: PSF
purls:
@@ -7304,8 +7170,6 @@ packages:
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- winpty
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls:
@@ -7321,8 +7185,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- yaml >=0.2.5,<0.3.0a0
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls:
@@ -7337,8 +7199,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- yaml >=0.2.5,<0.3.0a0
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -7354,8 +7214,6 @@ packages:
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- yaml >=0.2.5,<0.3.0a0
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -7372,8 +7230,6 @@ packages:
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- yaml >=0.2.5,<0.3.0a0
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls:
@@ -7403,8 +7259,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- zeromq >=4.3.5,<4.4.0a0
- arch: x86_64
- platform: linux
license: BSD-3-Clause
license_family: BSD
purls:
@@ -7421,8 +7275,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- zeromq >=4.3.5,<4.4.0a0
- arch: x86_64
- platform: osx
license: BSD-3-Clause
license_family: BSD
purls:
@@ -7440,8 +7292,6 @@ packages:
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- zeromq >=4.3.5,<4.4.0a0
- arch: arm64
- platform: osx
license: BSD-3-Clause
license_family: BSD
purls:
@@ -7459,8 +7309,6 @@ packages:
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- zeromq >=4.3.5,<4.3.6.0a0
- arch: x86_64
- platform: win
license: BSD-3-Clause
license_family: BSD
purls:
@@ -7521,8 +7369,6 @@ packages:
- libgcc >=13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
license: Python-2.0
license_family: PSF
purls:
@@ -7536,8 +7382,6 @@ packages:
- __osx >=10.13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: Python-2.0
license_family: PSF
purls:
@@ -7552,8 +7396,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: Python-2.0
license_family: PSF
purls:
@@ -7569,8 +7411,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: Python-2.0
license_family: PSF
purls:
@@ -7604,8 +7444,6 @@ packages:
- python_abi 3.10.* *_cp310
constrains:
- __glibc >=2.17
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls:
@@ -7621,8 +7459,6 @@ packages:
- python_abi 3.10.* *_cp310
constrains:
- __osx >=10.13
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -7639,8 +7475,6 @@ packages:
- python_abi 3.10.* *_cp310
constrains:
- __osx >=11.0
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -7659,84 +7493,77 @@ packages:
- vc14_runtime >=14.29.30139
- ucrt >=10.0.20348.0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls:
- pkg:pypi/rpds-py?source=hash-mapping
size: 251319
timestamp: 1740153103392
-- conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.4.2-py310h9065425_0.conda
- sha256: 64d9aa18c31b7bf9b5f6131afcd9ed5ab956eba0fe54d9d74d6303ac19d5bb6d
- md5: cbde66f7f4f8b6231de8614f619a95e1
+- conda: https://conda.anaconda.org/conda-forge/linux-64/ruff-0.9.9-py310h8851ac2_0.conda
+ sha256: d54c94dabed40dfaac39eed8abfe984565cac9b98211e65b0aaef4abc7baaf21
+ md5: 5858183d8265e53d813f211034aa76e4
depends:
- - libgcc-ng >=12
- - libstdcxx-ng >=12
+ - __glibc >=2.17,<3.0.a0
+ - libgcc >=13
+ - libstdcxx >=13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
+ constrains:
+ - __glibc >=2.17
license: MIT
license_family: MIT
purls:
- pkg:pypi/ruff?source=hash-mapping
- size: 6305544
- timestamp: 1714088914090
-- conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.4.2-py310h4a464a8_0.conda
- sha256: aa1a8e035e114b44c5855a8bc8e2b20cb164e66b17a0d8d3df182830ee0e776f
- md5: f950d318915d12ea4d0766431ac820c1
+ size: 8833469
+ timestamp: 1741052377867
+- conda: https://conda.anaconda.org/conda-forge/osx-64/ruff-0.9.9-py310h9dc1ea1_0.conda
+ sha256: 98bd018b2166d7e251de4d5b50c2bca696d5b0fa3a30ab8ef49e152a4eddce29
+ md5: 557b734112353bd913dccfad0c2d170b
depends:
- - __osx >=10.9
- - libcxx >=16
+ - __osx >=10.13
+ - libcxx >=18
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
constrains:
- - __osx >=10.12
- arch: x86_64
- platform: osx
+ - __osx >=10.13
license: MIT
license_family: MIT
purls:
- pkg:pypi/ruff?source=hash-mapping
- size: 6093722
- timestamp: 1714089668361
-- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruff-0.4.2-py310h275c10e_0.conda
- sha256: 1812ffbcd86a2b0913f9302d1616f4aa042be5a2c4a98214fe0c2281f3bfee51
- md5: 00e1b0cba8e276e87ecfaa206406301a
+ size: 8148295
+ timestamp: 1741052399938
+- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ruff-0.9.9-py310hdcf0d46_0.conda
+ sha256: 5dacb1a80b426b1bc25acff74412b50825eaa8c9829f292de499d2866d48671e
+ md5: 387c7bef8f4c5b1aac49094ec333771e
depends:
- __osx >=11.0
- - libcxx >=16
+ - libcxx >=18
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
constrains:
- __osx >=11.0
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls:
- pkg:pypi/ruff?source=hash-mapping
- size: 5839236
- timestamp: 1714089852161
-- conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.4.2-py310h7f1804c_0.conda
- sha256: f7a93bba671f185de24a0b2562ca32294fc88deec50714a1489ffeec0f359fb1
- md5: 21d829b02be70b9d3244ea118b9b3544
+ size: 7759023
+ timestamp: 1741053034909
+- conda: https://conda.anaconda.org/conda-forge/win-64/ruff-0.9.9-py310h090d742_0.conda
+ sha256: c3b16e4182f713e1fc063a8c11f804e7e1ce1d5673ba170630f44360a5dcedac
+ md5: 8574062c445d472c9ce7935c7c2294a2
depends:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls:
- pkg:pypi/ruff?source=hash-mapping
- size: 6229260
- timestamp: 1714089878264
+ size: 7924341
+ timestamp: 1741053302702
- conda: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.6.1-py310h27f47ee_0.conda
sha256: 5c865487412b900d0abeb934907e5357c4a6cad19093316701ffd575980d0c54
md5: 618ec5a8500fb53e8e52785e06d239f4
@@ -8152,8 +7979,6 @@ packages:
- libgcc >=13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
license: Apache-2.0
license_family: Apache
purls:
@@ -8167,8 +7992,6 @@ packages:
- __osx >=10.13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: Apache-2.0
license_family: Apache
purls:
@@ -8183,8 +8006,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: Apache-2.0
license_family: Apache
purls:
@@ -8200,8 +8021,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: Apache-2.0
license_family: Apache
purls:
@@ -8213,7 +8032,7 @@ packages:
version: 4.67.1
sha256: 26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2
requires_dist:
- - colorama ; platform_system == 'Windows'
+ - colorama ; sys_platform == 'win32'
- pytest>=6 ; extra == 'dev'
- pytest-cov ; extra == 'dev'
- pytest-timeout ; extra == 'dev'
@@ -8288,8 +8107,6 @@ packages:
- libstdcxx >=13
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls:
@@ -8305,8 +8122,6 @@ packages:
- libcxx >=17
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -8323,8 +8138,6 @@ packages:
- python >=3.10,<3.11.0a0
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls:
@@ -8341,8 +8154,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls:
@@ -8431,8 +8242,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- pyyaml >=3.10
- arch: x86_64
- platform: linux
license: Apache-2.0
license_family: APACHE
purls:
@@ -8447,8 +8256,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- pyyaml >=3.10
- arch: x86_64
- platform: osx
license: Apache-2.0
license_family: APACHE
purls:
@@ -8464,8 +8271,6 @@ packages:
- python >=3.10,<3.11.0a0 *_cpython
- python_abi 3.10.* *_cp310
- pyyaml >=3.10
- arch: arm64
- platform: osx
license: Apache-2.0
license_family: APACHE
purls:
@@ -8479,8 +8284,6 @@ packages:
- python >=3.10,<3.11.0a0
- python_abi 3.10.* *_cp310
- pyyaml >=3.10
- arch: x86_64
- platform: win
license: Apache-2.0
license_family: APACHE
purls:
@@ -8660,8 +8463,6 @@ packages:
md5: 4cb3ad778ec2d5a7acbdf254eb1c42ae
depends:
- libgcc-ng >=9.4.0
- arch: x86_64
- platform: linux
license: MIT
license_family: MIT
purls: []
@@ -8670,8 +8471,6 @@ packages:
- conda: https://conda.anaconda.org/conda-forge/osx-64/yaml-0.2.5-h0d85af4_2.tar.bz2
sha256: 5301417e2c8dea45b401ffee8df3957d2447d4ce80c83c5ff151fc6bfe1c4148
md5: d7e08fcf8259d742156188e8762b4d20
- arch: x86_64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -8680,8 +8479,6 @@ packages:
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/yaml-0.2.5-h3422bc3_2.tar.bz2
sha256: 93181a04ba8cfecfdfb162fc958436d868cc37db504c58078eab4c1a3e57fbb7
md5: 4bb3f014845110883a3c5ee811fd84b4
- arch: arm64
- platform: osx
license: MIT
license_family: MIT
purls: []
@@ -8693,8 +8490,6 @@ packages:
depends:
- vc >=14.1,<15.0a0
- vs2015_runtime >=14.16.27012
- arch: x86_64
- platform: win
license: MIT
license_family: MIT
purls: []
@@ -8709,8 +8504,6 @@ packages:
- libgcc >=13
- libsodium >=1.0.20,<1.0.21.0a0
- libstdcxx >=13
- arch: x86_64
- platform: linux
license: MPL-2.0
license_family: MOZILLA
purls: []
@@ -8724,8 +8517,6 @@ packages:
- krb5 >=1.21.3,<1.22.0a0
- libcxx >=18
- libsodium >=1.0.20,<1.0.21.0a0
- arch: x86_64
- platform: osx
license: MPL-2.0
license_family: MOZILLA
purls: []
@@ -8739,8 +8530,6 @@ packages:
- krb5 >=1.21.3,<1.22.0a0
- libcxx >=18
- libsodium >=1.0.20,<1.0.21.0a0
- arch: arm64
- platform: osx
license: MPL-2.0
license_family: MOZILLA
purls: []
@@ -8755,8 +8544,6 @@ packages:
- ucrt >=10.0.20348.0
- vc >=14.2,<15
- vc14_runtime >=14.29.30139
- arch: x86_64
- platform: win
license: MPL-2.0
license_family: MOZILLA
purls: []
diff --git a/pyikt/stream/cluster/utils/Graphviz.py b/pyikt/stream/cluster/utils/Graphviz.py
deleted file mode 100644
index 9fd756c..0000000
--- a/pyikt/stream/cluster/utils/Graphviz.py
+++ /dev/null
@@ -1,129 +0,0 @@
-class Graphviz(object):
- def __init__(self):
- self.internal_color = "lavenderblush4"
- self.colors = [
- "aquamarine", "bisque", "blue", "blueviolet", "brown", "cadetblue",
- "chartreuse", "coral", "cornflowerblue", "crimson", "darkgoldenrod",
- "darkgreen", "darkkhaki", "darkmagenta", "darkorange", "darkred",
- "darksalmon", "darkseagreen", "darkslateblue", "darkslategrey",
- "darkviolet", "deepskyblue", "dodgerblue", "firebrick",
- "forestgreen", "gainsboro", "ghostwhite", "gold", "goldenrod",
- "gray", "grey", "green", "greenyellow", "honeydew", "hotpink",
- "indianred", "indigo", "ivory", "khaki", "lavender",
- "lavenderblush", "lawngreen", "lemonchiffon", "lightblue",
- "lightcoral", "lightcyan", "lightgoldenrodyellow", "lightgray",
- "lightgreen", "lightgrey", "lightpink", "lightsalmon",
- "lightseagreen", "lightskyblue", "lightslategray", "lightslategrey",
- "lightsteelblue", "lightyellow", "limegreen", "linen", "magenta",
- "maroon", "mediumaquamarine", "mediumblue", "mediumorchid",
- "mediumpurple", "mediumseagreen", "mediumslateblue",
- "mediumturquoise", "midnightblue", "mintcream", "mistyrose",
- "moccasin", "navajowhite", "navy", "oldlace", "olive", "olivedrab",
- "orange", "orangered", "orchid", "palegoldenrod", "palegreen",
- "paleturquoise", "palevioletred", "papayawhip", "peachpuff", "peru",
- "pink", "powderblue", "purple", "red", "rosybrown", "royalblue",
- "saddlebrown", "salmon", "sandybrown", "seagreen", "seashell",
- "sienna", "silver", "skyblue", "slateblue", "slategray",
- "slategrey", "snow", "springgreen", "steelblue", "tan", "teal",
- "thistle", "tomato", "violet", "wheat", "burlywood", "chocolate"]
- self.color_map = {}
- self.color_counter = 0
-
- def format_id(self, ID):
- if not ID.startswith("id"):
- return ("id%s" % ID).replace('-', '')\
- .replace('#', '_HASH_').replace('.', '_DOT_')
- else:
- return ("%s" % ID).replace('-', '')\
- .replace('#', '_HASH_').replace('.', '_DOT_')
-
- def clean_label(self, s):
- return s.replace("[/:.]", "_")
-
- def get_node_label(self, node):
- lbl = []
- lbl.append(self.format_id(node.id))
- lbl.append('
')
- lbl.append('num pts: %d' % len(node.leaves()))
- lbl.append('
')
- try:
- lbl.append('purity: %f' % node.purity())
- except Exception:
- pass
- try:
- lbl.append('
')
- lbl.append('across: %s' % node.best_across_debug)
- except Exception:
- pass
- return ''.join(lbl)
-
- def get_color(self, lbl):
- if lbl in self.color_map:
- return self.color_map[lbl]
- else:
- self.color_map[lbl] = self.colors[self.color_counter]
- self.color_counter = (self.color_counter + 1) % len(self.colors)
- return self.color_map[lbl]
-
- def format_graphiz_node(self, node):
- """Format a graphviz node for printing."""
- s = []
- color = self.internal_color
- try:
- if node.purity() == 1.0:
- if hasattr(node, 'pts'):
- curr_node = node
- while curr_node.pts == None:
- curr_node = curr_node.children[0]
- if len(curr_node.pts)>0:
- w_gt = [x for x in curr_node.pts if x[1] and x[1] != "None"]
- if w_gt:
- color = self.get_color(w_gt[0][0])
- else:
- color = self.get_color('None')
- except Exception:
- pass
- shape = 'point'
-
- if node.parent is None:
- s.append(
- '\n%s[shape=%s;style=filled;width=1;color=%s;label=<%s
%s
>]'
- % (self.format_id(node.id), shape, color,
- self.get_node_label(node), color))
- s.append(
- '\nROOTNODE[shape=star;style=filled;color=gold;label=]')
- s.append('\nROOTNODE->%s' % self.format_id(node.id))
- else:
- leaf_m = ''
- if hasattr(node, 'pts') and node.pts and len(node.pts) > 0:
- if hasattr(node.pts[0][0], 'mid'):
- leaf_m = '%s|%s' % (node.pts[0][0].mid, node.pts[0][0].gt) \
- if node.is_leaf() else ''
- else:
- leaf_m = '%s|%s' % (node.pts[0][1], node.pts[0][0]) \
- if node.is_leaf() else ''
- s.append('\n%s[shape=%s;style=filled;width=1;color=%s;label=<%s
'
- '%s
%s
>]'
- % (self.format_id(node.id), shape, color,
- self.get_node_label(node), color, leaf_m))
- s.append('\n%s->%s' % (self.format_id(node.parent.id),
- self.format_id(node.id)))
- return ''.join(s)
-
- def graphviz_tree(self, root,):
- """Return a graphviz tree as a string."""
- s = []
- s.append('digraph TreeStructure {\n')
- s.append(self.format_graphiz_node(root))
- for d in root.descendants():
- s.append(self.format_graphiz_node(d))
- s.append('\n}')
- return ''.join(s)
-
- @staticmethod
- def write_tree(root,filename):
- """Write a graphviz tree to a file."""
- gv = Graphviz()
- tree = gv.graphviz_tree(root)
- with open(filename, 'w') as fout:
- fout.write(tree)
diff --git a/pyikt/stream/cluster/utils/deltasep_utils.py b/pyikt/stream/cluster/utils/deltasep_utils.py
deleted file mode 100644
index 8913f35..0000000
--- a/pyikt/stream/cluster/utils/deltasep_utils.py
+++ /dev/null
@@ -1,297 +0,0 @@
-"""
-Copyright (C) 2017 University of Massachusetts Amherst.
-This file is part of "xcluster"
-http://github.com/iesl/xcluster
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-
-"""
-Utilities for creating delta separated data sets.
-"""
-import numpy as np
-
-
-def gen_k_centers(k, dim):
- """Create a k cluster data set with required separation.
-
- For the purposes of validating a proof, generate each cluster center such
- that it is at least 4 * delta away from any other cluster for some value of
- delta > 0.
-
- Args:
- k - the number of clusters.
- dim - (optional) the dimension of the points.
-
- Returns:
- A list of 2 cluster centers and a value of delta such that the clusters
- centers are 4 * delta away form each other.
- """
- delta = abs(np.random.normal(0.0, 5.0))
- eps = 0.001
- centers = []
- for i in range(k):
- c = np.random.multivariate_normal(np.zeros(dim), np.identity(dim))
- if len(centers):
- c1 = centers[0]
- x = np.random.multivariate_normal(c1, np.identity(c1.size)) - c1
- direction = x / np.linalg.norm(x)
- centers.append(c1 + 2.0 * i * delta * direction + eps)
- else:
- centers.append(c)
- return centers, delta
-
-
-
-def create_dataset(dims, size, num_clusters=20):
- """Create a delta separated data set.
-
- Generate a set of centers for the clusters and from each center draw size
- number of points that constitute the points in that cluster. Then return
- a dataset of all points.
-
- Args:
- dims - (int) the dimention of all data points.
- size - (int) the number of points to generate for each cluster.
- num_clusters - (int) the number of clusters.
- """
- clusters, delta = gen_k_centers(num_clusters, dims)
- return _create_constrained_dataset(clusters, delta, size)
-
-
-def _create_constrained_dataset(centers, delta, size):
- """Create a delta-separated dataset.
-
- For each of the centers draw size number of points. No two points may be
- farther than delta away form each other. Thus, to generate each point,
- choosea random direction and random distance from the center (of up to 0.5
- delta).
-
- Args:
- centers - a list of cluster centers.
- delta - the maximum distance between two points in the same cluster.
- size - the number of points to draw per cluster.
-
- Returns:
- A list of points that represents the dataset.
- """
- dataset = []
- count = 0
- for i, c in enumerate(centers):
- for j in range(size):
- x = np.random.multivariate_normal(c, np.identity(np.size(c))) - c
- direction = x / np.linalg.norm(x)
- magnitude = np.random.uniform(0.0, 0.5 * delta)
- # magnitude = np.random.uniform(0.0, delta) # NOT DEL-SEPARATED
- vec = c + magnitude * direction
- vec = np.append(vec, i)
- vec = np.append(vec, count)
- dataset.append(vec)
- count += 1
- return np.array(dataset)
-
-
-def gen_4_normal():
- """Create 4 cluster centers.
-
- Create gaussians centered at (1,1), (1,-1), (-1,-1) and (-1,1). Each has
- standard covariance.
-
- Args:
- None
-
- Returns:
- A list of the four cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([1.0, 1.0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]])),
- np.random.multivariate_normal(mean=np.array([1.0, -1.0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]])),
- np.random.multivariate_normal(mean=np.array([-1.0, -1.0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]])),
- np.random.multivariate_normal(mean=np.array([-1.0, 1.0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]]))]
-
-
-def _4_normal_spread():
- """Create 4 cluster centers.
-
- Create gaussians centered at (10,10), (10,-10), (-10,-10) and (-10,10).
- Each has standard covariance.
-
- Args:
- None
-
- Returns:
- A list of the four cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([10.0, 10.0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]])),
- np.random.multivariate_normal(mean=np.array([10.0, -10.0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]])),
- np.random.multivariate_normal(mean=np.array([-10.0, -10.0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]])),
- np.random.multivariate_normal(mean=np.array([-10.0, 10.0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]]))]
-
-
-def _5x5_grid_clusters():
- """Create a 5x5 grid of cluster centers.
-
- Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
- gaussian with standard covariance
-
- Args:
- None
-
- Returns:
- A list of cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([i, j]), cov=np.array([[1.0, 0.0],
- [0.0, 1.0]]))
- for i in range(5)
- for j in range(5)]
-
-
-def _5x5_grid_clusters_spread():
- """Create a 5x5 grid of cluster centers.
-
- Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
- gaussian with standard covariance
-
- Args:
- None
-
- Returns:
- A list of cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0],
- [0.0, 1.0]]))
- for i in range(5)
- for j in range(5)]
-
-
-def _5x5_grid_clusters_close():
- """Create a 5x5 grid of cluster centers.
-
- Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
- gaussian with standard covariance
-
- Args:
- None
-
- Returns:
- A list of cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0],
- [0.0, 1.0]]))
- for i in range(5)
- for j in range(5)]
-
-
-def _2x3_grid_clusters_close():
- """Create a 3x3 grid of cluster centers.
-
- Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
- gaussian with standard covariance
-
- Args:
- None
-
- Returns:
- A list of cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0],
- [0.0, 1.0]]))
- for i in range(2)
- for j in range(3)]
-
-
-def _2x3_grid_clusters_spread():
- """Create a 3x3 grid of cluster centers.
-
- Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
- gaussian with standard covariance
-
- Args:
- None
-
- Returns:
- A list of cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0],
- [0.0, 1.0]]))
- for i in range(2)
- for j in range(3)]
-
-
-def _10x10_grid_clusters_close():
- """Create a 3x3 grid of cluster centers.
-
- Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
- gaussian with standard covariance
-
- Args:
- None
-
- Returns:
- A list of cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0],
- [0.0, 1.0]]))
- for i in range(10)
- for j in range(10)]
-
-
-def _10x10_grid_clusters_spread():
- """Create a 3x3 grid of cluster centers.
-
- Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a
- gaussian with standard covariance
-
- Args:
- None
-
- Returns:
- A list of cluster centers.
- """
- return [np.random.multivariate_normal(mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0],
- [0.0, 1.0]]))
- for i in range(10)
- for j in range(10)]
-
-
-def _random_standard_centers(n=100):
- """Create random cluster centers.
-
- Create n cluster centers randomly. Each cluster center is a draw from a
- gaussian distribution centered at (0,0) with standard covariance.
-
- Args:
- n - optional; the number of centers to draw (default 100).
-
- Returns:
- A list of cluster centers.
- """
- generator = np.random.multivariate_normal(mean=np.array([0, 0]),
- cov=np.array([[1.0, 0.0], [0.0, 1.0]]))
- return [np.random.multivariate_normal(mean=pt, cov=np.array([[1.0, 0.0], [0.0, 1.0]]))
- for pt in generator.rvs(size=n)]
-
-
-def _from_file(filename):
- with open(filename, 'r') as f:
- clustering = []
- for line in f:
- splits = line.split('\t')
- l, vec = int(splits[0]), np.array([float(x) for x in splits[1:]])
- clustering.append((vec, l))
- return clustering
diff --git a/pyikt/stream/cluster/utils/logger.py b/pyikt/stream/cluster/utils/logger.py
deleted file mode 100644
index 1b73494..0000000
--- a/pyikt/stream/cluster/utils/logger.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- Copyright (c) 2021 Xin Han
-
- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/.
-"""
-# coding: utf-8
-
-import logging
-from logging import handlers
-
-
-class Logger(object):
- level_relations = {
- 'debug': logging.DEBUG,
- 'info': logging.INFO,
- 'warning': logging.WARNING,
- 'error': logging.ERROR,
- 'crit': logging.CRITICAL
- }
-
- def __init__(self, filename,
- level='info',
- when='D',
- backCount=25,
- fmt='%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s'):
- self.logger = logging.getLogger(filename)
- format_str = logging.Formatter(fmt)
- self.logger.setLevel(self.level_relations.get(level))
- console_handler = logging.StreamHandler()
- console_handler.setFormatter(format_str)
- file_handler = handlers.TimedRotatingFileHandler(
- filename=filename, when=when, backupCount=backCount, encoding='utf-8')
- file_handler.setFormatter(format_str)
- self.logger.addHandler(console_handler)
- self.logger.addHandler(file_handler)
-
-
-if __name__ == '__main__':
- log = Logger('all.log', level='debug')
- log.logger.debug('debug')
- log.logger.info('info')
- log.logger.warning('warning')
- log.logger.error('error')
- Logger('error.log', level='error').logger.error('error')
diff --git a/pyproject.toml b/pyproject.toml
index 8ba0e75..24563b0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
[project]
-name = "pyikt"
+name = "ikpykit"
dynamic = ["version"]
description = "Isolation Kernel Toolkit in Python."
@@ -47,10 +47,10 @@ dependencies = [
requires-python = ">=3.9"
[project.urls]
-Homepage = "https://isolationkernel.github.io/pyikt/"
-Repository = "https://github.com/IsolationKernel/pyikt"
-Documentation = "https://isolationkernel.github.io/pyikt/"
-"Release Notes" = "https://isolationkernel.github.io/pyikt/latest/releases/releases"
+Homepage = "https://isolationkernel.github.io/ikpykit/"
+Repository = "https://github.com/IsolationKernel/ikpykit"
+Documentation = "https://isolationkernel.github.io/ikpykit/"
+"Release Notes" = "https://isolationkernel.github.io/ikpykit/latest/releases/releases"
[project.license]
@@ -62,14 +62,14 @@ build-backend = "setuptools.build_meta"
[tool.setuptools_scm]
-version_file = "pyikt/_version.py"
+version_file = "ikpykit/_version.py"
local_scheme = "no-local-version"
[tool.setuptools.packages.find]
where = ["."] # list of folders that contain the packages (["."] by default)
-include = ["pyikt", "pyikt*"]
+include = ["ikpykit", "ikpykit*"]
exclude = [
- "pyikt/**/tests/*",
+ "ikpykit/**/tests/*",
] # exclude packages matching these glob patterns (empty by default)
namespaces = false # to disable scanning PEP 420 namespaces (true by default)
@@ -84,25 +84,25 @@ numba = ">=0.54"
numpy = "<=1.22"
[tool.pixi.pypi-dependencies]
-pyikt = { path = ".", editable = true }
+ikpykit = { path = ".", editable = true }
[tool.pixi.feature.lint.dependencies]
# The version below should be aligned with the one of `.pre-commit-config.yaml`
-black = "23.3.0"
+black = "24.8.0"
pre-commit = "3.7.1"
-ruff = "0.4.2"
+ruff = "0.9.9"
[tool.pixi.feature.lint.tasks]
-black = { cmd = "black --check --diff pyikt && black --check --diff examples" }
-ruff = { cmd = "ruff check --output-format=full pyikt && ruff check --output-format=full examples" }
-lint = { depends_on = ["black", "ruff"] }
+black = { cmd = "black --check --diff ikpykit && black --check --diff examples" }
+ruff = { cmd = "ruff check --output-format=full ikpykit && ruff check --output-format=full examples" }
+lint = { depends-on = ["black", "ruff"] }
[tool.pixi.feature.test.dependencies]
pytest = "*"
pytest-cov = "*"
[tool.pixi.feature.test.tasks]
-test = { cmd = "pytest -vsl --cov=pyikt --cov-report=xml pyikt" }
+test = { cmd = "pytest -vsl --cov=ikpykit --cov-report=xml ikpykit" }
[tool.pixi.feature.doc.dependencies]
mike = "2.1.3"
@@ -113,7 +113,7 @@ mkdocstrings = "0.26.1"
mkdocstrings-python = "1.11.1"
notebook = "6.4.12"
jupyter_contrib_nbextensions = "0.7.0"
-black = "23.3.0"
+black = "24.8.0"
[tool.pixi.feature.doc.tasks]
clean-doc = { cmd = "rm -rf site" }
@@ -136,7 +136,7 @@ exclude = '''
| \.vscode
)/
'''
-force-exclude = "pyikt/_version.py"
+force-exclude = "ikpykit/_version.py"
[tool.ruff]
# max line length for black
@@ -149,7 +149,7 @@ exclude = [
"doc/_build",
"doc/auto_examples",
"build",
- "pyikt/_version.py",
+ "ikpykit/_version.py",
]
[tool.ruff.lint]