diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..7e5e05e --- /dev/null +++ b/.gitignore @@ -0,0 +1,166 @@ +# custom ignores to prevent including cubes files +*.npy + +# custom ignores to prevent vscode files being included +.vscode + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ \ No newline at end of file diff --git a/README.md b/README.md index 4a12b83..aae9bdc 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,6 @@ The code includes some doc strings to help you understand what it does, but in s To generate all combinations of n cubes, we first calculate all possible n-1 shapes based on the same algorithm. We begin by taking all of the n-1 shapes, and for each of these add new cubes in any possible free locations. For each of these potential new shapes, we test each rotation of this shape to see if it's been seen before. Entirely new shapes are added to a set of all shapes, to check future candidates. -In order to check slightly faster than simply comparing arrays, each shape is converted into a shortened run length encoding form, which allows hashes to be computed, so we can make use of the set datastructure. - ## Running the code With python installed, you can run the code like this: @@ -21,13 +19,19 @@ With python installed, you can run the code like this: Where n is the number of cubes you'd like to calculate. If you specify `--cache` then the program will attempt to load .npy files that hold all the pre-computed cubes for n-1 and then n. If you specify `--no-cache` then everything is calculated from scratch, and no cache files are stored. +## Testing your changes +If you are contributing to the Python version of this project, you can find some unit tests in the tests folder. +These can be run with `python -m unittest`. These tests are not complete or rigorous, but they might help spot obvious errors in any changes you make. + ## Pre-computed cache files -You can download the cache files for n=3 to n=11 from [here](https://drive.google.com/drive/folders/1Ls3gJCrNQ17yg1IhrIav70zLHl858Fl4?usp=drive_link). If you manage to calculate any more sets, please feel free to save them as an npy file and I'll upload them! +You can download the cache files for n=3 to n=12 from [here](https://drive.google.com/drive/folders/1Ls3gJCrNQ17yg1IhrIav70zLHl858Fl4?usp=drive_link). If you manage to calculate any more sets, please feel free to save them as an npy file and I'll upload them! ## Improving the code -This was just a bit of fun, and as soon as it broadly worked, I stopped! This code could be made a lot better, and actually the whole point of the video was to get people thinking and have a mess around yourselves. Some things you might think about: -- Another language like c or java would be substantially faster -- Other languages would also have better support for multi-threading, which would be a transformative speedup +This repo already has some improvements included, and will happily accept more via pull request. +Some things you might think about: +- C++ and Rust implementations are currently in development; if you would like to contribute to these, look at the pull requests (or of course feel free to start your own). +- The main limiting factor at this time seems to be memory usage: at n=14+ you need hundreds of GBs just to store the cubes, so keeping them all in main memory gets difficult. +- Distributing the computation across many systems would allow us to scale horizontally rather than vertically, but it opens questions of how to do so without each system having a full copy of all the cubes, and how to manage the large quantities of data. - Calculating 24 rotations of a cube is slow; the only way to avoid this would be to come up with some rotationally invariant way of comparing cubes. I've not thought of one yet! ## Contributing!
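As a companion to the README above and the cubes.py rewrite below, here is a condensed sketch of the generate-and-deduplicate loop the README describes, built on the helpers this PR introduces (`expand_cube`, `all_rotations`, `pack`). It is an illustration only: the `unique_expansions` helper is hypothetical and does not exist in the repo, and the real logic lives in `generate_polycubes` and `get_canonical_packing` in cubes.py.

```python
import numpy as np
from libraries.resizing import expand_cube
from libraries.rotation import all_rotations
from libraries.packing import pack


def unique_expansions(base_cubes: list[np.ndarray]) -> set[bytes]:
    """Hypothetical helper: canonical ids of every distinct n-cube shape
    reachable by adding one cube to each of the given (n-1)-cube shapes."""
    known_ids: set[bytes] = set()
    for base in base_cubes:
        for candidate in expand_cube(base):
            # Pack all 24 rotations; the candidate is new only if none of
            # them has been seen before. One canonical id is stored per
            # shape: the lexicographically largest packing.
            ids = [pack(rotation) for rotation in all_rotations(candidate)]
            if not any(i in known_ids for i in ids):
                known_ids.add(max(ids))
    return known_ids
```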
diff --git a/cubes.py b/cubes.py index 18e6fb5..44c33fb 100644 --- a/cubes.py +++ b/cubes.py @@ -1,266 +1,129 @@ -import os -import sys -import math import numpy as np import argparse from time import perf_counter +from libraries.cache import get_cache, save_cache, cache_exists +from libraries.resizing import expand_cube +from libraries.packing import pack, unpack +from libraries.renderer import render_shapes +from libraries.rotation import all_rotations -def all_rotations(polycube): - """ - Calculates all rotations of a polycube. - - Adapted from https://stackoverflow.com/questions/33190042/how-to-calculate-all-24-rotations-of-3d-array. - This function computes all 24 rotations around each of the axis x,y,z. It uses numpy operations to do this, to avoid unecessary copies. - The function returns a generator, to avoid computing all rotations if they are not needed. - - Parameters: - polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions - - Returns: - generator(np.array): Yields new rotations of this cube about all axes - - """ - def single_axis_rotation(polycube, axes): - """Yield four rotations of the given 3d array in the plane spanned by the given axes. - For example, a rotation in axes (0,1) is a rotation around axis 2""" - for i in range(4): - yield np.rot90(polycube, i, axes) - - # 4 rotations about axis 0 - yield from single_axis_rotation(polycube, (1,2)) - - # rotate 180 about axis 1, 4 rotations about axis 0 - yield from single_axis_rotation(np.rot90(polycube, 2, axes=(0,2)), (1,2)) - # rotate 90 or 270 about axis 1, 8 rotations about axis 2 - yield from single_axis_rotation(np.rot90(polycube, axes=(0,2)), (0,1)) - yield from single_axis_rotation(np.rot90(polycube, -1, axes=(0,2)), (0,1)) +def log_if_needed(n, total_n): + if (n == total_n or n % 100 == 0): + print(f"\rcompleted {(n / total_n) * 100:.2f}%", end="\n" if n == total_n else "") - # rotate about axis 2, 8 rotations about axis 1 - yield from single_axis_rotation(np.rot90(polycube, axes=(0,1)), (0,2)) - yield from single_axis_rotation(np.rot90(polycube, -1, axes=(0,1)), (0,2)) - -def crop_cube(cube): - """ - Crops an np.array to have no all-zero padding around the edge. - - Given in https://stackoverflow.com/questions/39465812/how-to-crop-zero-edges-of-a-numpy-array - - Parameters: - cube (np.array): 3D Numpy byte array where 1 values indicate polycube positions - - Returns: - np.array: Cropped 3D Numpy byte array equivalent to cube, but with no zero padding - - """ - for i in range(cube.ndim): - cube = np.swapaxes(cube, 0, i) # send i-th axis to front - while np.all( cube[0]==0 ): - cube = cube[1:] - while np.all( cube[-1]==0 ): - cube = cube[:-1] - cube = np.swapaxes(cube, 0, i) # send i-th axis to its original position - return cube -def expand_cube(cube): - """ - Expands a polycube by adding single blocks at all valid locations. - - Calculates all valid new positions of a polycube by shifting the existing cube +1 and -1 in each dimension. - New valid cubes are returned via a generator function, in case they are not all needed. 
- - Parameters: - cube (np.array): 3D Numpy byte array where 1 values indicate polycube positions - - Returns: - generator(np.array): Yields new polycubes that are extensions of cube - - """ - cube = np.pad(cube, 1, 'constant', constant_values=0) - output_cube = np.array(cube) - - xs,ys,zs = cube.nonzero() - output_cube[xs+1,ys,zs] = 1 - output_cube[xs-1,ys,zs] = 1 - output_cube[xs,ys+1,zs] = 1 - output_cube[xs,ys-1,zs] = 1 - output_cube[xs,ys,zs+1] = 1 - output_cube[xs,ys,zs-1] = 1 - - exp = (output_cube ^ cube).nonzero() - - for (x,y,z) in zip(exp[0], exp[1], exp[2]): - new_cube = np.array(cube) - new_cube[x,y,z] = 1 - yield crop_cube(new_cube) - -def generate_polycubes(n, use_cache=False): +def generate_polycubes(n: int, use_cache: bool = False) -> list[np.ndarray]: """ Generates all polycubes of size n - + Generates a list of all possible configurations of n cubes, where all cubes are connected via at least one face. Builds each new polycube from the previous set of polycubes n-1. Uses an optional cache to save and load polycubes of size n-1 for efficiency. - + Parameters: n (int): The size of the polycubes to generate, e.g. all combinations of n=4 cubes. - + use_cache (bool): whether to use cache files. + Returns: list(np.array): Returns a list of all polycubes of size n as numpy byte arrays - """ if n < 1: return [] elif n == 1: - return [np.ones((1,1,1), dtype=np.byte)] + return [np.ones((1, 1, 1), dtype=np.byte)] elif n == 2: - return [np.ones((2,1,1), dtype=np.byte)] - - # Check cache - cache_path = f"cubes_{n}.npy" - if use_cache and os.path.exists(cache_path): - print(f"\rLoading polycubes n={n} from cache: ", end = "") - polycubes = np.load(cache_path, allow_pickle=True) - print(f"{len(polycubes)} shapes") - return polycubes - - # Empty list of new n-polycubes - polycubes = [] - polycubes_rle = set() - - base_cubes = generate_polycubes(n-1, use_cache) - - for idx, base_cube in enumerate(base_cubes): - # Iterate over possible expansion positions - for new_cube in expand_cube(base_cube): - if not cube_exists_rle(new_cube, polycubes_rle): - polycubes.append(new_cube) - polycubes_rle.add(rle(new_cube)) - - if (idx % 100 == 0): - perc = round((idx / len(base_cubes)) * 100,2) - print(f"\rGenerating polycubes n={n}: {perc}%", end="") - - print(f"\rGenerating polycubes n={n}: 100% ") - - if use_cache: - cache_path = f"cubes_{n}.npy" - np.save(cache_path, np.array(polycubes, dtype=object), allow_pickle=True) - - return polycubes - -def rle(polycube): - """ - Computes a simple run-length encoding of a given polycube. This function allows cubes to be more quickly compared via hashing. - - Converts a {0,1} nd array into a tuple that encodes the same shape. The array is first flattened, and then the following algorithm is applied: - - 1) The first three values in tuple contain the x,y,z dimension sizes of the array - 2) Each string of zeros of length n is replaced with a single value -n - 3) Each string of ones of length m is replaced with a single value +m - - Parameters: - polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions - - Returns: - tuple(int): Run length encoded polycube in the form (X, Y, Z, a, b, c, ...)
- + return [np.ones((2, 1, 1), dtype=np.byte)] + + if (use_cache and cache_exists(n)): + results = get_cache(n) + print(f"\nGot polycubes from cache n={n}") + else: + polycubes = generate_polycubes(n-1, use_cache) + + known_ids = set() + done = 0 + print(f"\nHashing polycubes n={n}") + for base_cube in polycubes: + for new_cube in expand_cube(base_cube): + cube_id = get_canonical_packing(new_cube, known_ids) + known_ids.add(cube_id) + log_if_needed(done, len(polycubes)) + done += 1 + log_if_needed(done, len(polycubes)) + + print(f"\nGenerating polycubes from hash n={n}") + results = [] + done = 0 + for cube_id in known_ids: + results.append(unpack(cube_id)) + log_if_needed(done, len(known_ids)) + done += 1 + log_if_needed(done, len(known_ids)) + + if (use_cache and not cache_exists(n)): + save_cache(n, results) + + return results + + +def get_canonical_packing(polycube: np.ndarray, + known_ids: set[bytes]) -> bytes: """ - r = [] - r.extend(polycube.shape) - current = None - val = 0 - for x in polycube.flat: - if current is None: - current = x - val = 1 - pass - elif current == x: - val += 1 - elif current != x: - r.append(val if current == 1 else -val) - current = x - val = 1 - - r.append(val if current == 1 else -val) + Determines if a polycube has already been seen. - return tuple(r) + Considers all possible rotations of a polycube against the existing + ones stored in memory. Returns the id if it's found in the set, + or the maximum id of all rotations if the polycube is new. -def cube_exists_rle(polycube, polycubes_rle): - """ - Determines if a polycube has already been seen. - - Considers all possible rotations of a cube against the existing cubes stored in memory. - Returns True if the cube exists, or False if it is new. - Parameters: - polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions - + polycube (np.array): 3D Numpy byte array where 1 values indicate + cube positions. Must be of type np.int8 + known_ids (set[bytes]): A set of all known polycube ids + Returns: - boolean: True if polycube is already present in the set of all cubes so far.
- + cube_id (bytes): the id for this cube + """ + max_id = b'\x00' + for cube_rotation in all_rotations(polycube): - if rle(cube_rotation) in polycubes_rle: - return True + this_id = pack(cube_rotation) + if (this_id in known_ids): + return this_id + if (this_id > max_id): + max_id = this_id + return max_id - return False if __name__ == "__main__": parser = argparse.ArgumentParser( - prog='Polycube Generator', - description='Generates all polycubes (combinations of cubes) of size n.') + prog='Polycube Generator', + description='Generates all polycubes (combinations of cubes) of size n.') parser.add_argument('n', metavar='N', type=int, - help='The number of cubes within each polycube') - - #Requires python >=3.9 + help='The number of cubes within each polycube') + + # Requires python >=3.9 parser.add_argument('--cache', action=argparse.BooleanOptionalAction) + parser.add_argument('--render', action=argparse.BooleanOptionalAction) args = parser.parse_args() - + n = args.n use_cache = args.cache if args.cache is not None else True + render = args.render if args.render is not None else False # Start the timer t1_start = perf_counter() - all_cubes = list(generate_polycubes(n, use_cache=use_cache)) + all_cubes = generate_polycubes(n, use_cache=use_cache) # Stop the timer t1_stop = perf_counter() - print (f"Found {len(all_cubes)} unique polycubes") - print (f"Elapsed time: {round(t1_stop - t1_start,3)}s") - - -# Code for if you want to generate pictures of the sets of cubes. Will work up to about n=8, before there are simply too many! -# Could be adapted for larger cube sizes by splitting the dataset up into separate images. -# def render_shapes(shapes, path): -# n = len(shapes) -# dim = max(max(a.shape) for a in shapes) -# i = math.isqrt(n) + 1 -# voxel_dim = dim * i -# voxel_array = np.zeros((voxel_dim + i,voxel_dim + i,dim), dtype=np.byte) -# pad = 1 -# for idx, shape in enumerate(shapes): -# x = (idx % i) * dim + (idx % i) -# y = (idx // i) * dim + (idx // i) -# xpad = x * pad -# ypad = y * pad -# s = shape.shape -# voxel_array[x:x + s[0], y:y + s[1] , 0 : s[2]] = shape - -# voxel_array = crop_cube(voxel_array) -# colors = np.empty(voxel_array.shape, dtype=object) -# colors[:] = '#FFD65DC0' + if (render): + render_shapes(all_cubes, "./out") -# ax = plt.figure(figsize=(20,16), dpi=600).add_subplot(projection='3d') -# ax.voxels(voxel_array, facecolors = colors, edgecolor='k', linewidth=0.1) - -# ax.set_xlim([0, voxel_array.shape[0]]) -# ax.set_ylim([0, voxel_array.shape[1]]) -# ax.set_zlim([0, voxel_array.shape[2]]) -# plt.axis("off") -# ax.set_box_aspect((1, 1, voxel_array.shape[2] / voxel_array.shape[0])) -# plt.savefig(path + ".png", bbox_inches='tight', pad_inches = 0) + print(f"\nFound {len(all_cubes)} unique polycubes") + print(f"\nElapsed time: {round(t1_stop - t1_start,3)}s") diff --git a/libraries/cache.py b/libraries/cache.py new file mode 100644 index 0000000..311e243 --- /dev/null +++ b/libraries/cache.py @@ -0,0 +1,81 @@ +import os +import numpy as np + +cache_path_fstring = "cubes_{0}.npy" + + +def cache_exists(n: int) -> bool: + """ + Checks if a cache file with the standard name exists for a given polycube size + + Parameters: + n (int): the size of polycube to search for + + Returns: + bool: whether that cache exists + + """ + cache_path = cache_path_fstring.format(n) + return os.path.exists(cache_path) + + +def get_cache_raw(cache_path: str) -> list[np.ndarray]: + """ + Loads a cache file from a given pathname + + Parameters: + cache_path (str): the file location to look for the
cache file + + Returns: + list[np.ndarray]: the list of polycubes from the cache + + """ + if os.path.exists(cache_path): + + polycubes = np.load(cache_path, allow_pickle=True) + + return polycubes + else: + return None + + +def get_cache(n: int) -> np.ndarray: + """ + Loads a cache file for a given size of polycube + + Parameters: + n (int): the size of polycube to load the cache of + + Returns: + list[np.ndarray]: the list of polycubes of that size from the cache + + """ + cache_path = cache_path_fstring.format(n) + print(f"\rLoading polycubes n={n} from cache: ", end="") + polycubes = get_cache_raw(cache_path) + print(f"{len(polycubes)} shapes") + return polycubes + + +def save_cache_raw(cache_path: str, polycubes: list[np.ndarray]) -> None: + """ + Saves a cache file at a given pathname + + Parameters: + cache_path (str): the file location to save the cache file + polycubes (list[np.ndarray]): the polycubes to be cached + """ + np.save(cache_path, np.array(polycubes, dtype=object), allow_pickle=True) + + +def save_cache(n: int, polycubes: np.ndarray) -> None: + """ + Saves a cache file for a given polycube size + + Parameters: + n (int): the size of the polycubes to be cached + polycubes (list[np.ndarray]): the polycubes to be cached + """ + cache_path = cache_path_fstring.format(n) + save_cache_raw(cache_path, polycubes) + print(f"Wrote file for polycubes n={n}") diff --git a/libraries/packing.py b/libraries/packing.py new file mode 100644 index 0000000..60204de --- /dev/null +++ b/libraries/packing.py @@ -0,0 +1,43 @@ +import numpy as np +import math + + +def pack(polycube: np.ndarray) -> bytes: + """ + Converts a 3D ndarray into a single bytes object that uniquely identifies + the polycube, is hashable and comparable, and allows the original + polycube ndarray to be reconstructed. + + Parameters: + polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions, + and 0 values indicate empty space. Must be of type np.int8. + + Returns: + cube_id (bytes): a bytes representation of the polycube + + """ + data = polycube.shape[0].to_bytes(1, 'little') \ + + polycube.shape[1].to_bytes(1, 'little') \ + + polycube.shape[2].to_bytes(1, 'little') \ + + np.packbits(polycube.flatten(), bitorder='little').tobytes() + return data + + +def unpack(cube_id: bytes) -> np.ndarray: + """ + Converts a bytes object back into a 3D ndarray + + Parameters: + cube_id (bytes): a unique bytes object + + Returns: + polycube (np.array): 3D Numpy byte array where 1 values indicate + cube positions + + """ + # Extract shape information + shape = (cube_id[0], cube_id[1], cube_id[2]) + size = shape[0] * shape[1] * shape[2] + polycube = np.unpackbits(np.frombuffer(cube_id[3:], dtype=np.uint8), count=size, bitorder='little').reshape(shape) + return polycube + diff --git a/libraries/renderer.py b/libraries/renderer.py new file mode 100644 index 0000000..9b490e7 --- /dev/null +++ b/libraries/renderer.py @@ -0,0 +1,36 @@ +import math +import numpy as np +import matplotlib.pyplot as plt + +# Code for if you want to generate pictures of the sets of cubes. Will work up to about n=8, before there are simply too many! +# Could be adapted for larger cube sizes by splitting the dataset up into separate images.
+ + def render_shapes(shapes: list[np.ndarray], path: str): + n = len(shapes) + dim = max(max(a.shape) for a in shapes) + i = math.isqrt(n) + 1 + voxel_dim = dim * i + voxel_array = np.zeros((voxel_dim + i, voxel_dim + i, dim), dtype=np.byte) + pad = 1 + for idx, shape in enumerate(shapes): + x = (idx % i) * dim + (idx % i) + y = (idx // i) * dim + (idx // i) + xpad = x * pad + ypad = y * pad + s = shape.shape + voxel_array[x:x + s[0], y:y + s[1], 0: s[2]] = shape + + # voxel_array = crop_cube(voxel_array) + colors = np.empty(voxel_array.shape, dtype=object) + colors[:] = '#FFD65DC0' + + ax = plt.figure(figsize=(20, 16), dpi=600).add_subplot(projection='3d') + ax.voxels(voxel_array, facecolors=colors, edgecolor='k', linewidth=0.1) + + ax.set_xlim([0, voxel_array.shape[0]]) + ax.set_ylim([0, voxel_array.shape[1]]) + ax.set_zlim([0, voxel_array.shape[2]]) + plt.axis("off") + ax.set_box_aspect((1, 1, voxel_array.shape[2] / voxel_array.shape[0])) + plt.savefig(path + ".png", bbox_inches='tight', pad_inches=0) diff --git a/libraries/resizing.py b/libraries/resizing.py new file mode 100644 index 0000000..b06dea2 --- /dev/null +++ b/libraries/resizing.py @@ -0,0 +1,55 @@ +import numpy as np +from typing import Generator + + +def crop_cube(cube: np.ndarray) -> np.ndarray: + """ + Crops an np.array to have no all-zero padding around the edge. + + Given in https://stackoverflow.com/questions/39465812/how-to-crop-zero-edges-of-a-numpy-array + + Parameters: + cube (np.array): 3D Numpy byte array where 1 values indicate polycube positions + + Returns: + np.array: Cropped 3D Numpy byte array equivalent to cube, but with no zero padding + + """ + nonzero_indices = np.where(cube != 0) + cropped_cube = cube[np.min(nonzero_indices[0]):np.max(nonzero_indices[0]) + 1, + np.min(nonzero_indices[1]):np.max(nonzero_indices[1]) + 1, + np.min(nonzero_indices[2]):np.max(nonzero_indices[2]) + 1] + return cropped_cube + + +def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]: + """ + Expands a polycube by adding single blocks at all valid locations. + + Calculates all valid new positions of a polycube by shifting the existing cube +1 and -1 in each dimension. + New valid cubes are returned via a generator function, in case they are not all needed. + + Parameters: + cube (np.array): 3D Numpy byte array where 1 values indicate polycube positions + + Returns: + generator(np.array): Yields new polycubes that are extensions of cube + + """ + cube = np.pad(cube, 1, 'constant', constant_values=0) + output_cube = np.array(cube) + + xs, ys, zs = cube.nonzero() + output_cube[xs+1, ys, zs] = 1 + output_cube[xs-1, ys, zs] = 1 + output_cube[xs, ys+1, zs] = 1 + output_cube[xs, ys-1, zs] = 1 + output_cube[xs, ys, zs+1] = 1 + output_cube[xs, ys, zs-1] = 1 + + exp = (output_cube ^ cube).nonzero() + + for (x, y, z) in zip(exp[0], exp[1], exp[2]): + new_cube = np.array(cube) + new_cube[x, y, z] = 1 + yield crop_cube(new_cube) diff --git a/libraries/rotation.py b/libraries/rotation.py new file mode 100644 index 0000000..e20edb6 --- /dev/null +++ b/libraries/rotation.py @@ -0,0 +1,38 @@ +import numpy as np +from typing import Generator + + +def all_rotations(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: + """ + Calculates all rotations of a polycube. + + Adapted from https://stackoverflow.com/questions/33190042/how-to-calculate-all-24-rotations-of-3d-array. + This function computes all 24 rotations around each of the axes x, y, z. It uses numpy operations to do this, to avoid unnecessary copies.
+ The function returns a generator, to avoid computing all rotations if they are not needed. + + Parameters: + polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions + + Returns: + generator(np.array): Yields new rotations of this cube about all axes + + """ + def single_axis_rotation(polycube, axes): + """Yield four rotations of the given 3d array in the plane spanned by the given axes. + For example, a rotation in axes (0,1) is a rotation around axis 2""" + for i in range(4): + yield np.rot90(polycube, i, axes) + + # 4 rotations about axis 0 + yield from single_axis_rotation(polycube, (1, 2)) + + # rotate 180 about axis 1, 4 rotations about axis 0 + yield from single_axis_rotation(np.rot90(polycube, 2, axes=(0, 2)), (1, 2)) + + # rotate 90 or 270 about axis 1, 8 rotations about axis 2 + yield from single_axis_rotation(np.rot90(polycube, axes=(0, 2)), (0, 1)) + yield from single_axis_rotation(np.rot90(polycube, -1, axes=(0, 2)), (0, 1)) + + # rotate about axis 2, 8 rotations about axis 1 + yield from single_axis_rotation(np.rot90(polycube, axes=(0, 1)), (0, 2)) + yield from single_axis_rotation(np.rot90(polycube, -1, axes=(0, 1)), (0, 2)) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..686a12b --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,4 @@ +from . import test_cache +from . import test_packing +from . import test_resizing +from . import test_rotation \ No newline at end of file diff --git a/tests/test_cache.py b/tests/test_cache.py new file mode 100644 index 0000000..0f57389 --- /dev/null +++ b/tests/test_cache.py @@ -0,0 +1,22 @@ +import unittest +import os +from libraries.cache import get_cache, save_cache +from numpy.testing import assert_array_equal +from .utils import get_test_data + +class CachingTests(unittest.TestCase): + + def test_cache_consistency(self): + test_data = get_test_data() + + save_cache("test_temp", test_data) + reloaded_data = get_cache("test_temp") + + for test, reloaded in zip(test_data, reloaded_data): + assert_array_equal(test, reloaded) + + @classmethod + def tearDownClass(cls): + expected_test_file_name = "cubes_test_temp.npy" + if os.path.exists(expected_test_file_name): + os.remove(expected_test_file_name) \ No newline at end of file diff --git a/tests/test_data.npy b/tests/test_data.npy new file mode 100644 index 0000000..477f6b9 Binary files /dev/null and b/tests/test_data.npy differ diff --git a/tests/test_packing.py b/tests/test_packing.py new file mode 100644 index 0000000..8b61417 --- /dev/null +++ b/tests/test_packing.py @@ -0,0 +1,52 @@ +import unittest +from numpy.testing import assert_array_equal +from libraries.packing import pack, unpack +from .utils import get_test_data + +class PackingTests(unittest.TestCase): + def test_pack_does_not_throw(self): + test_data = get_test_data() + for polycube in test_data: + try: + packed = pack(polycube) + except Exception: + self.fail(f"pack threw on polycube {polycube}") + + def test_unpack_does_not_throw(self): + test_data = get_test_data() + for polycube in test_data: + packed = pack(polycube) + try: + unpacked = unpack(packed) + except Exception: + self.fail(f"unpack threw on packing {packed} for polycube {polycube}") + + def test_pack_hashable(self): + test_data = get_test_data() + for polycube in test_data: + packed = pack(polycube) + try: + hash(packed) + except Exception: + self.fail(f"packing of polycube {polycube} isn't hashable") + + def test_pack_equality(self): + test_data = get_test_data() + for polycube in test_data: + packed = pack(polycube) + 
self.assertEqual(packed, packed, "packed value does not equal itself") + + def test_pack_unique(self): + test_data = get_test_data() + seen = set() + for polycube in test_data: + packed = pack(polycube) + self.assertNotIn(packed, seen) + seen.add(packed) + + def test_pack_symmetric(self): + test_data = get_test_data() + for polycube in test_data: + packed = pack(polycube) + unpacked = unpack(packed) + assert_array_equal(polycube, unpacked, f"packing of polycube isn't symmetric: polycube {polycube} packed to {packed}, which unpacked to {unpacked}") \ No newline at end of file diff --git a/tests/test_resizing.py b/tests/test_resizing.py new file mode 100644 index 0000000..5a95ac0 --- /dev/null +++ b/tests/test_resizing.py @@ -0,0 +1,9 @@ +import unittest +import os +from libraries.resizing import crop_cube, expand_cube +from .utils import get_test_data + +class CroppingTests(unittest.TestCase): + # TODO: + # I am not smart enough to come up with helpful cropping tests right now. + pass \ No newline at end of file diff --git a/tests/test_rotation.py b/tests/test_rotation.py new file mode 100644 index 0000000..4dc66cc --- /dev/null +++ b/tests/test_rotation.py @@ -0,0 +1,26 @@ +import unittest +import numpy as np +from libraries.rotation import all_rotations +from .utils import get_test_data + +class RotatingTests(unittest.TestCase): + def test_rotate_quantity(self): + test_data = get_test_data() + for polycube in test_data: + rots = all_rotations(polycube) + self.assertEqual(len(list(rots)), 24, "all_rotations failed to produce 24 rotations") + + def test_rotate_symmetric(self): + # tests that every rotation of any rotation from an all-rotations set is itself in that set, + # i.e. repeatedly rotating doesn't change the fundamental 24 rotations of a given polycube + test_data = get_test_data() + for polycube in test_data: + base_polycube_rotations = list(all_rotations(polycube)) + for base_cube_rotation in base_polycube_rotations: + for rotated_cube_rotation in all_rotations(base_cube_rotation): + found_match = False + for inner_base_polycube_rotation in base_polycube_rotations: + if np.array_equal(rotated_cube_rotation, inner_base_polycube_rotation): + found_match = True + break + self.assertTrue(found_match, "rotation of a rotated polycube wasn't in the set of all rotations for the initial polycube; rotating it twice has changed its shape") \ No newline at end of file diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..1399c21 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,4 @@ +from libraries.cache import get_cache_raw + +def get_test_data(): + return get_cache_raw('./tests/test_data.npy') \ No newline at end of file
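For reference, here is a minimal standalone check of the pack/unpack round trip that the tests above exercise. The 2×2×1 example shape is illustrative only and should be run from the repo root:

```python
import numpy as np
from libraries.packing import pack, unpack

# A 2x2x1 square of four cubes, in the same byte-array form the generator uses.
square = np.ones((2, 2, 1), dtype=np.byte)

cube_id = pack(square)
# The first three bytes of the id encode the array's shape...
assert cube_id[:3] == b'\x02\x02\x01'
# ...and unpacking reproduces the original voxel values.
assert np.array_equal(unpack(cube_id), square)
print("pack/unpack round trip ok")
```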