diff --git a/python/cubes.py b/python/cubes.py
index 1b5647a..719aa68 100644
--- a/python/cubes.py
+++ b/python/cubes.py
@@ -3,10 +3,9 @@ from time import perf_counter
 
 from libraries.cache import get_cache, save_cache, cache_exists
 from libraries.resizing import expand_cube
-from libraries.packing import pack, unpack
+from libraries.packing import packShape, pack_fast
 from libraries.renderer import render_shapes
-from libraries.rotation import all_rotations
-
+from libraries.rotation import all_rotations_fast, get_canon_shape
 
 def log_if_needed(n, total_n):
     if (n == total_n or n % 100 == 0):
@@ -40,34 +39,29 @@ def generate_polycubes(n: int, use_cache: bool = False) -> list[np.ndarray]:
         results = get_cache(n)
         print(f"\nGot polycubes from cache n={n}")
     else:
-        pollycubes = generate_polycubes(n-1, use_cache)
+        polycubes = generate_polycubes(n-1, use_cache)
 
         known_ids = set()
         done = 0
+        results = list()
 
         print(f"\nHashing polycubes n={n}")
-        for base_cube in pollycubes:
+        for base_cube in polycubes:
             for new_cube in expand_cube(base_cube):
-                cube_id = get_canonical_packing(new_cube, known_ids)
+                cube_id, canon_cube = get_canonical_packing(new_cube, known_ids)
+                prevLength = len(known_ids)
                 known_ids.add(cube_id)
-            log_if_needed(done, len(pollycubes))
+                afterLength = len(known_ids)
+                if prevLength != afterLength:
+                    results.append(canon_cube)
+            log_if_needed(done, len(polycubes))
             done += 1
-        log_if_needed(done, len(pollycubes))
-
-        print(f"\nGenerating polycubes from hash n={n}")
-        results = []
-        done = 0
-        for cube_id in known_ids:
-            results.append(unpack(cube_id))
-            log_if_needed(done, len(known_ids))
-            done += 1
-        log_if_needed(done, len(known_ids))
+        log_if_needed(done, len(polycubes))
 
     if (use_cache and not cache_exists(n)):
         save_cache(n, results)
     return results
 
-
 def get_canonical_packing(polycube: np.ndarray,
                           known_ids: set[bytes]) -> bytes:
     """
@@ -87,13 +81,17 @@
     """
 
     max_id = b'\x00'
-    for cube_rotation in all_rotations(polycube):
-        this_id = pack(cube_rotation)
+    curr_cube = polycube
+    orderedShape = get_canon_shape(polycube)
+    packedShape = packShape(orderedShape)
+    for cube_rotation in all_rotations_fast(polycube):
+        this_id = pack_fast(cube_rotation, packedShape)
         if (this_id in known_ids):
-            return this_id
+            return this_id, cube_rotation
         if (this_id > max_id):
             max_id = this_id
-    return max_id
+            curr_cube = cube_rotation
+    return max_id, curr_cube
 
 
 if __name__ == "__main__":
diff --git a/python/libraries/packing.py b/python/libraries/packing.py
index 60204de..b30e4c4 100644
--- a/python/libraries/packing.py
+++ b/python/libraries/packing.py
@@ -1,5 +1,4 @@
 import numpy as np
-import math
 
 
 def pack(polycube: np.ndarray) -> bytes:
@@ -22,6 +21,38 @@ def pack(polycube: np.ndarray) -> bytes:
         + np.packbits(polycube.flatten(), bitorder='little').tobytes()
     return data
 
+def packShape(shape: tuple) -> bytes:
+    """
+    Converts the shape of a 3D numpy array into a single bytes object, in the same way that pack() encodes the shape header
+
+    Parameters:
+    shape (tuple of 3 int): the shape of a 3D numpy array representing a polycube
+
+    Returns:
+    (bytes): a bytes representation of the shape
+
+    """
+    data = shape[0].to_bytes(1, 'little') \
+        + shape[1].to_bytes(1, 'little') \
+        + shape[2].to_bytes(1, 'little')
+    return data
+
+def pack_fast(polycube: np.ndarray, packedShape: bytes) -> bytes:
+    """
+    Converts a 3D ndarray into a single bytes object that uniquely identifies
+    the polycube, is hashable, comparable, and allows the original polycube
+    ndarray to be reconstructed.
+
+    Parameters:
+    polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions,
+        and 0 values indicate empty space. Must be of type np.int8.
+    packedShape (bytes): the bytes representation of the shape, as returned by packShape()
+
+    Returns:
+    cube_id (bytes): a bytes representation of the polycube
+
+    """
+    return packedShape + np.packbits(polycube, bitorder='little').tobytes()
 
 def unpack(cube_id: bytes) -> np.ndarray:
     """
diff --git a/python/libraries/resizing.py b/python/libraries/resizing.py
index 517f5f4..960bb6f 100644
--- a/python/libraries/resizing.py
+++ b/python/libraries/resizing.py
@@ -1,7 +1,6 @@
 import numpy as np
 from typing import Generator
 
-
 def crop_cube(cube: np.ndarray) -> np.ndarray:
     """
     Crops an np.array to have no all-zero padding around the edge.
@@ -22,7 +21,6 @@ def crop_cube(cube: np.ndarray) -> np.ndarray:
         cube = np.swapaxes(cube, 0, i)
     return cube
 
-
def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]:
     """
     Expands a polycube by adding single blocks at all valid locations.
@@ -37,8 +35,10 @@ def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]:
     generator(np.array): Yields new polycubes that are extensions of cube
 
     """
-    cube = np.pad(cube, 1, 'constant', constant_values=0)
-    output_cube = np.array(cube)
+    shape = tuple(el + 2 for el in cube.shape)
+    output_cube = np.zeros(shape, dtype=cube.dtype)
+    output_cube[1:-1, 1:-1, 1:-1] = cube
+    cube = output_cube.copy()
 
     xs, ys, zs = cube.nonzero()
     output_cube[xs+1, ys, zs] = 1
@@ -47,10 +47,42 @@ def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]:
     output_cube[xs, ys-1, zs] = 1
     output_cube[xs, ys, zs+1] = 1
     output_cube[xs, ys, zs-1] = 1
+    output_cube[xs, ys, zs] = 0
 
-    exp = (output_cube ^ cube).nonzero()
-
-    for (x, y, z) in zip(exp[0], exp[1], exp[2]):
-        new_cube = np.array(cube)
-        new_cube[x, y, z] = 1
-        yield crop_cube(new_cube)
+    exp = output_cube.nonzero()
+    bounds = list()
+    bound = np.empty_like(exp[0])
+    for i in range(3):
+        ind = exp[i] == 0
+        bound[ind] = 0
+        bound[~ind] = 1
+        bounds.append(bound.copy())
+        ind = exp[i] == cube.shape[i] - 1
+        bound[ind] = cube.shape[i]
+        bound[~ind] = cube.shape[i] - 1
+        bounds.append(bound.copy())
+
+    n = len(exp[0])
+    for i in range(n):
+        new_cube = cube.copy()
+        new_cube[exp[0][i], exp[1][i], exp[2][i]] = 1
+        yield new_cube[bounds[0][i]:bounds[1][i], bounds[2][i]:bounds[3][i], bounds[4][i]:bounds[5][i]]
+
+
+def test_expand():
+    """
+    Function to test the performance of the expand_cube() function
+    """
+    from time import perf_counter
+
+    n = 1000
+    shape = (4, 3, 2)
+    polycubes = (np.random.random((n,) + shape) > 0.5).astype(np.byte)
+    now = perf_counter()
+    for cube in polycubes:
+        res = list(expand_cube(cube))
+        # res = list(expand_cube_fast(cube))
+    print(perf_counter() - now)
+
+
+if __name__ == "__main__":
+    test_expand()
\ No newline at end of file
diff --git a/python/libraries/rotation.py b/python/libraries/rotation.py
index e20edb6..e8f6235 100644
--- a/python/libraries/rotation.py
+++ b/python/libraries/rotation.py
@@ -2,6 +2,13 @@
 from typing import Generator
 
 
+def single_axis_rotation(polycube, axes):
+    """Yield four rotations of the given 3d array in the plane spanned by the given axes.
+    For example, a rotation in axes (0,1) is a rotation around axis 2"""
+    for i in range(4):
+        yield np.rot90(polycube, i, axes)
+
+
 def all_rotations(polycube: np.ndarray) -> Generator[np.ndarray, None, None]:
     """
     Calculates all rotations of a polycube.
@@ -17,11 +24,6 @@ def all_rotations(polycube: np.ndarray) -> Generator[np.ndarray, None, None]:
     generator(np.array): Yields new rotations of this cube about all axes
 
     """
-    def single_axis_rotation(polycube, axes):
-        """Yield four rotations of the given 3d array in the plane spanned by the given axes.
-        For example, a rotation in axes (0,1) is a rotation around axis 2"""
-        for i in range(4):
-            yield np.rot90(polycube, i, axes)
 
     # 4 rotations about axis 0
     yield from single_axis_rotation(polycube, (1, 2))
@@ -36,3 +38,65 @@ def single_axis_rotation(polycube, axes):
     # rotate about axis 2, 8 rotations about axis 1
     yield from single_axis_rotation(np.rot90(polycube, axes=(0, 1)), (0, 2))
     yield from single_axis_rotation(np.rot90(polycube, -1, axes=(0, 1)), (0, 2))
+
+
+def get_canon_shape(polycube: np.ndarray) -> tuple:
+    """
+    Get the canonical shape of the polycube: its dimensions sorted in descending order
+    """
+    return tuple(sorted(polycube.shape, reverse=True))
+
+
+RotationIndexes = dict()
+def all_rotations_fast(polycube: np.ndarray) -> np.ndarray:
+    """
+    Calculates all rotations of a polycube that have its canonical shape.
+
+    Adapted from https://stackoverflow.com/questions/33190042/how-to-calculate-all-24-rotations-of-3d-array.
+    This function relies on a global dictionary that holds the rotations as index permutations.
+    This is faster than yielding np.rot90 views because all rotations can be applied at once with a single fancy-indexing operation.
+    It also relies on the canonical shape, which reduces the number of rotations that need to be packed.
+
+    Parameters:
+    polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions
+
+    Returns:
+    (np.array): the rotations of the cube that match the canonical shape, in a 4D array acting as a list of 3D cubes.
+
+    """
+    orderedShape = get_canon_shape(polycube)
+    if polycube.shape not in RotationIndexes:
+        n1, n2, n3 = polycube.shape
+        vec = np.arange(n1*n2*n3).reshape(polycube.shape)
+        uniqueRotations = set()
+        rotations = list()
+
+        def func(el):
+            s = el.shape
+            el = tuple(el.ravel().tolist())
+            if el not in uniqueRotations and s == orderedShape:
+                uniqueRotations.add(el)
+                rotations.append(el)
+
+        # 4 rotations about axis 0
+        for el in single_axis_rotation(vec, (1, 2)):
+            func(el)
+
+        # rotate 180 about axis 1, 4 rotations about axis 0
+        for el in single_axis_rotation(np.rot90(vec, 2, axes=(0, 2)), (1, 2)):
+            func(el)
+
+        # rotate 90 or 270 about axis 1, 8 rotations about axis 2
+        for el in single_axis_rotation(np.rot90(vec, axes=(0, 2)), (0, 1)):
+            func(el)
+        for el in single_axis_rotation(np.rot90(vec, -1, axes=(0, 2)), (0, 1)):
+            func(el)
+
+        # rotate about axis 2, 8 rotations about axis 1
+        for el in single_axis_rotation(np.rot90(vec, axes=(0, 1)), (0, 2)):
+            func(el)
+        for el in single_axis_rotation(np.rot90(vec, -1, axes=(0, 1)), (0, 2)):
+            func(el)
+        RotationIndexes[polycube.shape] = np.stack(rotations, axis=0)
+    ind = RotationIndexes[polycube.shape]
+    return polycube.ravel()[ind].reshape((len(ind),) + orderedShape)
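
The packShape()/pack_fast() pair only factors the 3-byte shape header out of pack(), so packing the same array through either path should give byte-for-byte identical ids. A minimal sketch of that check, assuming the python/libraries package from this repository is importable; the example cube is arbitrary and not part of the patch:

import numpy as np
from libraries.packing import pack, packShape, pack_fast

cube = np.zeros((2, 2, 1), dtype=np.int8)   # a small L-shaped polycube
cube[0, 0, 0] = cube[1, 0, 0] = cube[1, 1, 0] = 1

# packShape() builds the same 3-byte shape header that pack() prepends,
# so the two packings of the same array should match exactly.
assert pack(cube) == pack_fast(cube, packShape(cube.shape))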
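
The rewritten expand_cube() replaces the per-candidate crop_cube() call with slice bounds computed once per axis, so each yielded candidate should already carry no all-zero padding as long as the input itself is cropped, which is how generate_polycubes() calls it. A small, hypothetical sanity check of that assumption, again assuming the libraries package is importable:

import numpy as np
from libraries.resizing import crop_cube, expand_cube

cube = np.ones((2, 1, 1), dtype=np.byte)  # the single size-2 polycube, already cropped
for candidate in expand_cube(cube):
    cropped = crop_cube(candidate)
    # Cropping a yielded candidate again should be a no-op.
    assert candidate.shape == cropped.shape
    assert (candidate == cropped).all()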
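
get_canonical_packing() now returns both the id and the rotation that produced it, but the deduplication in generate_polycubes() still depends on the id being rotation-invariant. The sketch below re-derives the id without the known_ids early exit; canonical_id() is a hypothetical stand-in for get_canonical_packing(), and the tromino test cube is chosen only for illustration:

import numpy as np
from libraries.packing import packShape, pack_fast
from libraries.rotation import all_rotations, all_rotations_fast, get_canon_shape

def canonical_id(polycube: np.ndarray) -> bytes:
    # Pack every rotation that has the canonical shape and keep the largest
    # byte string, mirroring the max-id loop in get_canonical_packing().
    packed_shape = packShape(get_canon_shape(polycube))
    return max(pack_fast(rot, packed_shape) for rot in all_rotations_fast(polycube))

tromino = np.zeros((2, 2, 1), dtype=np.byte)
tromino[0, 0, 0] = tromino[1, 0, 0] = tromino[1, 1, 0] = 1

# Every orientation of the same polycube should map to the same canonical id.
ids = {canonical_id(rot) for rot in all_rotations(tromino)}
assert len(ids) == 1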