From 826bb73f059b6d03b1ccfc68d2f2cbc402ba1d36 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Wed, 23 Aug 2023 20:55:14 +0200 Subject: [PATCH 01/11] speed up rotation by stocking indices and --- python/cubes.py | 4 +-- python/libraries/rotation.py | 53 ++++++++++++++++++++++++++++++++---- 2 files changed, 50 insertions(+), 7 deletions(-) diff --git a/python/cubes.py b/python/cubes.py index 1b5647a..4315642 100644 --- a/python/cubes.py +++ b/python/cubes.py @@ -5,7 +5,7 @@ from libraries.resizing import expand_cube from libraries.packing import pack, unpack from libraries.renderer import render_shapes -from libraries.rotation import all_rotations +from libraries.rotation import all_rotations, all_rotations_fast def log_if_needed(n, total_n): @@ -87,7 +87,7 @@ def get_canonical_packing(polycube: np.ndarray, """ max_id = b'\x00' - for cube_rotation in all_rotations(polycube): + for cube_rotation in all_rotations_fast(polycube): this_id = pack(cube_rotation) if (this_id in known_ids): return this_id diff --git a/python/libraries/rotation.py b/python/libraries/rotation.py index e20edb6..b698d10 100644 --- a/python/libraries/rotation.py +++ b/python/libraries/rotation.py @@ -1,6 +1,11 @@ import numpy as np from typing import Generator +def single_axis_rotation(polycube, axes): + """Yield four rotations of the given 3d array in the plane spanned by the given axes. + For example, a rotation in axes (0,1) is a rotation around axis 2""" + for i in range(4): + yield np.rot90(polycube, i, axes) def all_rotations(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: """ @@ -17,11 +22,6 @@ def all_rotations(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: generator(np.array): Yields new rotations of this cube about all axes """ - def single_axis_rotation(polycube, axes): - """Yield four rotations of the given 3d array in the plane spanned by the given axes. 
- For example, a rotation in axes (0,1) is a rotation around axis 2""" - for i in range(4): - yield np.rot90(polycube, i, axes) # 4 rotations about axis 0 yield from single_axis_rotation(polycube, (1, 2)) @@ -36,3 +36,46 @@ def single_axis_rotation(polycube, axes): # rotate about axis 2, 8 rotations about axis 1 yield from single_axis_rotation(np.rot90(polycube, axes=(0, 1)), (0, 2)) yield from single_axis_rotation(np.rot90(polycube, -1, axes=(0, 1)), (0, 2)) + +RotationIndexes={} + +def all_rotations_fast(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: + orderedShape = tuple(sorted(polycube.shape,reverse=True)) + if polycube.shape in RotationIndexes: + ind = RotationIndexes[polycube.shape] + return polycube.ravel()[ind].reshape((len(ind),)+orderedShape) + else: + n1,n2,n3 = polycube.shape + vec = np.arange(n1*n2*n3).reshape(polycube.shape) + uniqueRotations = set() + rotations = list() + + def func(el): + s = el.shape + el = tuple(el.ravel().tolist()) + if not el in uniqueRotations and s == orderedShape: + uniqueRotations.add(el) + rotations.append(el) + + # 4 rotations about axis 0 + for el in single_axis_rotation(vec, (1, 2)): + func(el) + + # rotate 180 about axis 1, 4 rotations about axis 0 + for el in single_axis_rotation(np.rot90(vec, 2, axes=(0, 2)), (1, 2)): + func(el) + + # rotate 90 or 270 about axis 1, 8 rotations about axis 2 + for el in single_axis_rotation(np.rot90(vec, axes=(0, 2)), (0, 1)): + func(el) + for el in single_axis_rotation(np.rot90(vec, -1, axes=(0, 2)), (0, 1)): + func(el) + + # rotate about axis 2, 8 rotations about axis 1 + for el in single_axis_rotation(np.rot90(vec, axes=(0, 1)), (0, 2)): + func(el) + for el in single_axis_rotation(np.rot90(vec, -1, axes=(0, 1)), (0, 2)): + func(el) + + RotationIndexes[polycube.shape] = np.stack(rotations, axis=0) + return polycube.ravel()[RotationIndexes[polycube.shape]].reshape((len(rotations),)+orderedShape) From 27d8b0af0544cc381e0667a522b16ad479b4fea9 Mon Sep 17 00:00:00 2001 From: 
Wagyx Date: Wed, 23 Aug 2023 20:56:13 +0200 Subject: [PATCH 02/11] me being nitpicky --- python/libraries/resizing.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/libraries/resizing.py b/python/libraries/resizing.py index 517f5f4..1f3a30d 100644 --- a/python/libraries/resizing.py +++ b/python/libraries/resizing.py @@ -38,7 +38,7 @@ def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]: """ cube = np.pad(cube, 1, 'constant', constant_values=0) - output_cube = np.array(cube) + output_cube = cube.copy() xs, ys, zs = cube.nonzero() output_cube[xs+1, ys, zs] = 1 @@ -50,7 +50,7 @@ def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]: exp = (output_cube ^ cube).nonzero() - for (x, y, z) in zip(exp[0], exp[1], exp[2]): - new_cube = np.array(cube) + for (x, y, z) in zip(*exp): + new_cube = cube.copy() new_cube[x, y, z] = 1 yield crop_cube(new_cube) From 9cfc47ff0224bae0697659709eb00006d62c1800 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Wed, 23 Aug 2023 20:57:17 +0200 Subject: [PATCH 03/11] pollycubes ? 
--- python/cubes.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python/cubes.py b/python/cubes.py index 4315642..179291d 100644 --- a/python/cubes.py +++ b/python/cubes.py @@ -40,18 +40,18 @@ def generate_polycubes(n: int, use_cache: bool = False) -> list[np.ndarray]: results = get_cache(n) print(f"\nGot polycubes from cache n={n}") else: - pollycubes = generate_polycubes(n-1, use_cache) + polycubes = generate_polycubes(n-1, use_cache) known_ids = set() done = 0 print(f"\nHashing polycubes n={n}") - for base_cube in pollycubes: + for base_cube in polycubes: for new_cube in expand_cube(base_cube): cube_id = get_canonical_packing(new_cube, known_ids) known_ids.add(cube_id) - log_if_needed(done, len(pollycubes)) + log_if_needed(done, len(polycubes)) done += 1 - log_if_needed(done, len(pollycubes)) + log_if_needed(done, len(polycubes)) print(f"\nGenerating polycubes from hash n={n}") results = [] From 080956d04f6ff2ffb7d3ae026ccc89f1027178b4 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Wed, 23 Aug 2023 22:11:48 +0200 Subject: [PATCH 04/11] remove crop cube since it is so slow --- python/libraries/packing.py | 1 - python/libraries/resizing.py | 9 ++++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/python/libraries/packing.py b/python/libraries/packing.py index 60204de..18775be 100644 --- a/python/libraries/packing.py +++ b/python/libraries/packing.py @@ -1,5 +1,4 @@ import numpy as np -import math def pack(polycube: np.ndarray) -> bytes: diff --git a/python/libraries/resizing.py b/python/libraries/resizing.py index 1f3a30d..b65cca8 100644 --- a/python/libraries/resizing.py +++ b/python/libraries/resizing.py @@ -53,4 +53,11 @@ def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]: for (x, y, z) in zip(*exp): new_cube = cube.copy() new_cube[x, y, z] = 1 - yield crop_cube(new_cube) + xl = 0 if x==0 else 1 + yl = 0 if y==0 else 1 + zl = 0 if z==0 else 1 + xr = cube.shape[0] - (not x==cube.shape[0]-1) + yr = 
cube.shape[1] - (not y==cube.shape[1]-1) + zr = cube.shape[2] - (not z==cube.shape[2]-1) + + yield new_cube[xl:xr,yl:yr,zl:zr] From c7c65b7013bbccbcfb31e1dead6f792def2a8020 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Wed, 23 Aug 2023 22:38:27 +0200 Subject: [PATCH 05/11] replace padding with raw array creation since it is faster --- python/libraries/resizing.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/python/libraries/resizing.py b/python/libraries/resizing.py index b65cca8..38f550a 100644 --- a/python/libraries/resizing.py +++ b/python/libraries/resizing.py @@ -37,8 +37,10 @@ def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]: generator(np.array): Yields new polycubes that are extensions of cube """ - cube = np.pad(cube, 1, 'constant', constant_values=0) - output_cube = cube.copy() + shape = tuple(el+2 for el in cube.shape) + output_cube=np.zeros(shape,dtype=cube.dtype) + output_cube[1:-1,1:-1,1:-1]=cube + cube=output_cube.copy() xs, ys, zs = cube.nonzero() output_cube[xs+1, ys, zs] = 1 From 43edebae333dd8253f004f5f40daee65508b3001 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Wed, 23 Aug 2023 22:40:36 +0200 Subject: [PATCH 06/11] make packing faster by removing unecessary ravel and splitting the shape packing out --- python/cubes.py | 10 ++++++---- python/libraries/packing.py | 8 ++++++++ python/libraries/rotation.py | 6 +++++- 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/python/cubes.py b/python/cubes.py index 179291d..74624f1 100644 --- a/python/cubes.py +++ b/python/cubes.py @@ -3,10 +3,10 @@ from time import perf_counter from libraries.cache import get_cache, save_cache, cache_exists from libraries.resizing import expand_cube -from libraries.packing import pack, unpack +from libraries.packing import pack, unpack, packShape, pack_fast from libraries.renderer import render_shapes -from libraries.rotation import all_rotations, all_rotations_fast - +from libraries.rotation import all_rotations, 
all_rotations_fast, get_canon_shape +import cProfile def log_if_needed(n, total_n): if (n == total_n or n % 100 == 0): @@ -87,8 +87,10 @@ def get_canonical_packing(polycube: np.ndarray, """ max_id = b'\x00' + orderedShape = get_canon_shape(polycube) + packedShape=packShape(orderedShape) for cube_rotation in all_rotations_fast(polycube): - this_id = pack(cube_rotation) + this_id = pack_fast(cube_rotation,packedShape) if (this_id in known_ids): return this_id if (this_id > max_id): diff --git a/python/libraries/packing.py b/python/libraries/packing.py index 18775be..70be2f7 100644 --- a/python/libraries/packing.py +++ b/python/libraries/packing.py @@ -21,6 +21,14 @@ def pack(polycube: np.ndarray) -> bytes: + np.packbits(polycube.flatten(), bitorder='little').tobytes() return data +def packShape(shape): + data = shape[0].to_bytes(1, 'little') \ + + shape[1].to_bytes(1, 'little') \ + + shape[2].to_bytes(1, 'little') + return data + +def pack_fast(polycube, packedShape): + return packedShape+np.packbits(polycube, bitorder='little').tobytes() def unpack(cube_id: bytes) -> np.ndarray: """ diff --git a/python/libraries/rotation.py b/python/libraries/rotation.py index b698d10..a0ac79f 100644 --- a/python/libraries/rotation.py +++ b/python/libraries/rotation.py @@ -39,8 +39,12 @@ def all_rotations(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: RotationIndexes={} +def get_canon_shape(polycube): + return tuple(sorted(polycube.shape,reverse=True)) + + def all_rotations_fast(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: - orderedShape = tuple(sorted(polycube.shape,reverse=True)) + orderedShape = get_canon_shape(polycube) if polycube.shape in RotationIndexes: ind = RotationIndexes[polycube.shape] return polycube.ravel()[ind].reshape((len(ind),)+orderedShape) From 08fb04a927a590842b59117b1cb81111e0349321 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Wed, 23 Aug 2023 22:42:30 +0200 Subject: [PATCH 07/11] why bother unpacking cube you already have ? 
--- python/cubes.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/python/cubes.py b/python/cubes.py index 74624f1..3832470 100644 --- a/python/cubes.py +++ b/python/cubes.py @@ -44,11 +44,16 @@ def generate_polycubes(n: int, use_cache: bool = False) -> list[np.ndarray]: known_ids = set() done = 0 + results = list() print(f"\nHashing polycubes n={n}") for base_cube in polycubes: for new_cube in expand_cube(base_cube): - cube_id = get_canonical_packing(new_cube, known_ids) + cube_id, canon_cube = get_canonical_packing(new_cube, known_ids) + prevLength = len(known_ids) known_ids.add(cube_id) + afterLength=len(known_ids) + if not prevLength == afterLength: + results.append(canon_cube) log_if_needed(done, len(polycubes)) done += 1 log_if_needed(done, len(polycubes)) @@ -87,15 +92,17 @@ def get_canonical_packing(polycube: np.ndarray, """ max_id = b'\x00' + curr_cube=polycube orderedShape = get_canon_shape(polycube) packedShape=packShape(orderedShape) for cube_rotation in all_rotations_fast(polycube): this_id = pack_fast(cube_rotation,packedShape) if (this_id in known_ids): - return this_id + return this_id, cube_rotation if (this_id > max_id): max_id = this_id - return max_id + curr_cube = cube_rotation + return max_id, curr_cube if __name__ == "__main__": From 8540f753a2e77fff34e070da6f16fceb85465417 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Wed, 23 Aug 2023 23:38:11 +0200 Subject: [PATCH 08/11] using vectorization to compute the bounds is twice as fast --- python/libraries/resizing.py | 54 ++++++++++++++++++++++++++---------- python/libraries/rotation.py | 9 ++---- 2 files changed, 43 insertions(+), 20 deletions(-) diff --git a/python/libraries/resizing.py b/python/libraries/resizing.py index 38f550a..ba8a9af 100644 --- a/python/libraries/resizing.py +++ b/python/libraries/resizing.py @@ -1,7 +1,6 @@ import numpy as np from typing import Generator - def crop_cube(cube: np.ndarray) -> np.ndarray: """ Crops an np.array to have no 
all-zero padding around the edge. @@ -22,7 +21,6 @@ def crop_cube(cube: np.ndarray) -> np.ndarray: cube = np.swapaxes(cube, 0, i) return cube - def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]: """ Expands a polycube by adding single blocks at all valid locations. @@ -49,17 +47,45 @@ def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]: output_cube[xs, ys-1, zs] = 1 output_cube[xs, ys, zs+1] = 1 output_cube[xs, ys, zs-1] = 1 + output_cube[xs, ys, zs] = 0 - exp = (output_cube ^ cube).nonzero() - - for (x, y, z) in zip(*exp): + exp = output_cube.nonzero() + bounds=list() + bound=np.empty_like(exp[0]) + for i in range(3): + ind = exp[i]==0 + bound[ind]=0 + bound[~ind]=1 + bounds.append(bound.copy()) + ind=exp[i]==cube.shape[i]-1 + bound[ind]=cube.shape[i] + bound[~ind]=cube.shape[i]-1 + bounds.append(bound.copy()) + + n=len(exp[0]) + for i in range(n): new_cube = cube.copy() - new_cube[x, y, z] = 1 - xl = 0 if x==0 else 1 - yl = 0 if y==0 else 1 - zl = 0 if z==0 else 1 - xr = cube.shape[0] - (not x==cube.shape[0]-1) - yr = cube.shape[1] - (not y==cube.shape[1]-1) - zr = cube.shape[2] - (not z==cube.shape[2]-1) - - yield new_cube[xl:xr,yl:yr,zl:zr] + new_cube[exp[0][i], exp[1][i], exp[2][i]] = 1 + yield new_cube[bounds[0][i]:bounds[1][i],bounds[2][i]:bounds[3][i],bounds[4][i]:bounds[5][i]] + + # vec=np.arange(len(exp[0])) + # new_cube = np.tile(cube,(len(vec),1,1,1)) + # new_cube[vec,exp[0],exp[1],exp[2]]=1 + # return new_cube + + +def test_packing(): + from time import perf_counter + + n=1000 + shape=(4,3,2) + polycubes = (np.random.random((n,)+shape)>0.5).astype(np.byte) + now=perf_counter() + for cube in polycubes: + res=list(expand_cube(cube)) + # res=list(expand_cube_fast(cube)) + # res=expand_cube(cube) + print(perf_counter()-now) + +if __name__ == "__main__": + test_packing() \ No newline at end of file diff --git a/python/libraries/rotation.py b/python/libraries/rotation.py index a0ac79f..a37a990 100644 --- 
a/python/libraries/rotation.py +++ b/python/libraries/rotation.py @@ -45,10 +45,7 @@ def get_canon_shape(polycube): def all_rotations_fast(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: orderedShape = get_canon_shape(polycube) - if polycube.shape in RotationIndexes: - ind = RotationIndexes[polycube.shape] - return polycube.ravel()[ind].reshape((len(ind),)+orderedShape) - else: + if not polycube.shape in RotationIndexes: n1,n2,n3 = polycube.shape vec = np.arange(n1*n2*n3).reshape(polycube.shape) uniqueRotations = set() @@ -80,6 +77,6 @@ def func(el): func(el) for el in single_axis_rotation(np.rot90(vec, -1, axes=(0, 1)), (0, 2)): func(el) - RotationIndexes[polycube.shape] = np.stack(rotations, axis=0) - return polycube.ravel()[RotationIndexes[polycube.shape]].reshape((len(rotations),)+orderedShape) + ind = RotationIndexes[polycube.shape] + return polycube.ravel()[ind].reshape((len(ind),)+orderedShape) \ No newline at end of file From 7642f655c0a9d20f5e576736bc7c2718406aa69c Mon Sep 17 00:00:00 2001 From: Wagyx Date: Wed, 23 Aug 2023 23:42:44 +0200 Subject: [PATCH 09/11] up main --- python/cubes.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/python/cubes.py b/python/cubes.py index 3832470..0bf9752 100644 --- a/python/cubes.py +++ b/python/cubes.py @@ -7,6 +7,7 @@ from libraries.renderer import render_shapes from libraries.rotation import all_rotations, all_rotations_fast, get_canon_shape import cProfile +from pstats import Stats, SortKey def log_if_needed(n, total_n): if (n == total_n or n % 100 == 0): @@ -58,21 +59,11 @@ def generate_polycubes(n: int, use_cache: bool = False) -> list[np.ndarray]: done += 1 log_if_needed(done, len(polycubes)) - print(f"\nGenerating polycubes from hash n={n}") - results = [] - done = 0 - for cube_id in known_ids: - results.append(unpack(cube_id)) - log_if_needed(done, len(known_ids)) - done += 1 - log_if_needed(done, len(known_ids)) - if (use_cache and not cache_exists(n)): save_cache(n, 
results) return results - def get_canonical_packing(polycube: np.ndarray, known_ids: set[bytes]) -> bytes: """ From 5978a06cb08616b7c6dabdea067cb3a0b6cb1f97 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Thu, 24 Aug 2023 22:20:16 +0200 Subject: [PATCH 10/11] adds doctrsing for new functions --- python/cubes.py | 6 ++---- python/libraries/packing.py | 24 ++++++++++++++++++++++++ python/libraries/resizing.py | 15 ++++++--------- python/libraries/rotation.py | 20 +++++++++++++++++++- 4 files changed, 51 insertions(+), 14 deletions(-) diff --git a/python/cubes.py b/python/cubes.py index 0bf9752..719aa68 100644 --- a/python/cubes.py +++ b/python/cubes.py @@ -3,11 +3,9 @@ from time import perf_counter from libraries.cache import get_cache, save_cache, cache_exists from libraries.resizing import expand_cube -from libraries.packing import pack, unpack, packShape, pack_fast +from libraries.packing import packShape, pack_fast from libraries.renderer import render_shapes -from libraries.rotation import all_rotations, all_rotations_fast, get_canon_shape -import cProfile -from pstats import Stats, SortKey +from libraries.rotation import all_rotations_fast, get_canon_shape def log_if_needed(n, total_n): if (n == total_n or n % 100 == 0): diff --git a/python/libraries/packing.py b/python/libraries/packing.py index 70be2f7..b30e4c4 100644 --- a/python/libraries/packing.py +++ b/python/libraries/packing.py @@ -22,12 +22,36 @@ def pack(polycube: np.ndarray) -> bytes: return data def packShape(shape): + """ + Converts the shape of a 3D numpy array into a single bytes object in an identical way as what happens in pack() + + Parameters: + shape (tuple of 3 int): the shape of a 3D numpy array reprsenting a polycube + + Returns: + (bytes): a bytes representation of the shape + + """ data = shape[0].to_bytes(1, 'little') \ + shape[1].to_bytes(1, 'little') \ + shape[2].to_bytes(1, 'little') return data def pack_fast(polycube, packedShape): + """ + Converts a 3D ndarray into a single bytes 
object that unique identifies + the polycube, is hashable, comparable, and allows to reconstruct the + original polycube ndarray. + + Parameters: + polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions, + and 0 values indicate empty space. Must be of type np.int8. + packedShape: the bytes representation of the shape + + Returns: + cube_id (bytes): a bytes representation of the polycube + + """ return packedShape+np.packbits(polycube, bitorder='little').tobytes() def unpack(cube_id: bytes) -> np.ndarray: diff --git a/python/libraries/resizing.py b/python/libraries/resizing.py index ba8a9af..960bb6f 100644 --- a/python/libraries/resizing.py +++ b/python/libraries/resizing.py @@ -67,14 +67,12 @@ def expand_cube(cube: np.ndarray) -> Generator[np.ndarray, None, None]: new_cube = cube.copy() new_cube[exp[0][i], exp[1][i], exp[2][i]] = 1 yield new_cube[bounds[0][i]:bounds[1][i],bounds[2][i]:bounds[3][i],bounds[4][i]:bounds[5][i]] - - # vec=np.arange(len(exp[0])) - # new_cube = np.tile(cube,(len(vec),1,1,1)) - # new_cube[vec,exp[0],exp[1],exp[2]]=1 - # return new_cube - + -def test_packing(): +def test_expand(): + """ + Function to test the performance of the expand_cube() function + """ from time import perf_counter n=1000 @@ -84,8 +82,7 @@ def test_packing(): for cube in polycubes: res=list(expand_cube(cube)) # res=list(expand_cube_fast(cube)) - # res=expand_cube(cube) print(perf_counter()-now) if __name__ == "__main__": - test_packing() \ No newline at end of file + test_expand() \ No newline at end of file diff --git a/python/libraries/rotation.py b/python/libraries/rotation.py index a37a990..02e71b2 100644 --- a/python/libraries/rotation.py +++ b/python/libraries/rotation.py @@ -37,13 +37,31 @@ def all_rotations(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: yield from single_axis_rotation(np.rot90(polycube, axes=(0, 1)), (0, 2)) yield from single_axis_rotation(np.rot90(polycube, -1, axes=(0, 1)), (0, 2)) -RotationIndexes={} 
def get_canon_shape(polycube): + """ + Get the canonical shape of the polycube, ordered in descending order + """ return tuple(sorted(polycube.shape,reverse=True)) +RotationIndexes=set() def all_rotations_fast(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: + """ + Calculates all rotations of a polycube. + + Adapted from https://stackoverflow.com/questions/33190042/how-to-calculate-all-24-rotations-of-3d-array. + This function relies on a global dictionnary that holds the rotations as index permutations. + This is faster than using yield because numpy seems to optimize the index computation. + It also relies on an canonical shape which reduces the number of possible rotations. + + Parameters: + polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions + + Returns: + (np.array): all the rotations of the cube in a 4D array acting as a list of 3D cubes. + + """ orderedShape = get_canon_shape(polycube) if not polycube.shape in RotationIndexes: n1,n2,n3 = polycube.shape From be0b0db196960ba2bd11a45b81db71a3c3aa4dc1 Mon Sep 17 00:00:00 2001 From: Wagyx Date: Thu, 24 Aug 2023 22:22:53 +0200 Subject: [PATCH 11/11] fix --- python/libraries/rotation.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/python/libraries/rotation.py b/python/libraries/rotation.py index 02e71b2..e8f6235 100644 --- a/python/libraries/rotation.py +++ b/python/libraries/rotation.py @@ -1,12 +1,14 @@ import numpy as np from typing import Generator + def single_axis_rotation(polycube, axes): """Yield four rotations of the given 3d array in the plane spanned by the given axes. For example, a rotation in axes (0,1) is a rotation around axis 2""" for i in range(4): yield np.rot90(polycube, i, axes) + def all_rotations(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: """ Calculates all rotations of a polycube. 
@@ -42,10 +44,10 @@ def get_canon_shape(polycube): """ Get the canonical shape of the polycube, ordered in descending order """ - return tuple(sorted(polycube.shape,reverse=True)) + return tuple(sorted(polycube.shape, reverse=True)) -RotationIndexes=set() +RotationIndexes = dict() def all_rotations_fast(polycube: np.ndarray) -> Generator[np.ndarray, None, None]: """ Calculates all rotations of a polycube. @@ -64,7 +66,7 @@ def all_rotations_fast(polycube: np.ndarray) -> Generator[np.ndarray, None, None """ orderedShape = get_canon_shape(polycube) if not polycube.shape in RotationIndexes: - n1,n2,n3 = polycube.shape + n1, n2, n3 = polycube.shape vec = np.arange(n1*n2*n3).reshape(polycube.shape) uniqueRotations = set() rotations = list() @@ -97,4 +99,4 @@ def func(el): func(el) RotationIndexes[polycube.shape] = np.stack(rotations, axis=0) ind = RotationIndexes[polycube.shape] - return polycube.ravel()[ind].reshape((len(ind),)+orderedShape) \ No newline at end of file + return polycube.ravel()[ind].reshape((len(ind),)+orderedShape)