42 changes: 23 additions & 19 deletions cubes.py
@@ -42,54 +42,58 @@ def generate_polycubes(n: int, use_cache: bool = False) -> list[np.ndarray]:
else:
pollycubes = generate_polycubes(n-1, use_cache)

hashes = set()
known_ids = set()
done = 0
print(f"\nHashing polycubes n={n}")
for base_cube in pollycubes:
for new_cube in expand_cube(base_cube):
cube_hash = get_canoincal_packing(new_cube, hashes)
hashes.add(cube_hash)
cube_id = get_canonical_packing(new_cube, known_ids)
known_ids.add(cube_id)
log_if_needed(done, len(pollycubes))
done += 1
log_if_needed(done, len(pollycubes))

print(f"\nGenerating polycubes from hash n={n}")
results = []
done = 0
for cube_hash in hashes:
results.append(unpack(cube_hash))
log_if_needed(done, len(hashes))
for cube_id in known_ids:
results.append(unpack(cube_id))
log_if_needed(done, len(known_ids))
done += 1
log_if_needed(done, len(hashes))
log_if_needed(done, len(known_ids))

if (use_cache and not cache_exists(n)):
save_cache(n, results)

return results


def get_canoincal_packing(polycube: np.ndarray, known_hashes: set[int]) -> int:
def get_canonical_packing(polycube: np.ndarray,
known_ids: set[bytes]) -> bytes:
"""
Determines if a polycube has already been seen.

Considers all possible rotations of a cube against the existing cubes stored in memory.
Returns True if the cube exists, or False if it is new.
Considers all possible rotations of a polycube against the existing
ones stored in memory. Returns the id if it's found in the set,
or the maximum id of all rotations if the polycube is new.

Parameters:
polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions
polycube (np.array): 3D Numpy byte array where 1 values indicate
cube positions. Must be of type np.int8
known_ids (set[bytes]): A set of all known polycube ids

Returns:
hash: the hash for this cube
cube_id (bytes): the id for this cube

"""
max_hash = 0
max_id = b'\x00'
for cube_rotation in all_rotations(polycube):
this_hash = pack(cube_rotation)
if(this_hash in known_hashes):
return this_hash
if (this_hash > max_hash):
max_hash = this_hash
return max_hash
this_id = pack(cube_rotation)
if (this_id in known_ids):
return this_id
if (this_id > max_id):
max_id = this_id
return max_id


if __name__ == "__main__":
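For readers skimming the diff, here is a minimal, self-contained sketch of the deduplication idea that `get_canonical_packing` implements. It re-implements the byte packing inline and only cycles through the four rotations about one axis via `np.rot90` (the repo's `all_rotations` covers all 24 cube orientations), so treat it as an illustration of the logic, not the project's actual code.

```python
import numpy as np


def pack_like(polycube: np.ndarray) -> bytes:
    # Same layout as pack() in this PR: raw int8 bytes followed by the three shape bytes.
    return polycube.astype(np.int8).tobytes() + bytes(polycube.shape)


def canonical_id(polycube: np.ndarray, known_ids: set[bytes]) -> bytes:
    # Mirror of get_canonical_packing(), with np.rot90 standing in for all_rotations().
    max_id = b'\x00'
    for quarter_turns in range(4):
        rotation = np.rot90(polycube, quarter_turns, axes=(0, 1))
        this_id = pack_like(rotation)
        if this_id in known_ids:
            return this_id        # some rotation of this cube was already recorded
        if this_id > max_id:
            max_id = this_id
    return max_id                 # new cube: its largest rotation id becomes canonical


known: set[bytes] = set()
tromino = np.array([[[1, 0], [1, 1]]], dtype=np.int8)   # shape (1, 2, 2)
rotated = np.rot90(tromino, 1, axes=(0, 1)).copy()      # same cube, different orientation

for candidate in (tromino, rotated):                     # as in the generation loop above
    known.add(canonical_id(candidate, known))

assert len(known) == 1                                   # both orientations collapse to one id
```

The key property is that every rotation of the same polycube resolves to the same canonical id, so each candidate produced by `expand_cube` costs at most one pass over its rotations.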
55 changes: 30 additions & 25 deletions libraries/packing.py
@@ -2,20 +2,22 @@
import math


def pack(polycube: np.ndarray) -> int:
def pack(polycube: np.ndarray) -> bytes:
"""
Converts a 3D ndarray into a single unsigned integer for quick hashing and efficient storage

Converts a {0,1} nd array into a single unique large integer
Converts a 3D ndarray into a single bytes object that uniquely identifies
the polycube, is hashable and comparable, and allows the original
polycube ndarray to be reconstructed.

Parameters:
polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions
polycube (np.array): 3D Numpy byte array where 1 values indicate polycube positions,
and 0 values indicate empty space. Must be of type np.int8.

Returns:
int: a unique integer hash
cube_id (bytes): a bytes representation of the polycube

"""

# # Previous implementation:
# pack_cube = np.packbits(polycube.flatten(), bitorder='big')
# cube_hash = 0
# for index in polycube.shape:
@@ -24,31 +26,34 @@ def pack(polycube: np.ndarray) -> int:
# cube_hash = (cube_hash << 8) + int(part)
# return cube_hash

data = polycube.tobytes() + polycube.shape[0].to_bytes(1, 'big') + polycube.shape[1].to_bytes(1, 'big') + polycube.shape[2].to_bytes(1, 'big')
return int.from_bytes(data, 'big')
# # dtype should be np.int8: (commented out for efficiency)
# if polycube.dtype != np.int8:
# raise TypeError("Polycube must be of type np.int8")

# pack cube
data = polycube.tobytes() + polycube.shape[0].to_bytes(1, 'big') \
+ polycube.shape[1].to_bytes(1, 'big') \
+ polycube.shape[2].to_bytes(1, 'big')
return data
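For a concrete feel for the new id format (raw int8 cell bytes followed by the three shape bytes), here is a small worked check. The import path is taken from this PR's file layout (`libraries/packing.py`) and assumes the repository root is on `sys.path`.

```python
import numpy as np

from libraries.packing import pack  # assumes the repo root is on sys.path

domino = np.array([[[1, 1]]], dtype=np.int8)   # a 1x1x2 polycube
packed = pack(domino)

# Two data bytes (one per cell, C order) followed by the shape bytes 1, 1, 2.
assert packed == b'\x01\x01' + b'\x01\x01\x02'
assert len(packed) == domino.size + 3
```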

def unpack(cube_hash: int) -> np.ndarray:
"""
Converts a single integer back into a 3D ndarray

def unpack(cube_id: bytes) -> np.ndarray:
"""
Converts a bytes object back into a 3D ndarray

Parameters:
cube_hash (int): a unique integer hash
cube_id (bytes): a unique bytes object

Returns:
np.array: 3D Numpy byte array where 1 values indicate polycube positions

polycube (np.array): 3D Numpy byte array where 1 values indicate
cube positions

"""
# Extract shape information
shape = (cube_id[-3], cube_id[-2], cube_id[-1])

# Create ndarray from byte data
polycube = np.frombuffer(cube_id[:-3], dtype=np.int8)
polycube = polycube.reshape(shape)
return polycube

length = math.ceil(math.log2(cube_hash))
parts = cube_hash.to_bytes(length, byteorder='big')
shape = (
parts[-3],
parts[-2],
parts[-1],
)
size = shape[0] * shape[1] * shape[2]
raw = np.frombuffer(parts[:-3], dtype=np.uint8)
final = raw[(len(raw) - size):len(raw)].reshape(shape)
return final
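A round-trip check for the pair: `unpack()` should rebuild exactly the array that `pack()` encoded, shape and dtype included. A minimal sketch, with the same `libraries.packing` import-path assumption as the previous snippet.

```python
import numpy as np

from libraries.packing import pack, unpack  # same import-path assumption as above

tromino = np.zeros((2, 2, 1), dtype=np.int8)
tromino[0, 0, 0] = 1
tromino[1, 0, 0] = 1
tromino[1, 1, 0] = 1          # an L-shaped tromino in a 2x2x1 bounding box

restored = unpack(pack(tromino))
assert restored.shape == tromino.shape
assert restored.dtype == np.int8
assert np.array_equal(restored, tromino)
```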