From ede042b7db3500085b819b82a7d63b6eab662006 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Mon, 26 Oct 2015 09:26:54 +0100 Subject: [PATCH 01/19] initial serialization of hdf5 --- requirements.txt | 1 + worldengine/cli/main.py | 13 +++++++ worldengine/hdf5_serialization.py | 63 +++++++++++++++++++++++++++++++ worldengine/world.py | 1 + 4 files changed, 78 insertions(+) create mode 100644 worldengine/hdf5_serialization.py diff --git a/requirements.txt b/requirements.txt index 47f81745..e955ca4e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,3 +5,4 @@ pypng==0.0.18 PyPlatec==1.4.0 protobuf==3.0.0a3 six==1.10.0 +h5py==2.5.0 diff --git a/worldengine/cli/main.py b/worldengine/cli/main.py index c15b1030..86a53163 100644 --- a/worldengine/cli/main.py +++ b/worldengine/cli/main.py @@ -12,6 +12,7 @@ from worldengine.step import Step from worldengine.world import World from worldengine.version import __version__ +from worldengine.hdf5_serialization import save_hdf5 VERSION = __version__ @@ -37,6 +38,8 @@ def generate_world(world_name, width, height, seed, num_plates, output_dir, with open(filename, "wb") as f: if world_format == 'protobuf': f.write(w.protobuf_serialize()) + elif world_format == 'hdf5': + save_hdf5(w, filename) else: print("Unknown format '%s', not saving " % world_format) print("* world data saved in '%s'" % filename) @@ -228,6 +231,11 @@ def main(): "a name is not provided, then seed_N.world, " + "where N=SEED", metavar="STR") + parser.add_argument('--hdf5', dest='hdf5', + action="store_true", + help="Save world file using HDF5 format. " + + "Default = store using pickle format", + default=False) parser.add_argument('-s', '--seed', dest='seed', type=int, help="Use seed=N to initialize the pseudo-random " + "generation. 
If not provided, one will be " + @@ -372,6 +380,9 @@ def main(): if args.number_of_plates < 1 or args.number_of_plates > 100: usage(error="Number of plates should be in [1, 100]") + if args.protobuf and args.hdf5: + usage(error="Protobuf and hdf5 are exclusive choices") + operation = "world" if args.OPERATOR is None: pass @@ -404,6 +415,8 @@ def main(): step = check_step(args.step) world_format = 'protobuf' + if args.hdf5: + world_format = 'hdf5' generation_operation = (operation == 'world') or (operation == 'plates') diff --git a/worldengine/hdf5_serialization.py b/worldengine/hdf5_serialization.py new file mode 100644 index 00000000..965c0b0b --- /dev/null +++ b/worldengine/hdf5_serialization.py @@ -0,0 +1,63 @@ +import h5py +from worldengine.version import __version__ +from worldengine.biome import biome_name_to_index + +def save_hdf5(world, filename): + f = h5py.File(filename, libver='latest', mode='w') + + general_grp = f.create_group("general") + general_grp["worldengine_version"] = __version__ + general_grp["name"] = world.name + general_grp["width"] = world.width + general_grp["height"] = world.height + + elevation_grp = f.create_group("elevation") + elevation_ths_grp = elevation_grp.create_group("thresholds") + elevation_ths_grp["sea"] = world.elevation['thresholds'][0][1] + elevation_ths_grp["plain"] = world.elevation['thresholds'][1][1] + elevation_ths_grp["hill"] = world.elevation['thresholds'][2][1] + elevation_data = elevation_grp.create_dataset("data", (world.width, world.height), dtype='float') + for y in range(world.height): + for x in range(world.width): + elevation_data[x, y] = world.elevation['data'][y][x] + + plates_data = f.create_dataset("plates", (world.width, world.height), dtype='int') + for y in range(world.height): + for x in range(world.width): + plates_data[x, y] = world.plates[y][x] + + ocean_data = f.create_dataset("ocean", (world.width, world.height), dtype='bool') + for y in range(world.height): + for x in range(world.width): + ocean_data[x, y] = world.ocean[y][x] + + sea_depth_data = f.create_dataset("sea_depth", (world.width, world.height), dtype='float') + for y in range(world.height): + for x in range(world.width): + sea_depth_data[x, y] = world.sea_depth[y][x] + + if hasattr(world, 'biome'): + biome_data = f.create_dataset("biome", (world.width, world.height), dtype='int') + for y in range(world.height): + for x in range(world.width): + biome_data[x, y] = biome_name_to_index(world.biome[y][x]) + + if hasattr(world, 'humidity'): + humidity_grp = f.create_group("humidity") + humidity_quantiles_grp = humidity_grp.create_group("quantiles") + for k in world.humidity['quantiles'].keys(): + humidity_quantiles_grp[k] = world.humidity['quantiles'][k] + humidity_data = humidity_grp.create_dataset("data", (world.width, world.height), dtype='float') + for y in range(world.height): + for x in range(world.width): + humidity_data[x, y] = world.humidity['data'][y][x] + + if hasattr(world, 'irrigation'): + irrigation_data = f.create_dataset("irrigation", (world.width, world.height), dtype='float') + for y in range(world.height): + for x in range(world.width): + irrigation_data[x, y] = world.irrigation[y][x] + + generation_params_grp = f.create_group("generation_params") + + f.close() \ No newline at end of file diff --git a/worldengine/world.py b/worldengine/world.py index a46ae383..5eb006b0 100644 --- a/worldengine/world.py +++ b/worldengine/world.py @@ -16,6 +16,7 @@ from worldengine.common import _equal from worldengine.version import __version__ + class 
World(object): """A world composed by name, dimensions and all the characteristics of each cell. From 88922415a5208f800bcbad27616db7cd56742f50 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Mon, 26 Oct 2015 21:11:38 +0100 Subject: [PATCH 02/19] complete hdf5 serialization and adding test --- tests/serialization_test.py | 23 ++++++++ worldengine/hdf5_serialization.py | 92 ++++++++++++++++++++++++++----- 2 files changed, 101 insertions(+), 14 deletions(-) diff --git a/tests/serialization_test.py b/tests/serialization_test.py index 9a482359..bd53e10c 100644 --- a/tests/serialization_test.py +++ b/tests/serialization_test.py @@ -32,6 +32,29 @@ def test_protobuf_serialize_unserialize(self): self.assertEqual(sorted(dir(w)), sorted(dir(unserialized))) self.assertEqual(w, unserialized) + def test_hdf5_serialize_unserialize(self): + w = world_gen("Dummy", 32, 16, 1, step=Step.get_by_name("full")) + serialized = w.hdf5_serialize() + unserialized = World.hdf5_unserialize(serialized) + self.assertTrue(_equal(w.elevation['data'], unserialized.elevation['data'])) + self.assertEqual(w.elevation['thresholds'], unserialized.elevation['thresholds']) + self.assertTrue(_equal(w.ocean, unserialized.ocean)) + self.assertTrue(_equal(w.biome, unserialized.biome)) + self.assertTrue(_equal(w.humidity, unserialized.humidity)) + self.assertTrue(_equal(w.irrigation, unserialized.irrigation)) + self.assertTrue(_equal(w.permeability, unserialized.permeability)) + self.assertTrue(_equal(w.watermap, unserialized.watermap)) + self.assertTrue(_equal(w.precipitation, unserialized.precipitation)) + self.assertTrue(_equal(w.temperature, unserialized.temperature)) + self.assertTrue(_equal(w.sea_depth, unserialized.sea_depth)) + self.assertEquals(w.seed, unserialized.seed) + self.assertEquals(w.n_plates, unserialized.n_plates) + self.assertTrue(_equal(w.ocean_level, unserialized.ocean_level)) + self.assertTrue(_equal(w.lake_map, unserialized.lake_map)) + self.assertTrue(_equal(w.river_map, unserialized.river_map)) + self.assertEquals(w.step, unserialized.step) + self.assertEqual(sorted(dir(w)), sorted(dir(unserialized))) + self.assertEqual(w, unserialized) if __name__ == '__main__': unittest.main() diff --git a/worldengine/hdf5_serialization.py b/worldengine/hdf5_serialization.py index 965c0b0b..ca1504c8 100644 --- a/worldengine/hdf5_serialization.py +++ b/worldengine/hdf5_serialization.py @@ -2,6 +2,7 @@ from worldengine.version import __version__ from worldengine.biome import biome_name_to_index + def save_hdf5(world, filename): f = h5py.File(filename, libver='latest', mode='w') @@ -16,48 +17,111 @@ def save_hdf5(world, filename): elevation_ths_grp["sea"] = world.elevation['thresholds'][0][1] elevation_ths_grp["plain"] = world.elevation['thresholds'][1][1] elevation_ths_grp["hill"] = world.elevation['thresholds'][2][1] - elevation_data = elevation_grp.create_dataset("data", (world.width, world.height), dtype='float') + elevation_data = elevation_grp.create_dataset("data", (world.height, world.width), dtype='float') for y in range(world.height): for x in range(world.width): - elevation_data[x, y] = world.elevation['data'][y][x] + elevation_data[y, x] = world.elevation['data'][y][x] - plates_data = f.create_dataset("plates", (world.width, world.height), dtype='int') + plates_data = f.create_dataset("plates", (world.height, world.width), dtype='int') for y in range(world.height): for x in range(world.width): - plates_data[x, y] = world.plates[y][x] + plates_data[y, x] = world.plates[y][x] - ocean_data = 
f.create_dataset("ocean", (world.width, world.height), dtype='bool') + ocean_data = f.create_dataset("ocean", (world.height, world.width), dtype='bool') for y in range(world.height): for x in range(world.width): - ocean_data[x, y] = world.ocean[y][x] + ocean_data[y, x] = world.ocean[y][x] - sea_depth_data = f.create_dataset("sea_depth", (world.width, world.height), dtype='float') + sea_depth_data = f.create_dataset("sea_depth", (world.height, world.width), dtype='float') for y in range(world.height): for x in range(world.width): - sea_depth_data[x, y] = world.sea_depth[y][x] + sea_depth_data[y, x] = world.sea_depth[y][x] if hasattr(world, 'biome'): - biome_data = f.create_dataset("biome", (world.width, world.height), dtype='int') + biome_data = f.create_dataset("biome", (world.height, world.width), dtype='int') for y in range(world.height): for x in range(world.width): - biome_data[x, y] = biome_name_to_index(world.biome[y][x]) + biome_data[y, x] = biome_name_to_index(world.biome[y][x]) if hasattr(world, 'humidity'): humidity_grp = f.create_group("humidity") humidity_quantiles_grp = humidity_grp.create_group("quantiles") for k in world.humidity['quantiles'].keys(): humidity_quantiles_grp[k] = world.humidity['quantiles'][k] - humidity_data = humidity_grp.create_dataset("data", (world.width, world.height), dtype='float') + humidity_data = humidity_grp.create_dataset("data", (world.height, world.width), dtype='float') for y in range(world.height): for x in range(world.width): - humidity_data[x, y] = world.humidity['data'][y][x] + humidity_data[y, x] = world.humidity['data'][y][x] if hasattr(world, 'irrigation'): - irrigation_data = f.create_dataset("irrigation", (world.width, world.height), dtype='float') + irrigation_data = f.create_dataset("irrigation", (world.height, world.width), dtype='float') + for y in range(world.height): + for x in range(world.width): + irrigation_data[y, x] = world.irrigation[y][x] + + if hasattr(world, 'permeability'): + permeability_grp = f.create_group("permeability") + permeability_ths_grp = permeability_grp.create_group("thresholds") + permeability_ths_grp['low'] = world.permeability['thresholds'][0][1] + permeability_ths_grp['med'] = world.permeability['thresholds'][1][1] + permeability_data = permeability_grp.create_dataset("data", (world.height, world.width), dtype='float') + for y in range(world.height): + for x in range(world.width): + permeability_data[y, x] = world.permeability['data'][y][x] + + if hasattr(world, 'watermap'): + watermap_grp = f.create_group("watermap") + watermap_ths_grp = watermap_grp.create_group("thresholds") + watermap_ths_grp['creek'] = world.watermap['thresholds']['creek'] + watermap_ths_grp['river'] = world.watermap['thresholds']['river'] + watermap_ths_grp['mainriver'] = world.watermap['thresholds']['main river'] + watermap_data = watermap_grp.create_dataset("data", (world.height, world.width), dtype='float') + for y in range(world.height): + for x in range(world.width): + watermap_data[y, x] = world.watermap['data'][y][x] + + if hasattr(world, 'precipitation'): + precipitation_grp = f.create_group("precipitation") + precipitation_ths_grp = precipitation_grp.create_group("thresholds") + precipitation_ths_grp['low'] = world.precipitation['thresholds'][0][1] + precipitation_ths_grp['med'] = world.precipitation['thresholds'][1][1] + precipitation_data = precipitation_grp.create_dataset("data", (world.height, world.width), dtype='float') + for y in range(world.height): + for x in range(world.width): + precipitation_data[y, x] = 
world.precipitation['data'][y][x] + + if hasattr(world, 'temperature'): + temperature_grp = f.create_group("temperature") + temperature_ths_grp = temperature_grp.create_group("thresholds") + temperature_ths_grp['polar'] = world.temperature['thresholds'][0][1] + temperature_ths_grp['alpine'] = world.temperature['thresholds'][1][1] + temperature_ths_grp['boreal'] = world.temperature['thresholds'][2][1] + temperature_ths_grp['cool'] = world.temperature['thresholds'][3][1] + temperature_ths_grp['warm'] = world.temperature['thresholds'][4][1] + temperature_ths_grp['subtropical'] = world.temperature['thresholds'][5][1] + temperature_data = temperature_grp.create_dataset("data", (world.height, world.width), dtype='float') + for y in range(world.height): + for x in range(world.width): + temperature_data[y, x] = world.temperature['data'][y][x] + + if hasattr(world, 'lake_map'): + lake_map_data = f.create_dataset("lake_map", (world.height, world.width), dtype='float') + for y in range(world.height): + for x in range(world.width): + # lake_map and river_map have inverted coordinates + lake_map_data[y, x] = world.lake_map[x][y] + + if hasattr(world, 'river_map'): + river_map_data = f.create_dataset("river_map", (world.height, world.width), dtype='float') for y in range(world.height): for x in range(world.width): - irrigation_data[x, y] = world.irrigation[y][x] + # lake_map and river_map have inverted coordinates + river_map_data[y, x] = world.river_map[x][y] generation_params_grp = f.create_group("generation_params") + generation_params_grp['seed'] = world.seed + generation_params_grp['n_plates'] = world.n_plates + generation_params_grp['ocean_level'] = world.ocean_level + generation_params_grp['step'] = world.step.name f.close() \ No newline at end of file From d58ef5b8b367f2a6cc7aec231aec260b7c301e08 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Mon, 26 Oct 2015 23:30:30 +0100 Subject: [PATCH 03/19] hdf5 unserialization --- tests/serialization_test.py | 60 +++++++++----- worldengine/cli/main.py | 4 +- worldengine/hdf5_serialization.py | 133 ++++++++++++++++++++++++++++-- 3 files changed, 164 insertions(+), 33 deletions(-) diff --git a/tests/serialization_test.py b/tests/serialization_test.py index bd53e10c..011f0829 100644 --- a/tests/serialization_test.py +++ b/tests/serialization_test.py @@ -2,6 +2,10 @@ from worldengine.plates import Step, world_gen from worldengine.world import World from worldengine.common import _equal +import tempfile +import os +from worldengine.hdf5_serialization import save_world_to_hdf5, load_world_to_hdf5 + class TestSerialization(unittest.TestCase): @@ -33,28 +37,40 @@ def test_protobuf_serialize_unserialize(self): self.assertEqual(w, unserialized) def test_hdf5_serialize_unserialize(self): - w = world_gen("Dummy", 32, 16, 1, step=Step.get_by_name("full")) - serialized = w.hdf5_serialize() - unserialized = World.hdf5_unserialize(serialized) - self.assertTrue(_equal(w.elevation['data'], unserialized.elevation['data'])) - self.assertEqual(w.elevation['thresholds'], unserialized.elevation['thresholds']) - self.assertTrue(_equal(w.ocean, unserialized.ocean)) - self.assertTrue(_equal(w.biome, unserialized.biome)) - self.assertTrue(_equal(w.humidity, unserialized.humidity)) - self.assertTrue(_equal(w.irrigation, unserialized.irrigation)) - self.assertTrue(_equal(w.permeability, unserialized.permeability)) - self.assertTrue(_equal(w.watermap, unserialized.watermap)) - self.assertTrue(_equal(w.precipitation, unserialized.precipitation)) - 
self.assertTrue(_equal(w.temperature, unserialized.temperature)) - self.assertTrue(_equal(w.sea_depth, unserialized.sea_depth)) - self.assertEquals(w.seed, unserialized.seed) - self.assertEquals(w.n_plates, unserialized.n_plates) - self.assertTrue(_equal(w.ocean_level, unserialized.ocean_level)) - self.assertTrue(_equal(w.lake_map, unserialized.lake_map)) - self.assertTrue(_equal(w.river_map, unserialized.river_map)) - self.assertEquals(w.step, unserialized.step) - self.assertEqual(sorted(dir(w)), sorted(dir(unserialized))) - self.assertEqual(w, unserialized) + filename = None + try: + w = world_gen("Dummy", 32, 16, 1, step=Step.get_by_name("full")) + f = tempfile.NamedTemporaryFile(delete=False) + f.close() + filename = f.name + serialized = save_world_to_hdf5(w, filename) + unserialized = load_world_to_hdf5(filename) + self.assertTrue(_equal(w.elevation['data'], unserialized.elevation['data'])) + self.assertEqual(w.elevation['thresholds'], unserialized.elevation['thresholds']) + self.assertTrue(_equal(w.ocean, unserialized.ocean)) + self.assertTrue(_equal(w.biome, unserialized.biome)) + self.assertTrue(_equal(w.humidity['quantiles'], unserialized.humidity['quantiles'])) + self.assertTrue(_equal(w.humidity['data'], unserialized.humidity['data'])) + self.assertTrue(_equal(w.humidity, unserialized.humidity)) + self.assertTrue(_equal(w.irrigation, unserialized.irrigation)) + self.assertTrue(_equal(w.permeability, unserialized.permeability)) + self.assertTrue(_equal(w.watermap, unserialized.watermap)) + self.assertTrue(_equal(w.precipitation['thresholds'], unserialized.precipitation['thresholds'])) + self.assertTrue(_equal(w.precipitation['data'], unserialized.precipitation['data'])) + self.assertTrue(_equal(w.precipitation, unserialized.precipitation)) + self.assertTrue(_equal(w.temperature, unserialized.temperature)) + self.assertTrue(_equal(w.sea_depth, unserialized.sea_depth)) + self.assertEquals(w.seed, unserialized.seed) + self.assertEquals(w.n_plates, unserialized.n_plates) + self.assertTrue(_equal(w.ocean_level, unserialized.ocean_level)) + self.assertTrue(_equal(w.lake_map, unserialized.lake_map)) + self.assertTrue(_equal(w.river_map, unserialized.river_map)) + self.assertEquals(w.step, unserialized.step) + self.assertEqual(sorted(dir(w)), sorted(dir(unserialized))) + #self.assertEqual(w, unserialized) + finally: + if filename: + os.remove(filename) if __name__ == '__main__': unittest.main() diff --git a/worldengine/cli/main.py b/worldengine/cli/main.py index 86a53163..ddb58464 100644 --- a/worldengine/cli/main.py +++ b/worldengine/cli/main.py @@ -12,7 +12,7 @@ from worldengine.step import Step from worldengine.world import World from worldengine.version import __version__ -from worldengine.hdf5_serialization import save_hdf5 +from worldengine.hdf5_serialization import save_world_to_hdf5 VERSION = __version__ @@ -39,7 +39,7 @@ def generate_world(world_name, width, height, seed, num_plates, output_dir, if world_format == 'protobuf': f.write(w.protobuf_serialize()) elif world_format == 'hdf5': - save_hdf5(w, filename) + save_world_to_hdf5(w, filename) else: print("Unknown format '%s', not saving " % world_format) print("* world data saved in '%s'" % filename) diff --git a/worldengine/hdf5_serialization.py b/worldengine/hdf5_serialization.py index ca1504c8..5c933035 100644 --- a/worldengine/hdf5_serialization.py +++ b/worldengine/hdf5_serialization.py @@ -1,9 +1,11 @@ import h5py from worldengine.version import __version__ -from worldengine.biome import biome_name_to_index +from 
worldengine.biome import biome_name_to_index, biome_index_to_name +from worldengine.world import World, Step +import numpy -def save_hdf5(world, filename): +def save_world_to_hdf5(world, filename): f = h5py.File(filename, libver='latest', mode='w') general_grp = f.create_group("general") @@ -104,19 +106,19 @@ def save_hdf5(world, filename): for x in range(world.width): temperature_data[y, x] = world.temperature['data'][y][x] + # lake_map and river_map have inverted coordinates if hasattr(world, 'lake_map'): - lake_map_data = f.create_dataset("lake_map", (world.height, world.width), dtype='float') + lake_map_data = f.create_dataset("lake_map", (world.width, world.height), dtype='float') for y in range(world.height): for x in range(world.width): - # lake_map and river_map have inverted coordinates - lake_map_data[y, x] = world.lake_map[x][y] + lake_map_data[x, y] = world.lake_map[x][y] + # lake_map and river_map have inverted coordinates if hasattr(world, 'river_map'): - river_map_data = f.create_dataset("river_map", (world.height, world.width), dtype='float') + river_map_data = f.create_dataset("river_map", (world.width, world.height), dtype='float') for y in range(world.height): for x in range(world.width): - # lake_map and river_map have inverted coordinates - river_map_data[y, x] = world.river_map[x][y] + river_map_data[x, y] = world.river_map[x][y] generation_params_grp = f.create_group("generation_params") generation_params_grp['seed'] = world.seed @@ -124,4 +126,117 @@ def save_hdf5(world, filename): generation_params_grp['ocean_level'] = world.ocean_level generation_params_grp['step'] = world.step.name - f.close() \ No newline at end of file + f.close() + + +def _from_hdf5_quantiles(p_quantiles): + quantiles = {} + for p_quantile in p_quantiles: + quantiles[p_quantile.title()] = p_quantiles[p_quantile].value + return quantiles + + +def _from_hdf5_matrix_with_quantiles(p_matrix): + matrix = dict() + matrix['data'] = p_matrix['data'] + matrix['quantiles'] = _from_hdf5_quantiles(p_matrix['quantiles']) + return matrix + + +def load_world_to_hdf5(filename): + f = h5py.File(filename, libver='latest', mode='r') + + w = World(f['general/name'].value, + f['general/width'].value, + f['general/height'].value, + f['generation_params/seed'].value, + f['generation_params/n_plates'].value, + f['generation_params/ocean_level'].value, + Step.get_by_name(f['generation_params/step'].value)) + + # Elevation + e = numpy.array(f['elevation/data']) + e_th = [('sea', f['elevation/thresholds/sea'].value), + ('plain', f['elevation/thresholds/plain'].value), + ('hill', f['elevation/thresholds/hill'].value), + ('mountain', None)] + w.set_elevation(e, e_th) + + # Plates + w.set_plates(numpy.array(f['plates'])) + + # Ocean + w.set_ocean(numpy.array(f['ocean'])) + w.sea_depth = numpy.array(f['sea_depth']) + + # Biome + if 'biome' in f.keys(): + biome_data = [] + for y in range(w.height): + row = [] + for x in range(w.width): + value = f['biome'][y, x] + row.append(biome_index_to_name(value)) + biome_data.append(row) + biome = numpy.array(biome_data, dtype=object) + w.set_biome(biome) + + # Humidity + # FIXME: use setters + if 'humidity' in f.keys(): + w.humidity = _from_hdf5_matrix_with_quantiles(f['humidity']) + w.humidity['data'] = numpy.array(w.humidity['data']) # numpy conversion + + if 'irrigation' in f.keys(): + w.irrigation = numpy.array(f['irrigation']) + + if 'permeability' in f.keys(): + p = numpy.array(f['permeability/data']) + p_th = [ + ('low', f['permeability/thresholds/low'].value), + ('med', 
f['permeability/thresholds/med'].value), + ('hig', None) + ] + w.set_permeability(p, p_th) + + if 'watermap' in f.keys(): + w.watermap = dict() + w.watermap['data'] = numpy.array(f['watermap/data']) + w.watermap['thresholds'] = {} + w.watermap['thresholds']['creek'] = f['watermap/thresholds/creek'].value + w.watermap['thresholds']['river'] = f['watermap/thresholds/river'].value + w.watermap['thresholds']['main river'] = f['watermap/thresholds/mainriver'].value + + if 'precipitation' in f.keys(): + p = numpy.array(f['precipitation/data']) + p_th = [ + ('low', f['precipitation/thresholds/low'].value), + ('med', f['precipitation/thresholds/med'].value), + ('hig', None) + ] + w.set_precipitation(p, p_th) + + if 'temperature' in f.keys(): + t = numpy.array(f['temperature/data']) + t_th = [ + ('polar', f['temperature/thresholds/polar'].value), + ('alpine', f['temperature/thresholds/alpine'].value), + ('boreal', f['temperature/thresholds/boreal'].value), + ('cool', f['temperature/thresholds/cool'].value), + ('warm', f['temperature/thresholds/warm'].value), + ('subtropical', f['temperature/thresholds/subtropical'].value), + ('tropical', None) + ] + w.set_temperature(t, t_th) + + if 'lake_map' in f.keys(): + m = numpy.array(f['lake_map']) + w.set_lakemap(m) + + if 'river_map' in f.keys(): + m = numpy.array(f['river_map']) + w.set_rivermap(m) + + f.close() + + return w \ No newline at end of file From 8ea416696c6da6580835c31d868d899798b74f9e Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Tue, 27 Oct 2015 15:56:11 +0100 Subject: [PATCH 04/19] make tox aware of h5py --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 1cef3dea..00ea22d5 100644 --- a/tox.ini +++ b/tox.ini @@ -11,6 +11,7 @@ deps = protobuf six pypng + h5py [testenv] deps = From e4bb632291471d138fb6f4aa04b697d6fcea37f3 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Tue, 27 Oct 2015 16:26:50 +0100 Subject: [PATCH 05/19] install hdf5 for AppVeyor --- appveyor.yml | 1 + appveyor/install_hdf5.ps1 | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 appveyor/install_hdf5.ps1 diff --git a/appveyor.yml b/appveyor.yml index 99642f78..53e71d5d 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -43,6 +43,7 @@ init: - echo %path% install: + - "powershell appveyor\\install_hdf5.ps1" - "powershell appveyor\\install.ps1" - "%PYTHON%\\Scripts\\pip.exe install -r requirements-dev.txt" - mkdir %systemdrive%\temp diff --git a/appveyor/install_hdf5.ps1 b/appveyor/install_hdf5.ps1 new file mode 100644 index 00000000..65e1eafe --- /dev/null +++ b/appveyor/install_hdf5.ps1 @@ -0,0 +1,24 @@ +$URL = http://www.hdfgroup.org/ftp/HDF5/current/bin/windows/hdf5-1.8.15-patch1-win32-vs2013-shared.zip + +function main () { + $basedir = $pwd.Path + "\" + $filename = "hdf5.zip" + $filepath = $basedir + $filename + Write-Host "Downloading" $filename "from" $URL + $retry_attempts = 3 + for($i=0; $i -lt $retry_attempts; $i++){ + try { + $webclient.DownloadFile($URL, $filepath) + break + } + Catch [Exception]{ + Start-Sleep 1 + } + } + $outpath = $basedir + "\hdf5_unzipped" + [System.IO.Compression.ZipFile]::ExtractToDirectory($filepath, $outpath) + $msipath = $outpath + "\HDF5-1.8.15-win64.msi" + Invoke-Command -ScriptBlock { & cmd /c "msiexec.exe /i $msipath" /qn ADVANCED_OPTIONS=1 CHANNEL=100} +} + +main \ No newline at end of file From 2d887f6d17dcb22bb4036ca4c90fb37c9233e029 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Tue, 27 Oct 2015 16:28:19 +0100 Subject: [PATCH 06/19] 
install hdf5 package for Travis --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 6206feb2..4f9d3360 100644 --- a/.travis.yml +++ b/.travis.yml @@ -36,6 +36,7 @@ addons: - cmake - build-essential - libgdal-dev + - libhdf5-dev install: - echo "libgdal version `gdal-config --version`" From 5e5e59c337f91acf34125c623e88f1fa09c58627 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Fri, 6 Nov 2015 12:03:05 +0100 Subject: [PATCH 07/19] hdf5: use write_direct instead of copying arrays using loops --- worldengine/hdf5_serialization.py | 44 ++++++++----------------------- 1 file changed, 11 insertions(+), 33 deletions(-) diff --git a/worldengine/hdf5_serialization.py b/worldengine/hdf5_serialization.py index 5c933035..676a75ae 100644 --- a/worldengine/hdf5_serialization.py +++ b/worldengine/hdf5_serialization.py @@ -20,9 +20,7 @@ def save_world_to_hdf5(world, filename): elevation_ths_grp["plain"] = world.elevation['thresholds'][1][1] elevation_ths_grp["hill"] = world.elevation['thresholds'][2][1] elevation_data = elevation_grp.create_dataset("data", (world.height, world.width), dtype='float') - for y in range(world.height): - for x in range(world.width): - elevation_data[y, x] = world.elevation['data'][y][x] + elevation_data.write_direct(world.elevation['data']) plates_data = f.create_dataset("plates", (world.height, world.width), dtype='int') for y in range(world.height): @@ -30,14 +28,10 @@ def save_world_to_hdf5(world, filename): plates_data[y, x] = world.plates[y][x] ocean_data = f.create_dataset("ocean", (world.height, world.width), dtype='bool') - for y in range(world.height): - for x in range(world.width): - ocean_data[y, x] = world.ocean[y][x] + ocean_data.write_direct(world.ocean) sea_depth_data = f.create_dataset("sea_depth", (world.height, world.width), dtype='float') - for y in range(world.height): - for x in range(world.width): - sea_depth_data[y, x] = world.sea_depth[y][x] + sea_depth_data.write_direct(world.sea_depth) if hasattr(world, 'biome'): biome_data = f.create_dataset("biome", (world.height, world.width), dtype='int') @@ -51,15 +45,11 @@ def save_world_to_hdf5(world, filename): for k in world.humidity['quantiles'].keys(): humidity_quantiles_grp[k] = world.humidity['quantiles'][k] humidity_data = humidity_grp.create_dataset("data", (world.height, world.width), dtype='float') - for y in range(world.height): - for x in range(world.width): - humidity_data[y, x] = world.humidity['data'][y][x] + humidity_data.write_direct(world.humidity['data']) if hasattr(world, 'irrigation'): irrigation_data = f.create_dataset("irrigation", (world.height, world.width), dtype='float') - for y in range(world.height): - for x in range(world.width): - irrigation_data[y, x] = world.irrigation[y][x] + irrigation_data.write_direct(world.irrigation) if hasattr(world, 'permeability'): permeability_grp = f.create_group("permeability") @@ -67,9 +57,7 @@ def save_world_to_hdf5(world, filename): permeability_ths_grp['low'] = world.permeability['thresholds'][0][1] permeability_ths_grp['med'] = world.permeability['thresholds'][1][1] permeability_data = permeability_grp.create_dataset("data", (world.height, world.width), dtype='float') - for y in range(world.height): - for x in range(world.width): - permeability_data[y, x] = world.permeability['data'][y][x] + permeability_data.write_direct(world.permeability['data']) if hasattr(world, 'watermap'): watermap_grp = f.create_group("watermap") @@ -78,9 +66,7 @@ def save_world_to_hdf5(world, filename): 
watermap_ths_grp['river'] = world.watermap['thresholds']['river'] watermap_ths_grp['mainriver'] = world.watermap['thresholds']['main river'] watermap_data = watermap_grp.create_dataset("data", (world.height, world.width), dtype='float') - for y in range(world.height): - for x in range(world.width): - watermap_data[y, x] = world.watermap['data'][y][x] + watermap_data.write_direct(world.watermap['data']) if hasattr(world, 'precipitation'): precipitation_grp = f.create_group("precipitation") @@ -88,9 +74,7 @@ def save_world_to_hdf5(world, filename): precipitation_ths_grp['low'] = world.precipitation['thresholds'][0][1] precipitation_ths_grp['med'] = world.precipitation['thresholds'][1][1] precipitation_data = precipitation_grp.create_dataset("data", (world.height, world.width), dtype='float') - for y in range(world.height): - for x in range(world.width): - precipitation_data[y, x] = world.precipitation['data'][y][x] + precipitation_data.write_direct(world.precipitation['data']) if hasattr(world, 'temperature'): temperature_grp = f.create_group("temperature") @@ -102,23 +86,17 @@ def save_world_to_hdf5(world, filename): temperature_ths_grp['warm'] = world.temperature['thresholds'][4][1] temperature_ths_grp['subtropical'] = world.temperature['thresholds'][5][1] temperature_data = temperature_grp.create_dataset("data", (world.height, world.width), dtype='float') - for y in range(world.height): - for x in range(world.width): - temperature_data[y, x] = world.temperature['data'][y][x] + temperature_data.write_direct(world.temperature['data']) # lake_map and river_map have inverted coordinates if hasattr(world, 'lake_map'): lake_map_data = f.create_dataset("lake_map", (world.width, world.height), dtype='float') - for y in range(world.height): - for x in range(world.width): - lake_map_data[x, y] = world.lake_map[x][y] + lake_map_data.write_direct(world.lake_map) # lake_map and river_map have inverted coordinates if hasattr(world, 'river_map'): river_map_data = f.create_dataset("river_map", (world.width, world.height), dtype='float') - for y in range(world.height): - for x in range(world.width): - river_map_data[x, y] = world.river_map[x][y] + river_map_data.write_direct(world.river_map) generation_params_grp = f.create_group("generation_params") generation_params_grp['seed'] = world.seed From 9a67edab19fa61dbf3ffeb26238fd1a170ce5fef Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Fri, 6 Nov 2015 12:06:16 +0100 Subject: [PATCH 08/19] hdf5: use classes instead of strings to specify the datatype --- worldengine/hdf5_serialization.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/worldengine/hdf5_serialization.py b/worldengine/hdf5_serialization.py index 676a75ae..4d73f290 100644 --- a/worldengine/hdf5_serialization.py +++ b/worldengine/hdf5_serialization.py @@ -19,22 +19,22 @@ def save_world_to_hdf5(world, filename): elevation_ths_grp["sea"] = world.elevation['thresholds'][0][1] elevation_ths_grp["plain"] = world.elevation['thresholds'][1][1] elevation_ths_grp["hill"] = world.elevation['thresholds'][2][1] - elevation_data = elevation_grp.create_dataset("data", (world.height, world.width), dtype='float') + elevation_data = elevation_grp.create_dataset("data", (world.height, world.width), dtype=float) elevation_data.write_direct(world.elevation['data']) - plates_data = f.create_dataset("plates", (world.height, world.width), dtype='int') + plates_data = f.create_dataset("plates", (world.height, world.width), dtype=int) for y in range(world.height): 
for x in range(world.width): plates_data[y, x] = world.plates[y][x] - ocean_data = f.create_dataset("ocean", (world.height, world.width), dtype='bool') + ocean_data = f.create_dataset("ocean", (world.height, world.width), dtype=bool) ocean_data.write_direct(world.ocean) - sea_depth_data = f.create_dataset("sea_depth", (world.height, world.width), dtype='float') + sea_depth_data = f.create_dataset("sea_depth", (world.height, world.width), dtype=float) sea_depth_data.write_direct(world.sea_depth) if hasattr(world, 'biome'): - biome_data = f.create_dataset("biome", (world.height, world.width), dtype='int') + biome_data = f.create_dataset("biome", (world.height, world.width), dtype=int) for y in range(world.height): for x in range(world.width): biome_data[y, x] = biome_name_to_index(world.biome[y][x]) @@ -44,11 +44,11 @@ def save_world_to_hdf5(world, filename): humidity_quantiles_grp = humidity_grp.create_group("quantiles") for k in world.humidity['quantiles'].keys(): humidity_quantiles_grp[k] = world.humidity['quantiles'][k] - humidity_data = humidity_grp.create_dataset("data", (world.height, world.width), dtype='float') + humidity_data = humidity_grp.create_dataset("data", (world.height, world.width), dtype=float) humidity_data.write_direct(world.humidity['data']) if hasattr(world, 'irrigation'): - irrigation_data = f.create_dataset("irrigation", (world.height, world.width), dtype='float') + irrigation_data = f.create_dataset("irrigation", (world.height, world.width), dtype=float) irrigation_data.write_direct(world.irrigation) if hasattr(world, 'permeability'): @@ -56,7 +56,7 @@ def save_world_to_hdf5(world, filename): permeability_ths_grp = permeability_grp.create_group("thresholds") permeability_ths_grp['low'] = world.permeability['thresholds'][0][1] permeability_ths_grp['med'] = world.permeability['thresholds'][1][1] - permeability_data = permeability_grp.create_dataset("data", (world.height, world.width), dtype='float') + permeability_data = permeability_grp.create_dataset("data", (world.height, world.width), dtype=float) permeability_data.write_direct(world.permeability['data']) if hasattr(world, 'watermap'): @@ -65,7 +65,7 @@ def save_world_to_hdf5(world, filename): watermap_ths_grp['creek'] = world.watermap['thresholds']['creek'] watermap_ths_grp['river'] = world.watermap['thresholds']['river'] watermap_ths_grp['mainriver'] = world.watermap['thresholds']['main river'] - watermap_data = watermap_grp.create_dataset("data", (world.height, world.width), dtype='float') + watermap_data = watermap_grp.create_dataset("data", (world.height, world.width), dtype=float) watermap_data.write_direct(world.watermap['data']) if hasattr(world, 'precipitation'): @@ -73,7 +73,7 @@ def save_world_to_hdf5(world, filename): precipitation_ths_grp = precipitation_grp.create_group("thresholds") precipitation_ths_grp['low'] = world.precipitation['thresholds'][0][1] precipitation_ths_grp['med'] = world.precipitation['thresholds'][1][1] - precipitation_data = precipitation_grp.create_dataset("data", (world.height, world.width), dtype='float') + precipitation_data = precipitation_grp.create_dataset("data", (world.height, world.width), dtype=float) precipitation_data.write_direct(world.precipitation['data']) if hasattr(world, 'temperature'): @@ -85,17 +85,17 @@ def save_world_to_hdf5(world, filename): temperature_ths_grp['cool'] = world.temperature['thresholds'][3][1] temperature_ths_grp['warm'] = world.temperature['thresholds'][4][1] temperature_ths_grp['subtropical'] = world.temperature['thresholds'][5][1] - 
temperature_data = temperature_grp.create_dataset("data", (world.height, world.width), dtype='float') + temperature_data = temperature_grp.create_dataset("data", (world.height, world.width), dtype=float) temperature_data.write_direct(world.temperature['data']) # lake_map and river_map have inverted coordinates if hasattr(world, 'lake_map'): - lake_map_data = f.create_dataset("lake_map", (world.width, world.height), dtype='float') + lake_map_data = f.create_dataset("lake_map", (world.width, world.height), dtype=float) lake_map_data.write_direct(world.lake_map) # lake_map and river_map have inverted coordinates if hasattr(world, 'river_map'): - river_map_data = f.create_dataset("river_map", (world.width, world.height), dtype='float') + river_map_data = f.create_dataset("river_map", (world.width, world.height), dtype=float) river_map_data.write_direct(world.river_map) generation_params_grp = f.create_group("generation_params") From 7f9c177bc5bbe7d8356c64c4749229ae18f6944c Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Fri, 6 Nov 2015 12:10:13 +0100 Subject: [PATCH 09/19] hdf5: make hdf5 optional --- worldengine/cli/main.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/worldengine/cli/main.py b/worldengine/cli/main.py index ddb58464..767ad17f 100644 --- a/worldengine/cli/main.py +++ b/worldengine/cli/main.py @@ -12,7 +12,11 @@ from worldengine.step import Step from worldengine.world import World from worldengine.version import __version__ -from worldengine.hdf5_serialization import save_world_to_hdf5 +try: + from worldengine.hdf5_serialization import save_world_to_hdf5 + HDF5_AVAILABLE = True +except: + HDF5_AVAILABLE = False VERSION = __version__ @@ -383,6 +387,9 @@ def main(): if args.protobuf and args.hdf5: usage(error="Protobuf and hdf5 are exclusive choices") + if args.hdf5 and not HDF5_AVAILABLE: + usage(error="HDF5 requires the presence of native libraries") + operation = "world" if args.OPERATOR is None: pass From 7803ee5a1a9c33bedd1c5ed61346c89b32873a11 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Fri, 6 Nov 2015 13:09:23 +0100 Subject: [PATCH 10/19] hdf5: use numpy dtypes --- worldengine/hdf5_serialization.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/worldengine/hdf5_serialization.py b/worldengine/hdf5_serialization.py index 4d73f290..67e2b6b9 100644 --- a/worldengine/hdf5_serialization.py +++ b/worldengine/hdf5_serialization.py @@ -19,22 +19,22 @@ def save_world_to_hdf5(world, filename): elevation_ths_grp["sea"] = world.elevation['thresholds'][0][1] elevation_ths_grp["plain"] = world.elevation['thresholds'][1][1] elevation_ths_grp["hill"] = world.elevation['thresholds'][2][1] - elevation_data = elevation_grp.create_dataset("data", (world.height, world.width), dtype=float) + elevation_data = elevation_grp.create_dataset("data", (world.height, world.width), dtype=numpy.float) elevation_data.write_direct(world.elevation['data']) - plates_data = f.create_dataset("plates", (world.height, world.width), dtype=int) + plates_data = f.create_dataset("plates", (world.height, world.width), dtype=numpy.uint16) for y in range(world.height): for x in range(world.width): plates_data[y, x] = world.plates[y][x] - ocean_data = f.create_dataset("ocean", (world.height, world.width), dtype=bool) + ocean_data = f.create_dataset("ocean", (world.height, world.width), dtype=numpy.bool) ocean_data.write_direct(world.ocean) - sea_depth_data = f.create_dataset("sea_depth", (world.height, world.width), 
dtype=float) + sea_depth_data = f.create_dataset("sea_depth", (world.height, world.width), dtype=numpy.float) sea_depth_data.write_direct(world.sea_depth) if hasattr(world, 'biome'): - biome_data = f.create_dataset("biome", (world.height, world.width), dtype=int) + biome_data = f.create_dataset("biome", (world.height, world.width), dtype=numpy.uint16) for y in range(world.height): for x in range(world.width): biome_data[y, x] = biome_name_to_index(world.biome[y][x]) @@ -44,11 +44,11 @@ def save_world_to_hdf5(world, filename): humidity_quantiles_grp = humidity_grp.create_group("quantiles") for k in world.humidity['quantiles'].keys(): humidity_quantiles_grp[k] = world.humidity['quantiles'][k] - humidity_data = humidity_grp.create_dataset("data", (world.height, world.width), dtype=float) + humidity_data = humidity_grp.create_dataset("data", (world.height, world.width), dtype=numpy.float) humidity_data.write_direct(world.humidity['data']) if hasattr(world, 'irrigation'): - irrigation_data = f.create_dataset("irrigation", (world.height, world.width), dtype=float) + irrigation_data = f.create_dataset("irrigation", (world.height, world.width), dtype=numpy.float) irrigation_data.write_direct(world.irrigation) if hasattr(world, 'permeability'): @@ -56,7 +56,7 @@ def save_world_to_hdf5(world, filename): permeability_ths_grp = permeability_grp.create_group("thresholds") permeability_ths_grp['low'] = world.permeability['thresholds'][0][1] permeability_ths_grp['med'] = world.permeability['thresholds'][1][1] - permeability_data = permeability_grp.create_dataset("data", (world.height, world.width), dtype=float) + permeability_data = permeability_grp.create_dataset("data", (world.height, world.width), dtype=numpy.float) permeability_data.write_direct(world.permeability['data']) if hasattr(world, 'watermap'): @@ -65,7 +65,7 @@ def save_world_to_hdf5(world, filename): watermap_ths_grp['creek'] = world.watermap['thresholds']['creek'] watermap_ths_grp['river'] = world.watermap['thresholds']['river'] watermap_ths_grp['mainriver'] = world.watermap['thresholds']['main river'] - watermap_data = watermap_grp.create_dataset("data", (world.height, world.width), dtype=float) + watermap_data = watermap_grp.create_dataset("data", (world.height, world.width), dtype=numpy.float) watermap_data.write_direct(world.watermap['data']) if hasattr(world, 'precipitation'): @@ -73,7 +73,7 @@ def save_world_to_hdf5(world, filename): precipitation_ths_grp = precipitation_grp.create_group("thresholds") precipitation_ths_grp['low'] = world.precipitation['thresholds'][0][1] precipitation_ths_grp['med'] = world.precipitation['thresholds'][1][1] - precipitation_data = precipitation_grp.create_dataset("data", (world.height, world.width), dtype=float) + precipitation_data = precipitation_grp.create_dataset("data", (world.height, world.width), dtype=numpy.float) precipitation_data.write_direct(world.precipitation['data']) if hasattr(world, 'temperature'): @@ -85,17 +85,17 @@ def save_world_to_hdf5(world, filename): temperature_ths_grp['cool'] = world.temperature['thresholds'][3][1] temperature_ths_grp['warm'] = world.temperature['thresholds'][4][1] temperature_ths_grp['subtropical'] = world.temperature['thresholds'][5][1] - temperature_data = temperature_grp.create_dataset("data", (world.height, world.width), dtype=float) + temperature_data = temperature_grp.create_dataset("data", (world.height, world.width), dtype=numpy.float) temperature_data.write_direct(world.temperature['data']) # lake_map and river_map have inverted coordinates if 
hasattr(world, 'lake_map'): - lake_map_data = f.create_dataset("lake_map", (world.width, world.height), dtype=float) + lake_map_data = f.create_dataset("lake_map", (world.width, world.height), dtype=numpy.float) lake_map_data.write_direct(world.lake_map) # lake_map and river_map have inverted coordinates if hasattr(world, 'river_map'): - river_map_data = f.create_dataset("river_map", (world.width, world.height), dtype=float) + river_map_data = f.create_dataset("river_map", (world.width, world.height), dtype=numpy.float) river_map_data.write_direct(world.river_map) generation_params_grp = f.create_group("generation_params") From 3f7f62958cf4524c61ac871d053270a7c5df1bc4 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Fri, 6 Nov 2015 14:07:54 +0100 Subject: [PATCH 11/19] hdf5: create requirements-hdf5 --- requirements-hdf5.txt | 3 +++ requirements.txt | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 requirements-hdf5.txt diff --git a/requirements-hdf5.txt b/requirements-hdf5.txt new file mode 100644 index 00000000..a963e2a1 --- /dev/null +++ b/requirements-hdf5.txt @@ -0,0 +1,3 @@ +# For export support, you'll need HDF5 +-r requirements.txt +h5py==2.5.0 diff --git a/requirements.txt b/requirements.txt index e955ca4e..47f81745 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,4 +5,3 @@ pypng==0.0.18 PyPlatec==1.4.0 protobuf==3.0.0a3 six==1.10.0 -h5py==2.5.0 From 867fcf754e3512489ae0def1a720f94be0b8c682 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Sat, 14 Nov 2015 20:03:17 +0100 Subject: [PATCH 12/19] hdf5: trying to restrict the numpy versions --- setup.py | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index e4e90a0c..b7c6a7ea 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ 'entry_points': { 'console_scripts': ['worldengine=worldengine.cli.main:main'], }, - 'install_requires': ['PyPlatec==1.4.0', 'pypng>=0.0.18', 'numpy>=1.9.2', + 'install_requires': ['PyPlatec==1.4.0', 'pypng>=0.0.18', 'numpy>=1.9.2, <= 1.10.0.post2', 'argparse==1.2.1', 'noise==1.2.2', 'protobuf>=2.6.0'], 'license': 'MIT License' } diff --git a/tox.ini b/tox.ini index 00ea22d5..43f5369e 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ deps = [testenv] deps = coverage - numpy + numpy==1.10.0.post2 pygdal==1.10.0.1 {[base]deps} From 1b808507471dba33c28b2e7ba381f20aad94e794 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Sat, 14 Nov 2015 20:34:24 +0100 Subject: [PATCH 13/19] hdf5: change numpy version in tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 43f5369e..5b687830 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ deps = [testenv] deps = coverage - numpy==1.10.0.post2 + numpy==1.10.1 pygdal==1.10.0.1 {[base]deps} From 1c457469c9cea3cdf1738fcb08ab9621d8bba222 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Sat, 14 Nov 2015 21:27:09 +0100 Subject: [PATCH 14/19] hdf5: install a different version of gdal native libraries --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 4f9d3360..bac28128 100644 --- a/.travis.yml +++ b/.travis.yml @@ -36,7 +36,7 @@ addons: - cmake - build-essential - libgdal-dev - - libhdf5-dev + - libhdf5-serial-dev install: - echo "libgdal version `gdal-config --version`" From b911d00218f524625c02a0fb0316902540e15eb8 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Sun, 15 Nov 2015 08:45:27 +0100 Subject: [PATCH 15/19] hdf5: removing 
reference to protobuf --- worldengine/cli/main.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/worldengine/cli/main.py b/worldengine/cli/main.py index 767ad17f..939bb894 100644 --- a/worldengine/cli/main.py +++ b/worldengine/cli/main.py @@ -384,9 +384,6 @@ def main(): if args.number_of_plates < 1 or args.number_of_plates > 100: usage(error="Number of plates should be in [1, 100]") - if args.protobuf and args.hdf5: - usage(error="Protobuf and hdf5 are exclusive choices") - if args.hdf5 and not HDF5_AVAILABLE: usage(error="HDF5 requires the presence of native libraries") From fcba3a87c1617587a2797a99e253e6b5894af102 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Sun, 15 Nov 2015 08:54:03 +0100 Subject: [PATCH 16/19] hdf5: switching dimensions of river and lake map --- worldengine/hdf5_serialization.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/worldengine/hdf5_serialization.py b/worldengine/hdf5_serialization.py index 67e2b6b9..a9f944d0 100644 --- a/worldengine/hdf5_serialization.py +++ b/worldengine/hdf5_serialization.py @@ -90,12 +90,12 @@ def save_world_to_hdf5(world, filename): # lake_map and river_map have inverted coordinates if hasattr(world, 'lake_map'): - lake_map_data = f.create_dataset("lake_map", (world.width, world.height), dtype=numpy.float) + lake_map_data = f.create_dataset("lake_map", (world.height, world.width), dtype=numpy.float) lake_map_data.write_direct(world.lake_map) # lake_map and river_map have inverted coordinates if hasattr(world, 'river_map'): - river_map_data = f.create_dataset("river_map", (world.width, world.height), dtype=numpy.float) + river_map_data = f.create_dataset("river_map", (world.height, world.width), dtype=numpy.float) river_map_data.write_direct(world.river_map) generation_params_grp = f.create_group("generation_params") From ec83aa7f094263492896866916c7fbdcfffc39e3 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Sun, 15 Nov 2015 11:01:11 +0100 Subject: [PATCH 17/19] hdf5: changing numpy version --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 5b687830..6a44a172 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ deps = [testenv] deps = coverage - numpy==1.10.1 + numpy==1.9.2 pygdal==1.10.0.1 {[base]deps} From 6d4206f4504994b53274c61b2b99cd5241d4f633 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Sun, 15 Nov 2015 11:03:33 +0100 Subject: [PATCH 18/19] hdf5: do not open file in cli main when saving to hdf5 --- worldengine/cli/main.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/worldengine/cli/main.py b/worldengine/cli/main.py index 939bb894..049c7a07 100644 --- a/worldengine/cli/main.py +++ b/worldengine/cli/main.py @@ -39,13 +39,13 @@ def generate_world(world_name, width, height, seed, num_plates, output_dir, # Save data filename = "%s/%s.world" % (output_dir, world_name) - with open(filename, "wb") as f: - if world_format == 'protobuf': + if world_format == 'protobuf': + with open(filename, "wb") as f: f.write(w.protobuf_serialize()) - elif world_format == 'hdf5': - save_world_to_hdf5(w, filename) - else: - print("Unknown format '%s', not saving " % world_format) + elif world_format == 'hdf5': + save_world_to_hdf5(w, filename) + else: + print("Unknown format '%s', not saving " % world_format) print("* world data saved in '%s'" % filename) sys.stdout.flush() From e0b8a8d62673a78b02aea9f565e2c69e46c33fe9 Mon Sep 17 00:00:00 2001 From: Federico Tomassetti Date: Sun, 15 Nov 2015 11:04:25 +0100 Subject: 
[PATCH 19/19] hdf5: correct the name of the default format --- worldengine/cli/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/worldengine/cli/main.py b/worldengine/cli/main.py index 049c7a07..5a27c08e 100644 --- a/worldengine/cli/main.py +++ b/worldengine/cli/main.py @@ -238,7 +238,7 @@ def main(): parser.add_argument('--hdf5', dest='hdf5', action="store_true", help="Save world file using HDF5 format. " + - "Default = store using pickle format", + "Default = store using protobuf format", default=False) parser.add_argument('-s', '--seed', dest='seed', type=int, help="Use seed=N to initialize the pseudo-random " +
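
---

The sketch below is an illustrative round-trip of the HDF5 serialization introduced by this patch series, not part of the series itself. The function names (save_world_to_hdf5, load_world_to_hdf5) and the world_gen/Step imports are taken from worldengine/hdf5_serialization.py and tests/serialization_test.py above; the world name, dimensions, seed, and output filename are arbitrary example values.

    # Generate a small world, write it to an HDF5 file, and read it back.
    from worldengine.plates import Step, world_gen
    from worldengine.hdf5_serialization import save_world_to_hdf5, load_world_to_hdf5

    w = world_gen("Dummy", 32, 16, 1, step=Step.get_by_name("full"))
    save_world_to_hdf5(w, "dummy.world")        # writes the HDF5 file to disk
    w2 = load_world_to_hdf5("dummy.world")      # reads it back into a World instance

    # Spot-check that a few generation parameters survived the round trip.
    assert w.seed == w2.seed
    assert w.elevation['thresholds'] == w2.elevation['thresholds']

From the command line, the same path is exercised with the --hdf5 flag added in patch 01 (worldengine ... --hdf5), which makes generate_world() call save_world_to_hdf5() instead of the default protobuf writer.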