From fcb9c6a35de617a5b0642f94396e65fd43f6d94d Mon Sep 17 00:00:00 2001 From: julianhille Date: Wed, 5 Mar 2025 18:18:09 +0100 Subject: [PATCH 01/29] split open, write and close functionalities of H5MD files, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 168 +++++++++++++++++++++++++++++++++++ mrmd/io/DumpH5MDParallel.hpp | 9 ++ mrmd/io/H5MD.test.cpp | 16 ++++ 3 files changed, 193 insertions(+) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index bd262005..5e8ca281 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -39,9 +39,21 @@ class DumpH5MDParallelImpl public: explicit DumpH5MDParallelImpl(DumpH5MDParallel& config) : config_(config) {} + std::vector open(const std::string& filename); + void dumpStep(const hid_t& file_id, + const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t dt); + void close(const hid_t& file_id, const hid_t& group1, const hid_t& group2); void dump(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms); + hid_t openGroup(const hid_t& fileId, const std::string& groupName) const; + void closeGroup(const hid_t& groupId) const; + std::string openBox(const hid_t& fileId) const; + void closeBox(const hid_t& boxGroupId, const hid_t& edgesGroupId) const; + void writeBoxStep(const hid_t& fileId, const std::string& boxGroupName, const data::Subdomain& subdomain) const; private: void updateCache(const data::HostAtoms& atoms); @@ -142,6 +154,59 @@ void DumpH5MDParallelImpl::writeHeader(hid_t fileId) const CHECK_HDF5(H5LTset_attribute_string(fileId, "/h5md/creator", "version", MRMD_VERSION.c_str())); } +std::string DumpH5MDParallelImpl::openBox(const hid_t& fileId) const +{ + std::string boxGroupName = "/particles/" + config_.particleGroupName + "/box"; + + auto boxGroupId = openGroup(fileId, boxGroupName); + + std::vector dims = {3}; + CHECK_HDF5( + H5LTset_attribute_int(fileId, boxGroupName.c_str(), "dimension", dims.data(), 
dims.size())); + + auto boundaryType = H5Tcopy(H5T_C_S1); + CHECK_HDF5(H5Tset_size(boundaryType, 8)); + CHECK_HDF5(H5Tset_strpad(boundaryType, H5T_STR_NULLPAD)); + std::vector boundaryDims = {3}; + auto space = H5Screate_simple(int_c(boundaryDims.size()), boundaryDims.data(), nullptr); + auto att = H5Acreate(boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, H5P_DEFAULT); + CHECK_HDF5(H5Awrite(att, boundaryType, "periodicperiodicperiodic")); + CHECK_HDF5(H5Aclose(att)); + CHECK_HDF5(H5Sclose(space)); + CHECK_HDF5(H5Tclose(boundaryType)); + + CHECK_HDF5(H5Gclose(boxGroupId)); + return boxGroupName; +} + +void DumpH5MDParallelImpl::writeBoxStep(const hid_t& fileId, const std::string& boxGroupName, const data::Subdomain& subdomain) const +{ + std::string edgesGroupName = boxGroupName + "/edges"; + auto edgesGroupId = + H5Gcreate(fileId, edgesGroupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + + std::vector edgesStepDims = {1}; + std::vector step = {0}; + + std::string stepDatasetName = edgesGroupName + "/step"; + CHECK_HDF5(H5LTmake_dataset(edgesGroupId, + stepDatasetName.c_str(), + 1, + edgesStepDims.data(), + typeToHDF5(), + step.data())); + std::vector edgesValueDims = {1, 3}; + std::string valueDatasetName = edgesGroupName + "/value"; + CHECK_HDF5(H5LTmake_dataset(edgesGroupId, + valueDatasetName.c_str(), + 2, + edgesValueDims.data(), + typeToHDF5(), + subdomain.diameter.data())); + + CHECK_HDF5(H5Gclose(edgesGroupId)); +} + void DumpH5MDParallelImpl::writeBox(hid_t fileId, const data::Subdomain& subdomain) const { std::string groupName = "/particles/" + config_.particleGroupName + "/box"; @@ -462,6 +527,67 @@ void DumpH5MDParallelImpl::updateCache(const data::HostAtoms& atoms) if (config_.mpiInfo->rank == 0) particleOffset = 0; } +std::vector DumpH5MDParallelImpl::open(const std::string& filename) +{ + MPI_Info info = MPI_INFO_NULL; + + auto plist = CHECK_HDF5(H5Pcreate(H5P_FILE_ACCESS)); + CHECK_HDF5(H5Pset_fapl_mpio(plist, config_.mpiInfo->comm, 
info)); + + auto file_id = CHECK_HDF5(H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, plist)); + + auto group1 = + CHECK_HDF5(H5Gcreate(file_id, "/particles", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + std::string particleGroup = "/particles/" + config_.particleGroupName; + auto group2 = CHECK_HDF5( + H5Gcreate(file_id, particleGroup.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + + writeHeader(file_id); + + return {file_id, group1, group2}; +} + +hid_t DumpH5MDParallelImpl::openGroup(const hid_t& fileId, const std::string& groupName) const +{ + auto groupId = + CHECK_HDF5(H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + return groupId; +} + +void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const +{ + CHECK_HDF5(H5Gclose(groupId)); +} + +void DumpH5MDParallelImpl::dumpStep(const hid_t& file_id, + const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t /*step*/, + const real_t /*dt*/) +{ + data::HostAtoms h_atoms(atoms); // NOLINT + + updateCache(h_atoms); + + auto boxGroupName = openBox(file_id); + writeBoxStep(file_id, boxGroupName, subdomain); + if (config_.dumpPos) writePos(file_id, h_atoms); + if (config_.dumpVel) writeVel(file_id, h_atoms); + if (config_.dumpForce) writeForce(file_id, h_atoms); + if (config_.dumpType) writeType(file_id, h_atoms); + if (config_.dumpMass) writeMass(file_id, h_atoms); + if (config_.dumpCharge) writeCharge(file_id, h_atoms); + if (config_.dumpRelativeMass) writeRelativeMass(file_id, h_atoms); +} + +void DumpH5MDParallelImpl::close(const hid_t& file_id, const hid_t& group1, const hid_t& group2) +{ + CHECK_HDF5(H5Gclose(group1)); + CHECK_HDF5(H5Gclose(group2)); + + CHECK_HDF5(H5Fclose(file_id)); +} + void DumpH5MDParallelImpl::dump(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms) @@ -501,6 +627,29 @@ void DumpH5MDParallelImpl::dump(const std::string& filename, } // namespace impl +std::vector DumpH5MDParallel::open(const 
std::string& filename) +{ + impl::DumpH5MDParallelImpl helper(*this); + auto dump_ids = helper.open(filename); + return dump_ids; +} + +void DumpH5MDParallel::dumpStep(const hid_t& file_id, + const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t dt) +{ + impl::DumpH5MDParallelImpl helper(*this); + helper.dumpStep(file_id, subdomain, atoms, step, dt); +} + +void DumpH5MDParallel::close(const hid_t& file_id, const hid_t& group1, const hid_t& group2) +{ + impl::DumpH5MDParallelImpl helper(*this); + helper.close(file_id, group1, group2); +} + void DumpH5MDParallel::dump(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms) @@ -509,6 +658,25 @@ void DumpH5MDParallel::dump(const std::string& filename, helper.dump(filename, subdomain, atoms); } #else +void DumpH5MDParallel::open(const std::string& /*filename*/) +{ + MRMD_HOST_CHECK(false, "HDF5 Support not available!"); + exit(EXIT_FAILURE); +} + +void DumpH5MDParallel::close(const hid_t& /*file_id*/); +{ + MRMD_HOST_CHECK(false, "HDF5 Support not available!"); + exit(EXIT_FAILURE); +} +void DumpH5MDParallel::dumpStep(const hid_t& /*file_id*/, + const data::Subdomain& /*subdomain*/, + const data::Atoms& /*atoms*/) +{ +MRMD_HOST_CHECK(false, "HDF5 Support not available!"); +exit(EXIT_FAILURE); +} + void DumpH5MDParallel::dump(const std::string& /*filename*/, const data::Subdomain& /*subdomain*/, const data::Atoms& /*atoms*/) diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index 64d54fcf..013ecb97 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -32,6 +32,15 @@ class DumpH5MDParallel : mpiInfo(mpiInfoArg), author(authorArg), particleGroupName(particleGroupNameArg) { } + std::vector open(const std::string& filename); + + void dumpStep(const hid_t& file_id, + const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t dt); + + void close(const hid_t& 
file_id, const hid_t& group1, const hid_t& group2); void dump(const std::string& filename, const data::Subdomain& subdomain, diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index 0a683280..9eeabdcc 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -117,5 +117,21 @@ TEST(H5MD, dump) EXPECT_FLOAT_EQ(h_atoms1.getRelativeMass()(idx), h_atoms2.getRelativeMass()(idx)); } } + +TEST(H5MD, dumpMultipleSteps) +{ + auto mpiInfo = std::make_shared(MPI_COMM_WORLD); + + auto subdomain1 = data::Subdomain({1_r, 2_r, 3_r}, {4_r, 6_r, 8_r}, 0.5_r); + auto atoms1 = getAtoms(mpiInfo); + + auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); + + auto dump_ids = dump.open("dummyMultipleSteps.hdf5"); + + dump.dumpStep(dump_ids[0], subdomain1, atoms1, 0, 0_r); + + dump.close(dump_ids[0], dump_ids[1], dump_ids[2]); +} } // namespace io } // namespace mrmd \ No newline at end of file From 09b5d85a36793fb57a0195cb1eac90e43a564909 Mon Sep 17 00:00:00 2001 From: julianhille Date: Thu, 6 Mar 2025 11:27:31 +0100 Subject: [PATCH 02/29] split writeBox function, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 236 ++++++++++++++++++----------------- 1 file changed, 120 insertions(+), 116 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 5e8ca281..e7d3a263 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -51,9 +51,9 @@ class DumpH5MDParallelImpl const data::Atoms& atoms); hid_t openGroup(const hid_t& fileId, const std::string& groupName) const; void closeGroup(const hid_t& groupId) const; - std::string openBox(const hid_t& fileId) const; + std::vector openBox(const hid_t& fileId) const; void closeBox(const hid_t& boxGroupId, const hid_t& edgesGroupId) const; - void writeBoxStep(const hid_t& fileId, const std::string& boxGroupName, const data::Subdomain& subdomain) const; + void writeBoxStep(const hid_t& boxGroupId, const data::Subdomain& subdomain) const; private: void updateCache(const data::HostAtoms& atoms); 
@@ -83,6 +83,124 @@ class DumpH5MDParallelImpl int64_t particleOffset = -1; }; +std::vector DumpH5MDParallelImpl::open(const std::string& filename) +{ + MPI_Info info = MPI_INFO_NULL; + + auto plist = CHECK_HDF5(H5Pcreate(H5P_FILE_ACCESS)); + CHECK_HDF5(H5Pset_fapl_mpio(plist, config_.mpiInfo->comm, info)); + + auto file_id = CHECK_HDF5(H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, plist)); + + auto group1 = + CHECK_HDF5(H5Gcreate(file_id, "/particles", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + std::string particleGroup = "/particles/" + config_.particleGroupName; + auto group2 = CHECK_HDF5( + H5Gcreate(file_id, particleGroup.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + + writeHeader(file_id); + + return {file_id, group1, group2}; +} + +hid_t DumpH5MDParallelImpl::openGroup(const hid_t& fileId, const std::string& groupName) const +{ + auto groupId = + CHECK_HDF5(H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + return groupId; +} + +void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const +{ + CHECK_HDF5(H5Gclose(groupId)); +} + +void DumpH5MDParallelImpl::dumpStep(const hid_t& file_id, + const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t /*step*/, + const real_t /*dt*/) +{ + data::HostAtoms h_atoms(atoms); // NOLINT + + updateCache(h_atoms); + + auto boxIds = openBox(file_id); + writeBoxStep(boxIds[1], subdomain); + closeBox(boxIds[0], boxIds[1]); +} + +void DumpH5MDParallelImpl::close(const hid_t& file_id, const hid_t& group1, const hid_t& group2) +{ + CHECK_HDF5(H5Gclose(group1)); + CHECK_HDF5(H5Gclose(group2)); + + CHECK_HDF5(H5Fclose(file_id)); +} + +std::vector DumpH5MDParallelImpl::openBox(const hid_t& fileId) const +{ + std::string boxGroupName = "/particles/" + config_.particleGroupName + "/box"; + + auto boxGroupId = openGroup(fileId, boxGroupName); + + std::vector dims = {3}; + CHECK_HDF5( + H5LTset_attribute_int(fileId, boxGroupName.c_str(), "dimension", dims.data(), 
dims.size())); + + auto boundaryType = H5Tcopy(H5T_C_S1); + CHECK_HDF5(H5Tset_size(boundaryType, 8)); + CHECK_HDF5(H5Tset_strpad(boundaryType, H5T_STR_NULLPAD)); + std::vector boundaryDims = {3}; + auto space = H5Screate_simple(int_c(boundaryDims.size()), boundaryDims.data(), nullptr); + auto att = H5Acreate(boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, H5P_DEFAULT); + CHECK_HDF5(H5Awrite(att, boundaryType, "periodicperiodicperiodic")); + CHECK_HDF5(H5Aclose(att)); + CHECK_HDF5(H5Sclose(space)); + CHECK_HDF5(H5Tclose(boundaryType)); + + auto edgesGroupId = + H5Gcreate(boxGroupId, "edges", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + + return {boxGroupId, edgesGroupId}; +} + +void DumpH5MDParallelImpl::closeBox(const hid_t& boxGroupId, const hid_t& edgesGroupId) const +{ + CHECK_HDF5(H5Gclose(edgesGroupId)); + CHECK_HDF5(H5Gclose(boxGroupId)); +} + +void DumpH5MDParallelImpl::writeBoxStep(const hid_t& edgesGroupId, const data::Subdomain& subdomain) const +{ + std::vector edgesStepDims = {1}; + std::vector step = {0}; + + CHECK_HDF5(H5LTmake_dataset(edgesGroupId, + "step", + 1, + edgesStepDims.data(), + typeToHDF5(), + step.data())); + std::vector edgesValueDims = {1, 3}; + CHECK_HDF5(H5LTmake_dataset(edgesGroupId, + "value", + 2, + edgesValueDims.data(), + typeToHDF5(), + subdomain.diameter.data())); +} + + + + + + + + + + + template void DumpH5MDParallelImpl::writeParallel(hid_t fileId, const std::string& name, @@ -154,59 +272,6 @@ void DumpH5MDParallelImpl::writeHeader(hid_t fileId) const CHECK_HDF5(H5LTset_attribute_string(fileId, "/h5md/creator", "version", MRMD_VERSION.c_str())); } -std::string DumpH5MDParallelImpl::openBox(const hid_t& fileId) const -{ - std::string boxGroupName = "/particles/" + config_.particleGroupName + "/box"; - - auto boxGroupId = openGroup(fileId, boxGroupName); - - std::vector dims = {3}; - CHECK_HDF5( - H5LTset_attribute_int(fileId, boxGroupName.c_str(), "dimension", dims.data(), dims.size())); - - auto boundaryType = 
H5Tcopy(H5T_C_S1); - CHECK_HDF5(H5Tset_size(boundaryType, 8)); - CHECK_HDF5(H5Tset_strpad(boundaryType, H5T_STR_NULLPAD)); - std::vector boundaryDims = {3}; - auto space = H5Screate_simple(int_c(boundaryDims.size()), boundaryDims.data(), nullptr); - auto att = H5Acreate(boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, H5P_DEFAULT); - CHECK_HDF5(H5Awrite(att, boundaryType, "periodicperiodicperiodic")); - CHECK_HDF5(H5Aclose(att)); - CHECK_HDF5(H5Sclose(space)); - CHECK_HDF5(H5Tclose(boundaryType)); - - CHECK_HDF5(H5Gclose(boxGroupId)); - return boxGroupName; -} - -void DumpH5MDParallelImpl::writeBoxStep(const hid_t& fileId, const std::string& boxGroupName, const data::Subdomain& subdomain) const -{ - std::string edgesGroupName = boxGroupName + "/edges"; - auto edgesGroupId = - H5Gcreate(fileId, edgesGroupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); - - std::vector edgesStepDims = {1}; - std::vector step = {0}; - - std::string stepDatasetName = edgesGroupName + "/step"; - CHECK_HDF5(H5LTmake_dataset(edgesGroupId, - stepDatasetName.c_str(), - 1, - edgesStepDims.data(), - typeToHDF5(), - step.data())); - std::vector edgesValueDims = {1, 3}; - std::string valueDatasetName = edgesGroupName + "/value"; - CHECK_HDF5(H5LTmake_dataset(edgesGroupId, - valueDatasetName.c_str(), - 2, - edgesValueDims.data(), - typeToHDF5(), - subdomain.diameter.data())); - - CHECK_HDF5(H5Gclose(edgesGroupId)); -} - void DumpH5MDParallelImpl::writeBox(hid_t fileId, const data::Subdomain& subdomain) const { std::string groupName = "/particles/" + config_.particleGroupName + "/box"; @@ -527,67 +592,6 @@ void DumpH5MDParallelImpl::updateCache(const data::HostAtoms& atoms) if (config_.mpiInfo->rank == 0) particleOffset = 0; } -std::vector DumpH5MDParallelImpl::open(const std::string& filename) -{ - MPI_Info info = MPI_INFO_NULL; - - auto plist = CHECK_HDF5(H5Pcreate(H5P_FILE_ACCESS)); - CHECK_HDF5(H5Pset_fapl_mpio(plist, config_.mpiInfo->comm, info)); - - auto file_id = 
CHECK_HDF5(H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, plist)); - - auto group1 = - CHECK_HDF5(H5Gcreate(file_id, "/particles", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); - std::string particleGroup = "/particles/" + config_.particleGroupName; - auto group2 = CHECK_HDF5( - H5Gcreate(file_id, particleGroup.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); - - writeHeader(file_id); - - return {file_id, group1, group2}; -} - -hid_t DumpH5MDParallelImpl::openGroup(const hid_t& fileId, const std::string& groupName) const -{ - auto groupId = - CHECK_HDF5(H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); - return groupId; -} - -void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const -{ - CHECK_HDF5(H5Gclose(groupId)); -} - -void DumpH5MDParallelImpl::dumpStep(const hid_t& file_id, - const data::Subdomain& subdomain, - const data::Atoms& atoms, - const idx_t /*step*/, - const real_t /*dt*/) -{ - data::HostAtoms h_atoms(atoms); // NOLINT - - updateCache(h_atoms); - - auto boxGroupName = openBox(file_id); - writeBoxStep(file_id, boxGroupName, subdomain); - if (config_.dumpPos) writePos(file_id, h_atoms); - if (config_.dumpVel) writeVel(file_id, h_atoms); - if (config_.dumpForce) writeForce(file_id, h_atoms); - if (config_.dumpType) writeType(file_id, h_atoms); - if (config_.dumpMass) writeMass(file_id, h_atoms); - if (config_.dumpCharge) writeCharge(file_id, h_atoms); - if (config_.dumpRelativeMass) writeRelativeMass(file_id, h_atoms); -} - -void DumpH5MDParallelImpl::close(const hid_t& file_id, const hid_t& group1, const hid_t& group2) -{ - CHECK_HDF5(H5Gclose(group1)); - CHECK_HDF5(H5Gclose(group2)); - - CHECK_HDF5(H5Fclose(file_id)); -} - void DumpH5MDParallelImpl::dump(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms) From 8dce55be044217096059b95a212f83fe57d31c63 Mon Sep 17 00:00:00 2001 From: julianhille Date: Thu, 6 Mar 2025 17:21:50 +0100 Subject: [PATCH 03/29] isolated step-wise 
writing of step number, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 73 ++++++++++++++++++++++++------------ 1 file changed, 50 insertions(+), 23 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index e7d3a263..1cdea5bd 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -46,14 +46,16 @@ class DumpH5MDParallelImpl const idx_t step, const real_t dt); void close(const hid_t& file_id, const hid_t& group1, const hid_t& group2); - void dump(const std::string& filename, - const data::Subdomain& subdomain, - const data::Atoms& atoms); hid_t openGroup(const hid_t& fileId, const std::string& groupName) const; void closeGroup(const hid_t& groupId) const; std::vector openBox(const hid_t& fileId) const; void closeBox(const hid_t& boxGroupId, const hid_t& edgesGroupId) const; - void writeBoxStep(const hid_t& boxGroupId, const data::Subdomain& subdomain) const; + void writeStep(const hid_t& stepSetId, const idx_t& step) const; + hid_t createStepSet(const hid_t& edgesGroupId, const hsize_t* dims, const hsize_t& ndims) const; + + void dump(const std::string& filename, + const data::Subdomain& subdomain, + const data::Atoms& atoms); private: void updateCache(const data::HostAtoms& atoms); @@ -118,7 +120,7 @@ void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const void DumpH5MDParallelImpl::dumpStep(const hid_t& file_id, const data::Subdomain& subdomain, const data::Atoms& atoms, - const idx_t /*step*/, + const idx_t step, const real_t /*dt*/) { data::HostAtoms h_atoms(atoms); // NOLINT @@ -126,7 +128,7 @@ void DumpH5MDParallelImpl::dumpStep(const hid_t& file_id, updateCache(h_atoms); auto boxIds = openBox(file_id); - writeBoxStep(boxIds[1], subdomain); + writeStep(boxIds[2], step); closeBox(boxIds[0], boxIds[1]); } @@ -161,8 +163,13 @@ std::vector DumpH5MDParallelImpl::openBox(const hid_t& fileId) const auto edgesGroupId = H5Gcreate(boxGroupId, "edges", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + + const 
hsize_t stepNumDims = 1; + hsize_t stepDimsCreate[stepNumDims] = {0}; - return {boxGroupId, edgesGroupId}; + auto stepSetId = createStepSet(edgesGroupId, stepDimsCreate, stepNumDims); + + return {boxGroupId, edgesGroupId, stepSetId}; } void DumpH5MDParallelImpl::closeBox(const hid_t& boxGroupId, const hid_t& edgesGroupId) const @@ -171,24 +178,44 @@ void DumpH5MDParallelImpl::closeBox(const hid_t& boxGroupId, const hid_t& edgesG CHECK_HDF5(H5Gclose(boxGroupId)); } -void DumpH5MDParallelImpl::writeBoxStep(const hid_t& edgesGroupId, const data::Subdomain& subdomain) const +hid_t DumpH5MDParallelImpl::createStepSet(const hid_t& edgesGroupId, const hsize_t* dims, const hsize_t& ndims) const { - std::vector edgesStepDims = {1}; - std::vector step = {0}; + hsize_t max_dims[ndims] = {H5S_UNLIMITED}; + hid_t file_space = H5Screate_simple(ndims, dims, max_dims); - CHECK_HDF5(H5LTmake_dataset(edgesGroupId, - "step", - 1, - edgesStepDims.data(), - typeToHDF5(), - step.data())); - std::vector edgesValueDims = {1, 3}; - CHECK_HDF5(H5LTmake_dataset(edgesGroupId, - "value", - 2, - edgesValueDims.data(), - typeToHDF5(), - subdomain.diameter.data())); + hid_t plist = H5Pcreate(H5P_DATASET_CREATE); + H5Pset_layout(plist, H5D_CHUNKED); + + hsize_t chunk_dims[ndims] = {2}; + H5Pset_chunk(plist, ndims, chunk_dims); + + hid_t stepSetId = H5Dcreate(edgesGroupId, "step", H5T_NATIVE_INT64, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); + + H5Pclose(plist); + H5Sclose(file_space); + + return stepSetId; +} + +void DumpH5MDParallelImpl::writeStep(const hid_t& stepSetId, const idx_t& step) const +{ + const hsize_t stepNumDims = 1; + hsize_t stepDimsAppend[stepNumDims] = {1}; + + hid_t mem_space = H5Screate_simple(stepNumDims, stepDimsAppend, NULL); + + H5Dset_extent(stepSetId, stepDimsAppend); + + auto file_space = H5Dget_space(stepSetId); + hsize_t start[1] = {0}; + hsize_t count[1] = {1}; + H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start, NULL, count, NULL); + + H5Dwrite(stepSetId, 
H5T_NATIVE_INT64, mem_space, file_space, H5P_DEFAULT, &step); + + H5Sclose(file_space); + H5Sclose(mem_space); + H5Dclose(stepSetId); } From 39f8156ed8034f7294cb44c9716427875901c3c3 Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 7 Mar 2025 15:09:08 +0100 Subject: [PATCH 04/29] major redesigns incorporating new Identifiers struct, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 176 +++++++++++++++++--------------- mrmd/io/DumpH5MDParallel.hpp | 25 +++-- mrmd/io/H5MD.test.cpp | 8 +- mrmd/io/RestoreH5MDParallel.hpp | 4 +- 4 files changed, 114 insertions(+), 99 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 1cdea5bd..d38a0be4 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -39,25 +39,29 @@ class DumpH5MDParallelImpl public: explicit DumpH5MDParallelImpl(DumpH5MDParallel& config) : config_(config) {} - std::vector open(const std::string& filename); - void dumpStep(const hid_t& file_id, + io::Identifiers open(const std::string& filename) const; + void dumpStep( + const io::Identifiers& ids, const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, const real_t dt); - void close(const hid_t& file_id, const hid_t& group1, const hid_t& group2); - hid_t openGroup(const hid_t& fileId, const std::string& groupName) const; - void closeGroup(const hid_t& groupId) const; - std::vector openBox(const hid_t& fileId) const; - void closeBox(const hid_t& boxGroupId, const hid_t& edgesGroupId) const; - void writeStep(const hid_t& stepSetId, const idx_t& step) const; - hid_t createStepSet(const hid_t& edgesGroupId, const hsize_t* dims, const hsize_t& ndims) const; + void close(const io::Identifiers& ids) const; void dump(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms); private: + hid_t createFile(const std::string& filename, const hid_t& propertyList) const; + void closeFile(const hid_t& fileId) const; + hid_t createGroup(const hid_t& 
parentElementId, const std::string& groupName) const; + void closeGroup(const hid_t& groupId) const; + void openBox(io::Identifiers& ids) const; + void writeStep(const hid_t& stepSetId, const idx_t& step) const; + hid_t createStepDataset(const hid_t& groupId, const hsize_t* dims, const hsize_t& ndims) const; + void closeDataset(const hid_t& datasetId) const; + void updateCache(const data::HostAtoms& atoms); void writeHeader(hid_t fileId) const; @@ -72,10 +76,10 @@ class DumpH5MDParallelImpl template void writeParallel(hid_t fileId, - const std::string& name, - const std::vector& globalDims, - const std::vector& localDims, - const std::vector& data); + const std::string& name, + const std::vector& globalDims, + const std::vector& localDims, + const std::vector& data); DumpH5MDParallel& config_; @@ -85,30 +89,40 @@ class DumpH5MDParallelImpl int64_t particleOffset = -1; }; -std::vector DumpH5MDParallelImpl::open(const std::string& filename) +io::Identifiers DumpH5MDParallelImpl::open(const std::string& filename) const { + io::Identifiers ids; MPI_Info info = MPI_INFO_NULL; - auto plist = CHECK_HDF5(H5Pcreate(H5P_FILE_ACCESS)); - CHECK_HDF5(H5Pset_fapl_mpio(plist, config_.mpiInfo->comm, info)); + auto propertyList = CHECK_HDF5(H5Pcreate(H5P_FILE_ACCESS)); + CHECK_HDF5(H5Pset_fapl_mpio(propertyList, config_.mpiInfo->comm, info)); - auto file_id = CHECK_HDF5(H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, plist)); + ids.fileId = createFile(filename, propertyList); - auto group1 = - CHECK_HDF5(H5Gcreate(file_id, "/particles", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); - std::string particleGroup = "/particles/" + config_.particleGroupName; - auto group2 = CHECK_HDF5( - H5Gcreate(file_id, particleGroup.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + CHECK_HDF5(H5Pclose(propertyList)); - writeHeader(file_id); + ids.particleGroupId = createGroup(ids.fileId, "particles"); + ids.particleSubGroupId = createGroup(ids.particleGroupId, config_.particleSubGroupName); + 
writeHeader(ids.fileId); + openBox(ids); + return ids; +} + +hid_t DumpH5MDParallelImpl::createFile(const std::string& filename, const hid_t& propertyList) const +{ + auto fileId = CHECK_HDF5(H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, propertyList)); + return fileId; +} - return {file_id, group1, group2}; +void DumpH5MDParallelImpl::closeFile(const hid_t& fileId) const +{ + CHECK_HDF5(H5Fclose(fileId)); } -hid_t DumpH5MDParallelImpl::openGroup(const hid_t& fileId, const std::string& groupName) const +hid_t DumpH5MDParallelImpl::createGroup(const hid_t& parentElementId, const std::string& groupName) const { auto groupId = - CHECK_HDF5(H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + CHECK_HDF5(H5Gcreate(parentElementId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); return groupId; } @@ -117,68 +131,58 @@ void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const CHECK_HDF5(H5Gclose(groupId)); } -void DumpH5MDParallelImpl::dumpStep(const hid_t& file_id, - const data::Subdomain& subdomain, - const data::Atoms& atoms, - const idx_t step, - const real_t /*dt*/) +void DumpH5MDParallelImpl::dumpStep( + const io::Identifiers& ids, + const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t /*dt*/) { data::HostAtoms h_atoms(atoms); // NOLINT updateCache(h_atoms); - auto boxIds = openBox(file_id); - writeStep(boxIds[2], step); - closeBox(boxIds[0], boxIds[1]); + writeStep(ids.stepSetId, step); } -void DumpH5MDParallelImpl::close(const hid_t& file_id, const hid_t& group1, const hid_t& group2) +void DumpH5MDParallelImpl::close(const io::Identifiers& ids) const { - CHECK_HDF5(H5Gclose(group1)); - CHECK_HDF5(H5Gclose(group2)); - - CHECK_HDF5(H5Fclose(file_id)); + closeDataset(ids.stepSetId); + closeGroup(ids.edgesGroupId); + closeGroup(ids.boxGroupId); + closeGroup(ids.particleSubGroupId); + closeGroup(ids.particleGroupId); + closeFile(ids.fileId); } -std::vector 
DumpH5MDParallelImpl::openBox(const hid_t& fileId) const -{ - std::string boxGroupName = "/particles/" + config_.particleGroupName + "/box"; - - auto boxGroupId = openGroup(fileId, boxGroupName); +void DumpH5MDParallelImpl::openBox(io::Identifiers& ids) const +{ + ids.boxGroupId = createGroup(ids.particleSubGroupId, "box"); std::vector dims = {3}; CHECK_HDF5( - H5LTset_attribute_int(fileId, boxGroupName.c_str(), "dimension", dims.data(), dims.size())); + H5LTset_attribute_int(ids.particleSubGroupId, "box", "dimension", dims.data(), dims.size())); auto boundaryType = H5Tcopy(H5T_C_S1); CHECK_HDF5(H5Tset_size(boundaryType, 8)); CHECK_HDF5(H5Tset_strpad(boundaryType, H5T_STR_NULLPAD)); std::vector boundaryDims = {3}; auto space = H5Screate_simple(int_c(boundaryDims.size()), boundaryDims.data(), nullptr); - auto att = H5Acreate(boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, H5P_DEFAULT); + auto att = H5Acreate(ids.boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, H5P_DEFAULT); CHECK_HDF5(H5Awrite(att, boundaryType, "periodicperiodicperiodic")); CHECK_HDF5(H5Aclose(att)); CHECK_HDF5(H5Sclose(space)); CHECK_HDF5(H5Tclose(boundaryType)); - auto edgesGroupId = - H5Gcreate(boxGroupId, "edges", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ids.edgesGroupId = createGroup(ids.boxGroupId, "edges"); const hsize_t stepNumDims = 1; hsize_t stepDimsCreate[stepNumDims] = {0}; - auto stepSetId = createStepSet(edgesGroupId, stepDimsCreate, stepNumDims); - - return {boxGroupId, edgesGroupId, stepSetId}; + ids.stepSetId = createStepDataset(ids.edgesGroupId, stepDimsCreate, stepNumDims); } -void DumpH5MDParallelImpl::closeBox(const hid_t& boxGroupId, const hid_t& edgesGroupId) const -{ - CHECK_HDF5(H5Gclose(edgesGroupId)); - CHECK_HDF5(H5Gclose(boxGroupId)); -} - -hid_t DumpH5MDParallelImpl::createStepSet(const hid_t& edgesGroupId, const hsize_t* dims, const hsize_t& ndims) const +hid_t DumpH5MDParallelImpl::createStepDataset(const hid_t& groupId, const hsize_t* dims, 
const hsize_t& ndims) const { hsize_t max_dims[ndims] = {H5S_UNLIMITED}; hid_t file_space = H5Screate_simple(ndims, dims, max_dims); @@ -189,7 +193,7 @@ hid_t DumpH5MDParallelImpl::createStepSet(const hid_t& edgesGroupId, const hsize hsize_t chunk_dims[ndims] = {2}; H5Pset_chunk(plist, ndims, chunk_dims); - hid_t stepSetId = H5Dcreate(edgesGroupId, "step", H5T_NATIVE_INT64, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); + auto stepSetId = H5Dcreate(groupId, "step", H5T_NATIVE_INT64, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); H5Pclose(plist); H5Sclose(file_space); @@ -197,25 +201,29 @@ hid_t DumpH5MDParallelImpl::createStepSet(const hid_t& edgesGroupId, const hsize return stepSetId; } +void DumpH5MDParallelImpl::closeDataset(const hid_t& datasetId) const +{ + H5Dclose(datasetId); +} + void DumpH5MDParallelImpl::writeStep(const hid_t& stepSetId, const idx_t& step) const { const hsize_t stepNumDims = 1; - hsize_t stepDimsAppend[stepNumDims] = {1}; + const hsize_t stepDimsAppend[stepNumDims] = {1}; - hid_t mem_space = H5Screate_simple(stepNumDims, stepDimsAppend, NULL); + const hid_t mem_space = H5Screate_simple(stepNumDims, stepDimsAppend, NULL); H5Dset_extent(stepSetId, stepDimsAppend); - auto file_space = H5Dget_space(stepSetId); - hsize_t start[1] = {0}; - hsize_t count[1] = {1}; + const auto file_space = H5Dget_space(stepSetId); + const hsize_t start[1] = {0}; + const hsize_t count[1] = {1}; H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start, NULL, count, NULL); H5Dwrite(stepSetId, H5T_NATIVE_INT64, mem_space, file_space, H5P_DEFAULT, &step); H5Sclose(file_space); H5Sclose(mem_space); - H5Dclose(stepSetId); } @@ -226,8 +234,6 @@ void DumpH5MDParallelImpl::writeStep(const hid_t& stepSetId, const idx_t& step) - - template void DumpH5MDParallelImpl::writeParallel(hid_t fileId, const std::string& name, @@ -301,7 +307,7 @@ void DumpH5MDParallelImpl::writeHeader(hid_t fileId) const void DumpH5MDParallelImpl::writeBox(hid_t fileId, const data::Subdomain& subdomain) 
const { - std::string groupName = "/particles/" + config_.particleGroupName + "/box"; + std::string groupName = "/particles/" + config_.particleSubGroupName + "/box"; auto group = CHECK_HDF5(H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); std::vector dims = {3}; @@ -365,7 +371,7 @@ void DumpH5MDParallelImpl::writePos(hid_t fileId, const data::HostAtoms& atoms) using Datatype = real_t; constexpr int64_t dimensions = 3; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleGroupName + "/" + config_.posDataset; + std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.posDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -401,7 +407,7 @@ void DumpH5MDParallelImpl::writeVel(hid_t fileId, const data::HostAtoms& atoms) using Datatype = real_t; constexpr int64_t dimensions = 3; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleGroupName + "/" + config_.velDataset; + std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.velDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -437,7 +443,7 @@ void DumpH5MDParallelImpl::writeForce(hid_t fileId, const data::HostAtoms& atoms using Datatype = real_t; constexpr int64_t dimensions = 3; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleGroupName + "/" + config_.forceDataset; + std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.forceDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -473,7 +479,7 @@ void DumpH5MDParallelImpl::writeType(hid_t fileId, const data::HostAtoms& atoms) using Datatype = idx_t; constexpr int64_t dimensions = 1; ///< dimensions of the property - std::string groupName = 
"/particles/" + config_.particleGroupName + "/" + config_.typeDataset; + std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.typeDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -507,7 +513,7 @@ void DumpH5MDParallelImpl::writeMass(hid_t fileId, const data::HostAtoms& atoms) using Datatype = real_t; constexpr int64_t dimensions = 1; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleGroupName + "/" + config_.massDataset; + std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.massDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -541,7 +547,7 @@ void DumpH5MDParallelImpl::writeCharge(hid_t fileId, const data::HostAtoms& atom using Datatype = real_t; constexpr int64_t dimensions = 1; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleGroupName + "/" + config_.chargeDataset; + std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.chargeDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -576,7 +582,7 @@ void DumpH5MDParallelImpl::writeRelativeMass(hid_t fileId, const data::HostAtoms constexpr int64_t dimensions = 1; ///< dimensions of the property std::string groupName = - "/particles/" + config_.particleGroupName + "/" + config_.relativeMassDataset; + "/particles/" + config_.particleSubGroupName + "/" + config_.relativeMassDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -636,9 +642,9 @@ void DumpH5MDParallelImpl::dump(const std::string& filename, auto group1 = CHECK_HDF5(H5Gcreate(file_id, "/particles", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); - std::string particleGroup = "/particles/" + config_.particleGroupName; + 
std::string particleSubGroup = "/particles/" + config_.particleSubGroupName; auto group2 = CHECK_HDF5( - H5Gcreate(file_id, particleGroup.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + H5Gcreate(file_id, particleSubGroup.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); writeHeader(file_id); writeBox(file_id, subdomain); @@ -655,30 +661,30 @@ void DumpH5MDParallelImpl::dump(const std::string& filename, CHECK_HDF5(H5Fclose(file_id)); } - } // namespace impl -std::vector DumpH5MDParallel::open(const std::string& filename) +io::Identifiers DumpH5MDParallel::open(const std::string& filename) { impl::DumpH5MDParallelImpl helper(*this); - auto dump_ids = helper.open(filename); - return dump_ids; + auto ids = helper.open(filename); + return ids; } -void DumpH5MDParallel::dumpStep(const hid_t& file_id, +void DumpH5MDParallel::dumpStep( + const io::Identifiers& ids, const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, const real_t dt) { impl::DumpH5MDParallelImpl helper(*this); - helper.dumpStep(file_id, subdomain, atoms, step, dt); + helper.dumpStep(ids, subdomain, atoms, step, dt); } -void DumpH5MDParallel::close(const hid_t& file_id, const hid_t& group1, const hid_t& group2) +void DumpH5MDParallel::close(const io::Identifiers& ids) { impl::DumpH5MDParallelImpl helper(*this); - helper.close(file_id, group1, group2); + helper.close(ids); } void DumpH5MDParallel::dump(const std::string& filename, diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index 013ecb97..4ef78ebb 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -23,24 +23,36 @@ namespace mrmd::io { +struct Identifiers +{ +public: + hid_t fileId; + hid_t particleGroupId; + hid_t particleSubGroupId; + hid_t boxGroupId; + hid_t edgesGroupId; + hid_t stepSetId; +}; + class DumpH5MDParallel { public: DumpH5MDParallel(const std::shared_ptr& mpiInfoArg, const std::string& authorArg, - const std::string& particleGroupNameArg = "atoms") - : 
mpiInfo(mpiInfoArg), author(authorArg), particleGroupName(particleGroupNameArg) + const std::string& particleSubGroupNameArg = "atoms") + : mpiInfo(mpiInfoArg), author(authorArg), particleSubGroupName(particleSubGroupNameArg) { } - std::vector open(const std::string& filename); + Identifiers open(const std::string& filename); - void dumpStep(const hid_t& file_id, + void dumpStep( + const Identifiers& ids, const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, const real_t dt); - void close(const hid_t& file_id, const hid_t& group1, const hid_t& group2); + void close(const Identifiers& ids); void dump(const std::string& filename, const data::Subdomain& subdomain, @@ -65,7 +77,6 @@ class DumpH5MDParallel std::shared_ptr mpiInfo; std::string author = "xxx"; - std::string particleGroupName = "atoms"; + std::string particleSubGroupName = "atoms"; }; - } // namespace mrmd::io \ No newline at end of file diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index 9eeabdcc..461c7ad1 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -127,11 +127,9 @@ TEST(H5MD, dumpMultipleSteps) auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); - auto dump_ids = dump.open("dummyMultipleSteps.hdf5"); - - dump.dumpStep(dump_ids[0], subdomain1, atoms1, 0, 0_r); - - dump.close(dump_ids[0], dump_ids[1], dump_ids[2]); + auto ids = dump.open("dummyMultipleSteps.hdf5"); + dump.dumpStep(ids, subdomain1, atoms1, 0, 0_r); + dump.close(ids); } } // namespace io } // namespace mrmd \ No newline at end of file diff --git a/mrmd/io/RestoreH5MDParallel.hpp b/mrmd/io/RestoreH5MDParallel.hpp index 45766ef1..f1866931 100644 --- a/mrmd/io/RestoreH5MDParallel.hpp +++ b/mrmd/io/RestoreH5MDParallel.hpp @@ -30,8 +30,8 @@ class RestoreH5MDParallel { public: RestoreH5MDParallel(const std::shared_ptr& mpiInfo, - const std::string& particleGroupName = "atoms") - : mpiInfo_(mpiInfo), particleGroupName_(particleGroupName) + const std::string& particleSubGroupName = "atoms") + : 
mpiInfo_(mpiInfo), particleGroupName_(particleSubGroupName) { } From e29a11dd81630ca2d859c135006a25ca54b7c10b Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 7 Mar 2025 17:19:13 +0100 Subject: [PATCH 05/29] first attemps to make step dataset extendable, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 21 +++++++++++++-------- mrmd/io/H5MD.test.cpp | 7 ++++++- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index d38a0be4..4f7d2b33 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -182,15 +182,17 @@ void DumpH5MDParallelImpl::openBox(io::Identifiers& ids) const ids.stepSetId = createStepDataset(ids.edgesGroupId, stepDimsCreate, stepNumDims); } -hid_t DumpH5MDParallelImpl::createStepDataset(const hid_t& groupId, const hsize_t* dims, const hsize_t& ndims) const +hid_t DumpH5MDParallelImpl::createStepDataset(const hid_t& groupId, const hsize_t* /*dims*/, const hsize_t& /*ndims*/) const { - hsize_t max_dims[ndims] = {H5S_UNLIMITED}; + const hsize_t ndims = 1; + const hsize_t dims[ndims] = {1}; + const hsize_t max_dims[ndims] = {H5S_UNLIMITED}; hid_t file_space = H5Screate_simple(ndims, dims, max_dims); hid_t plist = H5Pcreate(H5P_DATASET_CREATE); H5Pset_layout(plist, H5D_CHUNKED); - hsize_t chunk_dims[ndims] = {2}; + hsize_t chunk_dims[ndims] = {1}; H5Pset_chunk(plist, ndims, chunk_dims); auto stepSetId = H5Dcreate(groupId, "step", H5T_NATIVE_INT64, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); @@ -213,14 +215,17 @@ void DumpH5MDParallelImpl::writeStep(const hid_t& stepSetId, const idx_t& step) const hid_t mem_space = H5Screate_simple(stepNumDims, stepDimsAppend, NULL); - H5Dset_extent(stepSetId, stepDimsAppend); + const hsize_t newSize = step + 1; + H5Dset_extent(stepSetId, &newSize); const auto file_space = H5Dget_space(stepSetId); - const hsize_t start[1] = {0}; - const hsize_t count[1] = {1}; - H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start, NULL, count, 
NULL); + + const hsize_t start = step; + const hsize_t count = step + 1; + H5Sselect_hyperslab(file_space, H5S_SELECT_SET, &start, NULL, &count, NULL); - H5Dwrite(stepSetId, H5T_NATIVE_INT64, mem_space, file_space, H5P_DEFAULT, &step); + const hsize_t writeStep = 1; + H5Dwrite(stepSetId, H5T_NATIVE_INT64, mem_space, file_space, H5P_DEFAULT, &writeStep); H5Sclose(file_space); H5Sclose(mem_space); diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index 461c7ad1..ff846804 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -128,7 +128,12 @@ TEST(H5MD, dumpMultipleSteps) auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); auto ids = dump.open("dummyMultipleSteps.hdf5"); - dump.dumpStep(ids, subdomain1, atoms1, 0, 0_r); + + for (idx_t step = 0; step < 10; ++step) + { + dump.dumpStep(ids, subdomain1, atoms1, step, 0_r); + } + dump.close(ids); } } // namespace io From 6fb3507829e67176e6952aaf11a48712248265b4 Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 7 Mar 2025 17:38:18 +0100 Subject: [PATCH 06/29] absorbed identifiers into DumpH5MDParallel, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 62 ++++++++++++++++-------------------- mrmd/io/DumpH5MDParallel.hpp | 24 +++++++------- mrmd/io/H5MD.test.cpp | 8 ++--- 3 files changed, 43 insertions(+), 51 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 4f7d2b33..9c23d3e9 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -39,14 +39,13 @@ class DumpH5MDParallelImpl public: explicit DumpH5MDParallelImpl(DumpH5MDParallel& config) : config_(config) {} - io::Identifiers open(const std::string& filename) const; + void open(const std::string& filename); void dumpStep( - const io::Identifiers& ids, const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, const real_t dt); - void close(const io::Identifiers& ids) const; + void close() const; void dump(const std::string& filename, const data::Subdomain& subdomain, @@ -57,7 
+56,7 @@ class DumpH5MDParallelImpl void closeFile(const hid_t& fileId) const; hid_t createGroup(const hid_t& parentElementId, const std::string& groupName) const; void closeGroup(const hid_t& groupId) const; - void openBox(io::Identifiers& ids) const; + void openBox() const; void writeStep(const hid_t& stepSetId, const idx_t& step) const; hid_t createStepDataset(const hid_t& groupId, const hsize_t* dims, const hsize_t& ndims) const; void closeDataset(const hid_t& datasetId) const; @@ -89,23 +88,21 @@ class DumpH5MDParallelImpl int64_t particleOffset = -1; }; -io::Identifiers DumpH5MDParallelImpl::open(const std::string& filename) const +void DumpH5MDParallelImpl::open(const std::string& filename) { - io::Identifiers ids; MPI_Info info = MPI_INFO_NULL; auto propertyList = CHECK_HDF5(H5Pcreate(H5P_FILE_ACCESS)); CHECK_HDF5(H5Pset_fapl_mpio(propertyList, config_.mpiInfo->comm, info)); - ids.fileId = createFile(filename, propertyList); + config_.fileId = createFile(filename, propertyList); CHECK_HDF5(H5Pclose(propertyList)); - ids.particleGroupId = createGroup(ids.fileId, "particles"); - ids.particleSubGroupId = createGroup(ids.particleGroupId, config_.particleSubGroupName); - writeHeader(ids.fileId); - openBox(ids); - return ids; + config_.particleGroupId = createGroup(config_.fileId, "particles"); + config_.particleSubGroupId = createGroup(config_.particleGroupId, config_.particleSubGroupName); + writeHeader(config_.fileId); + openBox(); } hid_t DumpH5MDParallelImpl::createFile(const std::string& filename, const hid_t& propertyList) const @@ -132,7 +129,6 @@ void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const } void DumpH5MDParallelImpl::dumpStep( - const io::Identifiers& ids, const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, @@ -142,44 +138,44 @@ void DumpH5MDParallelImpl::dumpStep( updateCache(h_atoms); - writeStep(ids.stepSetId, step); + writeStep(config_.stepSetId, step); } -void DumpH5MDParallelImpl::close(const 
io::Identifiers& ids) const +void DumpH5MDParallelImpl::close() const { - closeDataset(ids.stepSetId); - closeGroup(ids.edgesGroupId); - closeGroup(ids.boxGroupId); - closeGroup(ids.particleSubGroupId); - closeGroup(ids.particleGroupId); - closeFile(ids.fileId); + closeDataset(config_.stepSetId); + closeGroup(config_.edgesGroupId); + closeGroup(config_.boxGroupId); + closeGroup(config_.particleSubGroupId); + closeGroup(config_.particleGroupId); + closeFile(config_.fileId); } -void DumpH5MDParallelImpl::openBox(io::Identifiers& ids) const +void DumpH5MDParallelImpl::openBox() const { - ids.boxGroupId = createGroup(ids.particleSubGroupId, "box"); + config_.boxGroupId = createGroup(config_.particleSubGroupId, "box"); std::vector dims = {3}; CHECK_HDF5( - H5LTset_attribute_int(ids.particleSubGroupId, "box", "dimension", dims.data(), dims.size())); + H5LTset_attribute_int(config_.particleSubGroupId, "box", "dimension", dims.data(), dims.size())); auto boundaryType = H5Tcopy(H5T_C_S1); CHECK_HDF5(H5Tset_size(boundaryType, 8)); CHECK_HDF5(H5Tset_strpad(boundaryType, H5T_STR_NULLPAD)); std::vector boundaryDims = {3}; auto space = H5Screate_simple(int_c(boundaryDims.size()), boundaryDims.data(), nullptr); - auto att = H5Acreate(ids.boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, H5P_DEFAULT); + auto att = H5Acreate(config_.boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, H5P_DEFAULT); CHECK_HDF5(H5Awrite(att, boundaryType, "periodicperiodicperiodic")); CHECK_HDF5(H5Aclose(att)); CHECK_HDF5(H5Sclose(space)); CHECK_HDF5(H5Tclose(boundaryType)); - ids.edgesGroupId = createGroup(ids.boxGroupId, "edges"); + config_.edgesGroupId = createGroup(config_.boxGroupId, "edges"); const hsize_t stepNumDims = 1; hsize_t stepDimsCreate[stepNumDims] = {0}; - ids.stepSetId = createStepDataset(ids.edgesGroupId, stepDimsCreate, stepNumDims); + config_.stepSetId = createStepDataset(config_.edgesGroupId, stepDimsCreate, stepNumDims); } hid_t 
DumpH5MDParallelImpl::createStepDataset(const hid_t& groupId, const hsize_t* /*dims*/, const hsize_t& /*ndims*/) const @@ -668,28 +664,26 @@ void DumpH5MDParallelImpl::dump(const std::string& filename, } } // namespace impl -io::Identifiers DumpH5MDParallel::open(const std::string& filename) +void DumpH5MDParallel::open(const std::string& filename) { impl::DumpH5MDParallelImpl helper(*this); - auto ids = helper.open(filename); - return ids; + helper.open(filename); } void DumpH5MDParallel::dumpStep( - const io::Identifiers& ids, const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, const real_t dt) { impl::DumpH5MDParallelImpl helper(*this); - helper.dumpStep(ids, subdomain, atoms, step, dt); + helper.dumpStep(subdomain, atoms, step, dt); } -void DumpH5MDParallel::close(const io::Identifiers& ids) +void DumpH5MDParallel::close() { impl::DumpH5MDParallelImpl helper(*this); - helper.close(ids); + helper.close(); } void DumpH5MDParallel::dump(const std::string& filename, diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index 4ef78ebb..63e7960b 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -23,16 +23,6 @@ namespace mrmd::io { -struct Identifiers -{ -public: - hid_t fileId; - hid_t particleGroupId; - hid_t particleSubGroupId; - hid_t boxGroupId; - hid_t edgesGroupId; - hid_t stepSetId; -}; class DumpH5MDParallel { @@ -43,16 +33,15 @@ class DumpH5MDParallel : mpiInfo(mpiInfoArg), author(authorArg), particleSubGroupName(particleSubGroupNameArg) { } - Identifiers open(const std::string& filename); + void open(const std::string& filename); void dumpStep( - const Identifiers& ids, const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, const real_t dt); - void close(const Identifiers& ids); + void close(); void dump(const std::string& filename, const data::Subdomain& subdomain, @@ -78,5 +67,14 @@ class DumpH5MDParallel std::string author = "xxx"; std::string 
particleSubGroupName = "atoms"; + + hid_t fileId; + hid_t particleGroupId; + hid_t particleSubGroupId; + hid_t boxGroupId; + hid_t edgesGroupId; + hid_t stepSetId; + + hsize_t saveStep = 0; }; } // namespace mrmd::io \ No newline at end of file diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index ff846804..97b1a481 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -127,14 +127,14 @@ TEST(H5MD, dumpMultipleSteps) auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); - auto ids = dump.open("dummyMultipleSteps.hdf5"); + dump.open("dummyMultipleSteps.hdf5"); for (idx_t step = 0; step < 10; ++step) { - dump.dumpStep(ids, subdomain1, atoms1, step, 0_r); + dump.dumpStep(subdomain1, atoms1, step, 0_r); } - - dump.close(ids); + + dump.close(); } } // namespace io } // namespace mrmd \ No newline at end of file From 8878071ea5c9b30c30bf94e1e4b9d5a3a32c6a28 Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 7 Mar 2025 17:58:20 +0100 Subject: [PATCH 07/29] enabled printing each step to file, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 25 +++++++++++++------------ mrmd/io/DumpH5MDParallel.hpp | 2 +- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 9c23d3e9..0ef3f271 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -57,7 +57,7 @@ class DumpH5MDParallelImpl hid_t createGroup(const hid_t& parentElementId, const std::string& groupName) const; void closeGroup(const hid_t& groupId) const; void openBox() const; - void writeStep(const hid_t& stepSetId, const idx_t& step) const; + void writeStep(const idx_t& step) const; hid_t createStepDataset(const hid_t& groupId, const hsize_t* dims, const hsize_t& ndims) const; void closeDataset(const hid_t& datasetId) const; @@ -138,7 +138,7 @@ void DumpH5MDParallelImpl::dumpStep( updateCache(h_atoms); - writeStep(config_.stepSetId, step); + writeStep(step); } void DumpH5MDParallelImpl::close() const @@ -204,25 
+204,26 @@ void DumpH5MDParallelImpl::closeDataset(const hid_t& datasetId) const H5Dclose(datasetId); } -void DumpH5MDParallelImpl::writeStep(const hid_t& stepSetId, const idx_t& step) const +void DumpH5MDParallelImpl::writeStep(const idx_t& step) const { const hsize_t stepNumDims = 1; - const hsize_t stepDimsAppend[stepNumDims] = {1}; + const hsize_t stepDimsAppend = 1; - const hid_t mem_space = H5Screate_simple(stepNumDims, stepDimsAppend, NULL); + const hid_t mem_space = H5Screate_simple(stepNumDims, &stepDimsAppend, NULL); - const hsize_t newSize = step + 1; - H5Dset_extent(stepSetId, &newSize); + const hsize_t newSize = config_.saveCount + 1; + H5Dset_extent(config_.stepSetId, &newSize); - const auto file_space = H5Dget_space(stepSetId); + const auto file_space = H5Dget_space(config_.stepSetId); - const hsize_t start = step; - const hsize_t count = step + 1; + const hsize_t start = config_.saveCount; + const hsize_t count = 1; H5Sselect_hyperslab(file_space, H5S_SELECT_SET, &start, NULL, &count, NULL); - const hsize_t writeStep = 1; - H5Dwrite(stepSetId, H5T_NATIVE_INT64, mem_space, file_space, H5P_DEFAULT, &writeStep); + H5Dwrite(config_.stepSetId, H5T_NATIVE_INT64, mem_space, file_space, H5P_DEFAULT, &step); + config_.saveCount += 1; + H5Sclose(file_space); H5Sclose(mem_space); } diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index 63e7960b..b53adef2 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -75,6 +75,6 @@ class DumpH5MDParallel hid_t edgesGroupId; hid_t stepSetId; - hsize_t saveStep = 0; + hsize_t saveCount = 0; }; } // namespace mrmd::io \ No newline at end of file From dd0e12825b81f2f2641da6b4f0b9d0f665734e67 Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 7 Mar 2025 18:31:21 +0100 Subject: [PATCH 08/29] added printing of simulation time, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 89 ++++++++++++++++++++++++++---------- mrmd/io/DumpH5MDParallel.hpp | 1 + mrmd/io/H5MD.test.cpp | 3 +- 3 
files changed, 68 insertions(+), 25 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 0ef3f271..9afe7802 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -57,10 +57,14 @@ class DumpH5MDParallelImpl hid_t createGroup(const hid_t& parentElementId, const std::string& groupName) const; void closeGroup(const hid_t& groupId) const; void openBox() const; - void writeStep(const idx_t& step) const; - hid_t createStepDataset(const hid_t& groupId, const hsize_t* dims, const hsize_t& ndims) const; + void createStepDataset() const; + void createTimeDataset() const; + void closeDataset(const hid_t& datasetId) const; + void writeStep(const idx_t& step) const; + void writeTime(const real_t& time) const; + void updateCache(const data::HostAtoms& atoms); void writeHeader(hid_t fileId) const; @@ -128,21 +132,9 @@ void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const CHECK_HDF5(H5Gclose(groupId)); } -void DumpH5MDParallelImpl::dumpStep( - const data::Subdomain& subdomain, - const data::Atoms& atoms, - const idx_t step, - const real_t /*dt*/) -{ - data::HostAtoms h_atoms(atoms); // NOLINT - - updateCache(h_atoms); - - writeStep(step); -} - void DumpH5MDParallelImpl::close() const { + closeDataset(config_.timeSetId); closeDataset(config_.stepSetId); closeGroup(config_.edgesGroupId); closeGroup(config_.boxGroupId); @@ -172,13 +164,11 @@ void DumpH5MDParallelImpl::openBox() const config_.edgesGroupId = createGroup(config_.boxGroupId, "edges"); - const hsize_t stepNumDims = 1; - hsize_t stepDimsCreate[stepNumDims] = {0}; - - config_.stepSetId = createStepDataset(config_.edgesGroupId, stepDimsCreate, stepNumDims); + createStepDataset(); + createTimeDataset(); } -hid_t DumpH5MDParallelImpl::createStepDataset(const hid_t& groupId, const hsize_t* /*dims*/, const hsize_t& /*ndims*/) const +void DumpH5MDParallelImpl::createStepDataset() const { const hsize_t ndims = 1; const hsize_t dims[ndims] = {1}; @@ -191,12 
+181,29 @@ hid_t DumpH5MDParallelImpl::createStepDataset(const hid_t& groupId, const hsize_ hsize_t chunk_dims[ndims] = {1}; H5Pset_chunk(plist, ndims, chunk_dims); - auto stepSetId = H5Dcreate(groupId, "step", H5T_NATIVE_INT64, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); + config_.stepSetId = H5Dcreate(config_.edgesGroupId, "step", H5T_NATIVE_INT64, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); H5Pclose(plist); H5Sclose(file_space); +} - return stepSetId; +void DumpH5MDParallelImpl::createTimeDataset() const +{ + const hsize_t ndims = 1; + const hsize_t dims[ndims] = {1}; + const hsize_t max_dims[ndims] = {H5S_UNLIMITED}; + hid_t file_space = H5Screate_simple(ndims, dims, max_dims); + + hid_t plist = H5Pcreate(H5P_DATASET_CREATE); + H5Pset_layout(plist, H5D_CHUNKED); + + hsize_t chunk_dims[ndims] = {1}; + H5Pset_chunk(plist, ndims, chunk_dims); + + config_.timeSetId = H5Dcreate(config_.edgesGroupId, "time", H5T_NATIVE_DOUBLE, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); + + H5Pclose(plist); + H5Sclose(file_space); } void DumpH5MDParallelImpl::closeDataset(const hid_t& datasetId) const @@ -204,6 +211,21 @@ void DumpH5MDParallelImpl::closeDataset(const hid_t& datasetId) const H5Dclose(datasetId); } +void DumpH5MDParallelImpl::dumpStep( + const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t dt) +{ + data::HostAtoms h_atoms(atoms); // NOLINT + + updateCache(h_atoms); + + writeStep(step); + writeTime(real_c(step) * dt); + config_.saveCount += 1; +} + void DumpH5MDParallelImpl::writeStep(const idx_t& step) const { const hsize_t stepNumDims = 1; @@ -222,12 +244,31 @@ void DumpH5MDParallelImpl::writeStep(const idx_t& step) const H5Dwrite(config_.stepSetId, H5T_NATIVE_INT64, mem_space, file_space, H5P_DEFAULT, &step); - config_.saveCount += 1; - H5Sclose(file_space); H5Sclose(mem_space); } +void DumpH5MDParallelImpl::writeTime(const real_t& time) const +{ + const hsize_t stepNumDims = 1; + const hsize_t stepDimsAppend = 1; + + 
const hid_t mem_space = H5Screate_simple(stepNumDims, &stepDimsAppend, NULL); + + const hsize_t newSize = config_.saveCount + 1; + H5Dset_extent(config_.timeSetId, &newSize); + + const auto file_space = H5Dget_space(config_.timeSetId); + + const hsize_t start = config_.saveCount; + const hsize_t count = 1; + H5Sselect_hyperslab(file_space, H5S_SELECT_SET, &start, NULL, &count, NULL); + + H5Dwrite(config_.timeSetId, H5T_NATIVE_DOUBLE, mem_space, file_space, H5P_DEFAULT, &time); + + H5Sclose(file_space); + H5Sclose(mem_space); +} diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index b53adef2..ad1138b6 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -74,6 +74,7 @@ class DumpH5MDParallel hid_t boxGroupId; hid_t edgesGroupId; hid_t stepSetId; + hid_t timeSetId; hsize_t saveCount = 0; }; diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index 97b1a481..ec8103da 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -124,6 +124,7 @@ TEST(H5MD, dumpMultipleSteps) auto subdomain1 = data::Subdomain({1_r, 2_r, 3_r}, {4_r, 6_r, 8_r}, 0.5_r); auto atoms1 = getAtoms(mpiInfo); + real_t dt = 0.002_r; auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); @@ -131,7 +132,7 @@ TEST(H5MD, dumpMultipleSteps) for (idx_t step = 0; step < 10; ++step) { - dump.dumpStep(subdomain1, atoms1, step, 0_r); + dump.dumpStep(subdomain1, atoms1, step, dt); } dump.close(); From ac3bdc060b96de8fe8a30a6eb56b3215656b3a96 Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 7 Mar 2025 19:03:42 +0100 Subject: [PATCH 09/29] generalized time and step creation, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 42 +++++++++++------------------------- 1 file changed, 13 insertions(+), 29 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 9afe7802..bc5d44d2 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -57,6 +57,7 @@ class DumpH5MDParallelImpl hid_t createGroup(const hid_t& 
parentElementId, const std::string& groupName) const; void closeGroup(const hid_t& groupId) const; void openBox() const; + hid_t createChunkedDataset(const hid_t& groupId, const hsize_t dims[], const hsize_t& ndims, const std::string& name, const hid_t& dtype) const; void createStepDataset() const; void createTimeDataset() const; @@ -164,46 +165,29 @@ void DumpH5MDParallelImpl::openBox() const config_.edgesGroupId = createGroup(config_.boxGroupId, "edges"); - createStepDataset(); - createTimeDataset(); + std::vector stepDims = {1}; + config_.stepSetId = createChunkedDataset(config_.edgesGroupId, stepDims.data(), stepDims.size(), "step", H5T_NATIVE_INT64); + std::vector timeDims = {1}; + config_.timeSetId = createChunkedDataset(config_.edgesGroupId, timeDims.data(), timeDims.size(), "time", H5T_NATIVE_DOUBLE); } -void DumpH5MDParallelImpl::createStepDataset() const +hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const hsize_t dims[], const hsize_t& ndims, const std::string& name, const hid_t& dtype) const { - const hsize_t ndims = 1; - const hsize_t dims[ndims] = {1}; - const hsize_t max_dims[ndims] = {H5S_UNLIMITED}; - hid_t file_space = H5Screate_simple(ndims, dims, max_dims); + const std::vector max_dims = {H5S_UNLIMITED}; + hid_t file_space = H5Screate_simple(ndims, dims, max_dims.data()); hid_t plist = H5Pcreate(H5P_DATASET_CREATE); H5Pset_layout(plist, H5D_CHUNKED); - hsize_t chunk_dims[ndims] = {1}; - H5Pset_chunk(plist, ndims, chunk_dims); - - config_.stepSetId = H5Dcreate(config_.edgesGroupId, "step", H5T_NATIVE_INT64, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); - - H5Pclose(plist); - H5Sclose(file_space); -} - -void DumpH5MDParallelImpl::createTimeDataset() const -{ - const hsize_t ndims = 1; - const hsize_t dims[ndims] = {1}; - const hsize_t max_dims[ndims] = {H5S_UNLIMITED}; - hid_t file_space = H5Screate_simple(ndims, dims, max_dims); - - hid_t plist = H5Pcreate(H5P_DATASET_CREATE); - H5Pset_layout(plist, H5D_CHUNKED); + const 
std::vector chunk_dims = {1}; + H5Pset_chunk(plist, ndims, chunk_dims.data()); - hsize_t chunk_dims[ndims] = {1}; - H5Pset_chunk(plist, ndims, chunk_dims); - - config_.timeSetId = H5Dcreate(config_.edgesGroupId, "time", H5T_NATIVE_DOUBLE, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); + auto datasetId = H5Dcreate(groupId, name.c_str(), dtype, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); H5Pclose(plist); H5Sclose(file_space); + + return datasetId; } void DumpH5MDParallelImpl::closeDataset(const hid_t& datasetId) const From 1d107c04f6c998f7dd294ea26986e53cbb9ec7ca Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 7 Mar 2025 20:28:14 +0100 Subject: [PATCH 10/29] generalized appending of small data and started parallel append, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 102 +++++++++++++++++++++++------------ mrmd/io/DumpH5MDParallel.hpp | 1 + 2 files changed, 69 insertions(+), 34 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index bc5d44d2..cee5dba0 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -58,13 +58,18 @@ class DumpH5MDParallelImpl void closeGroup(const hid_t& groupId) const; void openBox() const; hid_t createChunkedDataset(const hid_t& groupId, const hsize_t dims[], const hsize_t& ndims, const std::string& name, const hid_t& dtype) const; - void createStepDataset() const; - void createTimeDataset() const; - void closeDataset(const hid_t& datasetId) const; void writeStep(const idx_t& step) const; void writeTime(const real_t& time) const; + template + void appendData(const hid_t datasetId, + const std::vector& data) const; + template + void appendParallel(const hid_t datasetId, + const std::vector& globalDims, + const std::vector& localDims, + const std::vector& data); void updateCache(const data::HostAtoms& atoms); @@ -135,6 +140,7 @@ void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const void DumpH5MDParallelImpl::close() const { + closeDataset(config_.boxValueSetId); 
closeDataset(config_.timeSetId); closeDataset(config_.stepSetId); closeGroup(config_.edgesGroupId); @@ -166,9 +172,12 @@ void DumpH5MDParallelImpl::openBox() const config_.edgesGroupId = createGroup(config_.boxGroupId, "edges"); std::vector stepDims = {1}; - config_.stepSetId = createChunkedDataset(config_.edgesGroupId, stepDims.data(), stepDims.size(), "step", H5T_NATIVE_INT64); std::vector timeDims = {1}; + std::vector boxValueDims = {3}; + + config_.stepSetId = createChunkedDataset(config_.edgesGroupId, stepDims.data(), stepDims.size(), "step", H5T_NATIVE_INT64); config_.timeSetId = createChunkedDataset(config_.edgesGroupId, timeDims.data(), timeDims.size(), "time", H5T_NATIVE_DOUBLE); + config_.boxValueSetId = createChunkedDataset(config_.edgesGroupId, boxValueDims.data(), boxValueDims.size(), "value", H5T_NATIVE_DOUBLE); } hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const hsize_t dims[], const hsize_t& ndims, const std::string& name, const hid_t& dtype) const @@ -205,56 +214,81 @@ void DumpH5MDParallelImpl::dumpStep( updateCache(h_atoms); - writeStep(step); - writeTime(real_c(step) * dt); + appendData(config_.stepSetId, std::vector{step}); + appendData(config_.timeSetId, std::vector{real_c(step) * dt}); + appendData(config_.boxValueSetId, std::vector{subdomain.diameter[0], subdomain.diameter[1], subdomain.diameter[2]}); config_.saveCount += 1; } -void DumpH5MDParallelImpl::writeStep(const idx_t& step) const +template +void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector& data) const { - const hsize_t stepNumDims = 1; - const hsize_t stepDimsAppend = 1; + const hsize_t rank = 2; + const std::vector dims = {1, data.size()}; - const hid_t mem_space = H5Screate_simple(stepNumDims, &stepDimsAppend, NULL); + const hid_t mem_space = H5Screate_simple(rank, dims.data(), NULL); - const hsize_t newSize = config_.saveCount + 1; - H5Dset_extent(config_.stepSetId, &newSize); + const std::vector newSize = 
{config_.saveCount + 1, data.size()}; + H5Dset_extent(datasetId, newSize.data()); - const auto file_space = H5Dget_space(config_.stepSetId); + const auto file_space = H5Dget_space(datasetId); - const hsize_t start = config_.saveCount; - const hsize_t count = 1; - H5Sselect_hyperslab(file_space, H5S_SELECT_SET, &start, NULL, &count, NULL); + const std::vector start = {config_.saveCount, data.size()}; + const std::vector count = {1, data.size()}; + H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start.data(), NULL, count.data(), NULL); - H5Dwrite(config_.stepSetId, H5T_NATIVE_INT64, mem_space, file_space, H5P_DEFAULT, &step); + H5Dwrite(datasetId, typeToHDF5(), mem_space, file_space, H5P_DEFAULT, data.data()); H5Sclose(file_space); H5Sclose(mem_space); } -void DumpH5MDParallelImpl::writeTime(const real_t& time) const +template +void DumpH5MDParallelImpl::appendParallel(const hid_t datasetId, + const std::vector& globalDims, + const std::vector& localDims, + const std::vector& data) { - const hsize_t stepNumDims = 1; - const hsize_t stepDimsAppend = 1; - - const hid_t mem_space = H5Screate_simple(stepNumDims, &stepDimsAppend, NULL); + MRMD_HOST_CHECK_EQUAL(globalDims.size(), localDims.size()); + MRMD_HOST_CHECK_EQUAL( + data.size(), + std::accumulate(localDims.begin(), localDims.end(), hsize_t(1), std::multiplies<>())); - const hsize_t newSize = config_.saveCount + 1; - H5Dset_extent(config_.timeSetId, &newSize); + auto dataSpace = + CHECK_HDF5(H5Screate_simple(int_c(globalDims.size()), globalDims.data(), NULL)); - const auto file_space = H5Dget_space(config_.timeSetId); - - const hsize_t start = config_.saveCount; - const hsize_t count = 1; - H5Sselect_hyperslab(file_space, H5S_SELECT_SET, &start, NULL, &count, NULL); + std::vector offset(globalDims.size(), 0); + offset[1] = particleOffset; + std::vector stride(globalDims.size(), 1); + std::vector count(globalDims.size(), 1); + for (auto i = 0; i < int_c(globalDims.size()); ++i) + { + MRMD_HOST_CHECK_LESSEQUAL( + 
localDims[i] + offset[i], globalDims[i], fmt::format("i = {}", i)); + } + auto dstSpace = CHECK_HDF5(H5Dget_space(datasetId)); + CHECK_HDF5(H5Sselect_hyperslab( + dstSpace, H5S_SELECT_SET, offset.data(), stride.data(), count.data(), localDims.data())); - H5Dwrite(config_.timeSetId, H5T_NATIVE_DOUBLE, mem_space, file_space, H5P_DEFAULT, &time); - - H5Sclose(file_space); - H5Sclose(mem_space); -} + std::vector localOffset(globalDims.size(), 0); + auto srcSpace = + CHECK_HDF5(H5Screate_simple(int_c(localDims.size()), localDims.data(), NULL)); + CHECK_HDF5(H5Sselect_hyperslab(srcSpace, + H5S_SELECT_SET, + localOffset.data(), + stride.data(), + count.data(), + localDims.data())); + auto dataPropertyList = CHECK_HDF5(H5Pcreate(H5P_DATASET_XFER)); + CHECK_HDF5(H5Pset_dxpl_mpio(dataPropertyList, H5FD_MPIO_COLLECTIVE)); + CHECK_HDF5(H5Dwrite(datasetId, typeToHDF5(), srcSpace, dstSpace, dataPropertyList, data.data())); + CHECK_HDF5(H5Pclose(dataPropertyList)); + CHECK_HDF5(H5Sclose(dstSpace)); + CHECK_HDF5(H5Sclose(srcSpace)); + CHECK_HDF5(H5Sclose(dataSpace)); +} diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index ad1138b6..b480a7c8 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -75,6 +75,7 @@ class DumpH5MDParallel hid_t edgesGroupId; hid_t stepSetId; hid_t timeSetId; + hid_t boxValueSetId; hsize_t saveCount = 0; }; From 8fe281c8edc29402cfaab433477dc4bf78e24ca1 Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 10:44:41 +0100 Subject: [PATCH 11/29] enabled appending box edge vectors to H5MD file, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index cee5dba0..1a448607 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -57,7 +57,7 @@ class DumpH5MDParallelImpl hid_t createGroup(const hid_t& parentElementId, const std::string& 
groupName) const; void closeGroup(const hid_t& groupId) const; void openBox() const; - hid_t createChunkedDataset(const hid_t& groupId, const hsize_t dims[], const hsize_t& ndims, const std::string& name, const hid_t& dtype) const; + hid_t createChunkedDataset(const hid_t& groupId, const std::vector& dims, const std::string& name, const hid_t& dtype) const; void closeDataset(const hid_t& datasetId) const; void writeStep(const idx_t& step) const; @@ -173,23 +173,23 @@ void DumpH5MDParallelImpl::openBox() const std::vector stepDims = {1}; std::vector timeDims = {1}; - std::vector boxValueDims = {3}; + std::vector boxValueDims = {1, 3}; - config_.stepSetId = createChunkedDataset(config_.edgesGroupId, stepDims.data(), stepDims.size(), "step", H5T_NATIVE_INT64); - config_.timeSetId = createChunkedDataset(config_.edgesGroupId, timeDims.data(), timeDims.size(), "time", H5T_NATIVE_DOUBLE); - config_.boxValueSetId = createChunkedDataset(config_.edgesGroupId, boxValueDims.data(), boxValueDims.size(), "value", H5T_NATIVE_DOUBLE); + config_.stepSetId = createChunkedDataset(config_.edgesGroupId, stepDims, "step", H5T_NATIVE_INT64); + config_.timeSetId = createChunkedDataset(config_.edgesGroupId, timeDims, "time", H5T_NATIVE_DOUBLE); + config_.boxValueSetId = createChunkedDataset(config_.edgesGroupId, boxValueDims, "value", H5T_NATIVE_DOUBLE); } -hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const hsize_t dims[], const hsize_t& ndims, const std::string& name, const hid_t& dtype) const +hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const std::vector& dims, const std::string& name, const hid_t& dtype) const { - const std::vector max_dims = {H5S_UNLIMITED}; - hid_t file_space = H5Screate_simple(ndims, dims, max_dims.data()); + const std::vector max_dims = {H5S_UNLIMITED, dims[1]}; + hid_t file_space = H5Screate_simple(dims.size(), dims.data(), max_dims.data()); hid_t plist = H5Pcreate(H5P_DATASET_CREATE); H5Pset_layout(plist, 
H5D_CHUNKED); - const std::vector chunk_dims = {1}; - H5Pset_chunk(plist, ndims, chunk_dims.data()); + const std::vector chunk_dims = dims; + H5Pset_chunk(plist, dims.size(), chunk_dims.data()); auto datasetId = H5Dcreate(groupId, name.c_str(), dtype, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); @@ -233,7 +233,7 @@ void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector start = {config_.saveCount, data.size()}; + const std::vector start = {config_.saveCount, 0}; const std::vector count = {1, data.size()}; H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start.data(), NULL, count.data(), NULL); From 8ff2037e24e4960e1aeb718497ee33d2c3b41735 Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 15:13:01 +0100 Subject: [PATCH 12/29] added charges and positions creation, appending and closing, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 186 +++++++++++++++++------------------ mrmd/io/DumpH5MDParallel.hpp | 16 ++- mrmd/io/H5MD.test.cpp | 2 +- 3 files changed, 106 insertions(+), 98 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 1a448607..9ca0c80e 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -39,7 +39,7 @@ class DumpH5MDParallelImpl public: explicit DumpH5MDParallelImpl(DumpH5MDParallel& config) : config_(config) {} - void open(const std::string& filename); + void open(const std::string& filename, const data::Atoms& atoms); void dumpStep( const data::Subdomain& subdomain, const data::Atoms& atoms, @@ -63,13 +63,10 @@ class DumpH5MDParallelImpl void writeStep(const idx_t& step) const; void writeTime(const real_t& time) const; template - void appendData(const hid_t datasetId, - const std::vector& data) const; - template - void appendParallel(const hid_t datasetId, - const std::vector& globalDims, - const std::vector& localDims, - const std::vector& data); + void appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const; + void 
appendEdges(const idx_t& step, const real_t& dt, const data::Subdomain& subdomain) const; + void appendCharges(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; + void appendPositions(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; void updateCache(const data::HostAtoms& atoms); @@ -98,7 +95,7 @@ class DumpH5MDParallelImpl int64_t particleOffset = -1; }; -void DumpH5MDParallelImpl::open(const std::string& filename) +void DumpH5MDParallelImpl::open(const std::string& filename, const data::Atoms& atoms) { MPI_Info info = MPI_INFO_NULL; @@ -113,6 +110,16 @@ void DumpH5MDParallelImpl::open(const std::string& filename) config_.particleSubGroupId = createGroup(config_.particleGroupId, config_.particleSubGroupName); writeHeader(config_.fileId); openBox(); + + config_.chargesGroupId = createGroup(config_.particleSubGroupId, "charges"); + config_.chargesStepSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); + config_.chargesTimeSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); + config_.chargesValueSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + + config_.posGroupId = createGroup(config_.particleSubGroupId, "position"); + config_.posStepSetId = createChunkedDataset(config_.posGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); + config_.posTimeSetId = createChunkedDataset(config_.posGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); + config_.posValueSetId = createChunkedDataset(config_.posGroupId, std::vector {1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); } hid_t DumpH5MDParallelImpl::createFile(const std::string& filename, const hid_t& propertyList) const @@ -140,9 +147,17 @@ void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const void DumpH5MDParallelImpl::close() const { - closeDataset(config_.boxValueSetId); - 
closeDataset(config_.timeSetId); - closeDataset(config_.stepSetId); + closeDataset(config_.posValueSetId); + closeDataset(config_.posTimeSetId); + closeDataset(config_.posStepSetId); + closeGroup(config_.posGroupId); + closeDataset(config_.chargesValueSetId); + closeDataset(config_.chargesTimeSetId); + closeDataset(config_.chargesStepSetId); + closeGroup(config_.chargesGroupId); + closeDataset(config_.edgesValueSetId); + closeDataset(config_.edgesTimeSetId); + closeDataset(config_.edgesStepSetId); closeGroup(config_.edgesGroupId); closeGroup(config_.boxGroupId); closeGroup(config_.particleSubGroupId); @@ -151,7 +166,7 @@ void DumpH5MDParallelImpl::close() const } void DumpH5MDParallelImpl::openBox() const -{ +{ config_.boxGroupId = createGroup(config_.particleSubGroupId, "box"); std::vector dims = {3}; @@ -168,22 +183,18 @@ void DumpH5MDParallelImpl::openBox() const CHECK_HDF5(H5Aclose(att)); CHECK_HDF5(H5Sclose(space)); CHECK_HDF5(H5Tclose(boundaryType)); - - config_.edgesGroupId = createGroup(config_.boxGroupId, "edges"); - std::vector stepDims = {1}; - std::vector timeDims = {1}; - std::vector boxValueDims = {1, 3}; - - config_.stepSetId = createChunkedDataset(config_.edgesGroupId, stepDims, "step", H5T_NATIVE_INT64); - config_.timeSetId = createChunkedDataset(config_.edgesGroupId, timeDims, "time", H5T_NATIVE_DOUBLE); - config_.boxValueSetId = createChunkedDataset(config_.edgesGroupId, boxValueDims, "value", H5T_NATIVE_DOUBLE); + config_.edgesGroupId = createGroup(config_.boxGroupId, "edges"); + config_.edgesStepSetId = createChunkedDataset(config_.edgesGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); + config_.edgesTimeSetId = createChunkedDataset(config_.edgesGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); + config_.edgesValueSetId = createChunkedDataset(config_.edgesGroupId, std::vector {1, 3}, "value", H5T_NATIVE_DOUBLE); } hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const std::vector& dims, const std::string& name, 
const hid_t& dtype) const { - const std::vector max_dims = {H5S_UNLIMITED, dims[1]}; - hid_t file_space = H5Screate_simple(dims.size(), dims.data(), max_dims.data()); + std::vector max_dims = dims; + max_dims[0] = H5S_UNLIMITED; + hid_t fileSpace = H5Screate_simple(dims.size(), dims.data(), max_dims.data()); hid_t plist = H5Pcreate(H5P_DATASET_CREATE); H5Pset_layout(plist, H5D_CHUNKED); @@ -191,10 +202,10 @@ hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const std const std::vector chunk_dims = dims; H5Pset_chunk(plist, dims.size(), chunk_dims.data()); - auto datasetId = H5Dcreate(groupId, name.c_str(), dtype, file_space, H5P_DEFAULT, plist, H5P_DEFAULT); + auto datasetId = H5Dcreate(groupId, name.c_str(), dtype, fileSpace, H5P_DEFAULT, plist, H5P_DEFAULT); H5Pclose(plist); - H5Sclose(file_space); + H5Sclose(fileSpace); return datasetId; } @@ -214,86 +225,75 @@ void DumpH5MDParallelImpl::dumpStep( updateCache(h_atoms); - appendData(config_.stepSetId, std::vector{step}); - appendData(config_.timeSetId, std::vector{real_c(step) * dt}); - appendData(config_.boxValueSetId, std::vector{subdomain.diameter[0], subdomain.diameter[1], subdomain.diameter[2]}); + appendEdges(step, dt, subdomain); + appendCharges(step, dt, h_atoms); + appendPositions(step, dt, h_atoms); config_.saveCount += 1; } -template -void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector& data) const +void DumpH5MDParallelImpl::appendEdges(const idx_t& step, const real_t& dt, const data::Subdomain& subdomain) const { - const hsize_t rank = 2; - const std::vector dims = {1, data.size()}; - - const hid_t mem_space = H5Screate_simple(rank, dims.data(), NULL); - - const std::vector newSize = {config_.saveCount + 1, data.size()}; - H5Dset_extent(datasetId, newSize.data()); - - const auto file_space = H5Dget_space(datasetId); - - const std::vector start = {config_.saveCount, 0}; - const std::vector count = {1, data.size()}; - H5Sselect_hyperslab(file_space, 
H5S_SELECT_SET, start.data(), NULL, count.data(), NULL); - - H5Dwrite(datasetId, typeToHDF5(), mem_space, file_space, H5P_DEFAULT, data.data()); - - H5Sclose(file_space); - H5Sclose(mem_space); + appendData(config_.edgesStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.edgesTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData(config_.edgesValueSetId, std::vector{subdomain.diameter[0], subdomain.diameter[1], subdomain.diameter[2]}, std::vector{1, 3}); } -template -void DumpH5MDParallelImpl::appendParallel(const hid_t datasetId, - const std::vector& globalDims, - const std::vector& localDims, - const std::vector& data) +void DumpH5MDParallelImpl::appendCharges(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const { - MRMD_HOST_CHECK_EQUAL(globalDims.size(), localDims.size()); - MRMD_HOST_CHECK_EQUAL( - data.size(), - std::accumulate(localDims.begin(), localDims.end(), hsize_t(1), std::multiplies<>())); - - auto dataSpace = - CHECK_HDF5(H5Screate_simple(int_c(globalDims.size()), globalDims.data(), NULL)); - - std::vector offset(globalDims.size(), 0); - offset[1] = particleOffset; - std::vector stride(globalDims.size(), 1); - std::vector count(globalDims.size(), 1); - for (auto i = 0; i < int_c(globalDims.size()); ++i) + appendData(config_.chargesStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.chargesTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + hsize_t numberLocalAtoms = atoms.numLocalAtoms; + constexpr int64_t dimensions = 1; + std::vector charges; + charges.reserve(numLocalParticles * dimensions); + for (idx_t idx = 0; idx < numLocalParticles; ++idx) { - MRMD_HOST_CHECK_LESSEQUAL( - localDims[i] + offset[i], globalDims[i], fmt::format("i = {}", i)); + charges.emplace_back(atoms.getCharge()(idx)); } - auto dstSpace = CHECK_HDF5(H5Dget_space(datasetId)); - CHECK_HDF5(H5Sselect_hyperslab( - dstSpace, H5S_SELECT_SET, offset.data(), stride.data(), count.data(), 
localDims.data())); - - std::vector localOffset(globalDims.size(), 0); - auto srcSpace = - CHECK_HDF5(H5Screate_simple(int_c(localDims.size()), localDims.data(), NULL)); - CHECK_HDF5(H5Sselect_hyperslab(srcSpace, - H5S_SELECT_SET, - localOffset.data(), - stride.data(), - count.data(), - localDims.data())); - - auto dataPropertyList = CHECK_HDF5(H5Pcreate(H5P_DATASET_XFER)); - CHECK_HDF5(H5Pset_dxpl_mpio(dataPropertyList, H5FD_MPIO_COLLECTIVE)); - CHECK_HDF5(H5Dwrite(datasetId, typeToHDF5(), srcSpace, dstSpace, dataPropertyList, data.data())); - - CHECK_HDF5(H5Pclose(dataPropertyList)); - CHECK_HDF5(H5Sclose(dstSpace)); - CHECK_HDF5(H5Sclose(srcSpace)); - CHECK_HDF5(H5Sclose(dataSpace)); + MRMD_HOST_CHECK_EQUAL(int64_c(charges.size()), numLocalParticles * dimensions); + appendData(config_.chargesValueSetId, charges, std::vector{1, numberLocalAtoms, dimensions}); } +void DumpH5MDParallelImpl::appendPositions(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +{ + appendData(config_.posStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.posTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + hsize_t numberLocalAtoms = atoms.numLocalAtoms; + constexpr int64_t dimensions = 3; + std::vector positions; + positions.reserve(numLocalParticles * dimensions); + for (idx_t idx = 0; idx < atoms.numLocalAtoms; ++idx) + { + positions.emplace_back(atoms.getPos()(idx, 0)); + positions.emplace_back(atoms.getPos()(idx, 1)); + positions.emplace_back(atoms.getPos()(idx, 2)); + } + MRMD_HOST_CHECK_EQUAL(int64_c(positions.size()), numLocalParticles * dimensions); + appendData(config_.posValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); +} +template +void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const +{ + const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), NULL); + std::vector newSize = dims; + newSize[0] = config_.saveCount + 1; + 
H5Dset_extent(datasetId, newSize.data()); + const auto fileSpace = H5Dget_space(datasetId); + + std::vector start(dims.size(), 0); + start[0] = config_.saveCount; + std::vector count = dims; + count[0] = 1; + H5Sselect_hyperslab(fileSpace, H5S_SELECT_SET, start.data(), NULL, count.data(), NULL); + H5Dwrite(datasetId, typeToHDF5(), memorySpace, fileSpace, H5P_DEFAULT, data.data()); + + H5Sclose(fileSpace); + H5Sclose(memorySpace); +} template void DumpH5MDParallelImpl::writeParallel(hid_t fileId, @@ -724,10 +724,10 @@ void DumpH5MDParallelImpl::dump(const std::string& filename, } } // namespace impl -void DumpH5MDParallel::open(const std::string& filename) +void DumpH5MDParallel::open(const std::string& filename, const data::Atoms& atoms) { impl::DumpH5MDParallelImpl helper(*this); - helper.open(filename); + helper.open(filename, atoms); } void DumpH5MDParallel::dumpStep( diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index b480a7c8..c25f90ea 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -33,7 +33,7 @@ class DumpH5MDParallel : mpiInfo(mpiInfoArg), author(authorArg), particleSubGroupName(particleSubGroupNameArg) { } - void open(const std::string& filename); + void open(const std::string& filename, const data::Atoms& atoms); void dumpStep( const data::Subdomain& subdomain, @@ -73,9 +73,17 @@ class DumpH5MDParallel hid_t particleSubGroupId; hid_t boxGroupId; hid_t edgesGroupId; - hid_t stepSetId; - hid_t timeSetId; - hid_t boxValueSetId; + hid_t edgesStepSetId; + hid_t edgesTimeSetId; + hid_t edgesValueSetId; + hid_t chargesGroupId; + hid_t chargesStepSetId; + hid_t chargesTimeSetId; + hid_t chargesValueSetId; + hid_t posGroupId; + hid_t posStepSetId; + hid_t posTimeSetId; + hid_t posValueSetId; hsize_t saveCount = 0; }; diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index ec8103da..b66656c7 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -128,7 +128,7 @@ TEST(H5MD, 
dumpMultipleSteps) auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); - dump.open("dummyMultipleSteps.hdf5"); + dump.open("dummyMultipleSteps.hdf5", atoms1); for (idx_t step = 0; step < 10; ++step) { From 70438905ffc84a6f416cbe903eae865fc5356626 Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 16:28:30 +0100 Subject: [PATCH 13/29] primitive way to dump all data into H5MD file stepwise, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 143 ++++++++++++++++++++++++++++++++++- mrmd/io/DumpH5MDParallel.hpp | 20 +++++ 2 files changed, 160 insertions(+), 3 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 9ca0c80e..39dd5fd3 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -60,13 +60,16 @@ class DumpH5MDParallelImpl hid_t createChunkedDataset(const hid_t& groupId, const std::vector& dims, const std::string& name, const hid_t& dtype) const; void closeDataset(const hid_t& datasetId) const; - void writeStep(const idx_t& step) const; - void writeTime(const real_t& time) const; template void appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const; void appendEdges(const idx_t& step, const real_t& dt, const data::Subdomain& subdomain) const; void appendCharges(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; + void appendForces(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; + void appendMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; void appendPositions(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; + void appendRelativeMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; + void appendTypes(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; + void appendVelocities(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; void updateCache(const data::HostAtoms& atoms); @@ -111,15 +114,40 @@ 
void DumpH5MDParallelImpl::open(const std::string& filename, const data::Atoms& writeHeader(config_.fileId); openBox(); - config_.chargesGroupId = createGroup(config_.particleSubGroupId, "charges"); + config_.chargesGroupId = createGroup(config_.particleSubGroupId, "charge"); config_.chargesStepSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); config_.chargesTimeSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); config_.chargesValueSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + config_.forceGroupId = createGroup(config_.particleSubGroupId, "force"); + config_.forceStepSetId = createChunkedDataset(config_.forceGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); + config_.forceTimeSetId = createChunkedDataset(config_.forceGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); + config_.forceValueSetId = createChunkedDataset(config_.forceGroupId, std::vector {1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); + + config_.massGroupId = createGroup(config_.particleSubGroupId, "mass"); + config_.massStepSetId = createChunkedDataset(config_.massGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); + config_.massTimeSetId = createChunkedDataset(config_.massGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); + config_.massValueSetId = createChunkedDataset(config_.massGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + config_.posGroupId = createGroup(config_.particleSubGroupId, "position"); config_.posStepSetId = createChunkedDataset(config_.posGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); config_.posTimeSetId = createChunkedDataset(config_.posGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); config_.posValueSetId = createChunkedDataset(config_.posGroupId, std::vector {1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); + + config_.relativeMassGroupId = 
createGroup(config_.particleSubGroupId, "relativeMass"); + config_.relativeMassStepSetId = createChunkedDataset(config_.relativeMassGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); + config_.relativeMassTimeSetId = createChunkedDataset(config_.relativeMassGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); + config_.relativeMassValueSetId = createChunkedDataset(config_.relativeMassGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + + config_.typeGroupId = createGroup(config_.particleSubGroupId, "type"); + config_.typeStepSetId = createChunkedDataset(config_.typeGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); + config_.typeTimeSetId = createChunkedDataset(config_.typeGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); + config_.typeValueSetId = createChunkedDataset(config_.typeGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + + config_.velGroupId = createGroup(config_.particleSubGroupId, "velocity"); + config_.velStepSetId = createChunkedDataset(config_.velGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); + config_.velTimeSetId = createChunkedDataset(config_.velGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); + config_.velValueSetId = createChunkedDataset(config_.velGroupId, std::vector {1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); } hid_t DumpH5MDParallelImpl::createFile(const std::string& filename, const hid_t& propertyList) const @@ -147,13 +175,33 @@ void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const void DumpH5MDParallelImpl::close() const { + closeDataset(config_.velValueSetId); + closeDataset(config_.velTimeSetId); + closeDataset(config_.velStepSetId); + closeGroup(config_.velGroupId); + closeDataset(config_.typeValueSetId); + closeDataset(config_.typeTimeSetId); + closeDataset(config_.typeStepSetId); + closeGroup(config_.typeGroupId); + closeDataset(config_.relativeMassValueSetId); + closeDataset(config_.relativeMassTimeSetId); + 
closeDataset(config_.relativeMassStepSetId); + closeGroup(config_.relativeMassGroupId); closeDataset(config_.posValueSetId); closeDataset(config_.posTimeSetId); closeDataset(config_.posStepSetId); closeGroup(config_.posGroupId); + closeDataset(config_.massStepSetId); + closeDataset(config_.massTimeSetId); + closeDataset(config_.massValueSetId); + closeGroup(config_.massGroupId); closeDataset(config_.chargesValueSetId); closeDataset(config_.chargesTimeSetId); closeDataset(config_.chargesStepSetId); + closeDataset(config_.forceValueSetId); + closeDataset(config_.forceTimeSetId); + closeDataset(config_.forceStepSetId); + closeGroup(config_.forceGroupId); closeGroup(config_.chargesGroupId); closeDataset(config_.edgesValueSetId); closeDataset(config_.edgesTimeSetId); @@ -227,7 +275,12 @@ void DumpH5MDParallelImpl::dumpStep( appendEdges(step, dt, subdomain); appendCharges(step, dt, h_atoms); + appendForces(step, dt, h_atoms); + appendMasses(step, dt, h_atoms); appendPositions(step, dt, h_atoms); + appendRelativeMasses(step, dt, h_atoms); + appendTypes(step, dt, h_atoms); + appendVelocities(step, dt, h_atoms); config_.saveCount += 1; } @@ -254,6 +307,40 @@ void DumpH5MDParallelImpl::appendCharges(const idx_t& step, const real_t& dt, co appendData(config_.chargesValueSetId, charges, std::vector{1, numberLocalAtoms, dimensions}); } +void DumpH5MDParallelImpl::appendForces(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +{ + appendData(config_.posStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.posTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + hsize_t numberLocalAtoms = atoms.numLocalAtoms; + constexpr int64_t dimensions = 3; + std::vector positions; + positions.reserve(numLocalParticles * dimensions); + for (idx_t idx = 0; idx < atoms.numLocalAtoms; ++idx) + { + positions.emplace_back(atoms.getForce()(idx, 0)); + positions.emplace_back(atoms.getForce()(idx, 1)); + positions.emplace_back(atoms.getForce()(idx, 2)); + 
} + MRMD_HOST_CHECK_EQUAL(int64_c(positions.size()), numLocalParticles * dimensions); + appendData(config_.posValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); +} + +void DumpH5MDParallelImpl::appendMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +{ + appendData(config_.massStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.massTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + hsize_t numberLocalAtoms = atoms.numLocalAtoms; + constexpr int64_t dimensions = 1; + std::vector masses; + masses.reserve(numLocalParticles * dimensions); + for (idx_t idx = 0; idx < numLocalParticles; ++idx) + { + masses.emplace_back(atoms.getMass()(idx)); + } + MRMD_HOST_CHECK_EQUAL(int64_c(masses.size()), numLocalParticles * dimensions); + appendData(config_.chargesValueSetId, masses, std::vector{1, numberLocalAtoms, dimensions}); +} + void DumpH5MDParallelImpl::appendPositions(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const { appendData(config_.posStepSetId, std::vector{step}, std::vector{1}); @@ -272,6 +359,56 @@ void DumpH5MDParallelImpl::appendPositions(const idx_t& step, const real_t& dt, appendData(config_.posValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); } +void DumpH5MDParallelImpl::appendRelativeMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +{ + appendData(config_.relativeMassStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.relativeMassTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + hsize_t numberLocalAtoms = atoms.numLocalAtoms; + constexpr int64_t dimensions = 1; + std::vector relativeMasses; + relativeMasses.reserve(numLocalParticles * dimensions); + for (idx_t idx = 0; idx < numLocalParticles; ++idx) + { + relativeMasses.emplace_back(atoms.getRelativeMass()(idx)); + } + MRMD_HOST_CHECK_EQUAL(int64_c(relativeMasses.size()), numLocalParticles * dimensions); + 
appendData(config_.chargesValueSetId, relativeMasses, std::vector{1, numberLocalAtoms, dimensions}); +} + +void DumpH5MDParallelImpl::appendTypes(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +{ + appendData(config_.typeStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.typeTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + hsize_t numberLocalAtoms = atoms.numLocalAtoms; + constexpr int64_t dimensions = 1; + std::vector types; + types.reserve(numLocalParticles * dimensions); + for (idx_t idx = 0; idx < numLocalParticles; ++idx) + { + types.emplace_back(atoms.getType()(idx)); + } + MRMD_HOST_CHECK_EQUAL(int64_c(types.size()), numLocalParticles * dimensions); + appendData(config_.chargesValueSetId, types, std::vector{1, numberLocalAtoms, dimensions}); +} + +void DumpH5MDParallelImpl::appendVelocities(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +{ + appendData(config_.velStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.velTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + hsize_t numberLocalAtoms = atoms.numLocalAtoms; + constexpr int64_t dimensions = 3; + std::vector velocities; + velocities.reserve(numLocalParticles * dimensions); + for (idx_t idx = 0; idx < atoms.numLocalAtoms; ++idx) + { + velocities.emplace_back(atoms.getVel()(idx, 0)); + velocities.emplace_back(atoms.getVel()(idx, 1)); + velocities.emplace_back(atoms.getVel()(idx, 2)); + } + MRMD_HOST_CHECK_EQUAL(int64_c(velocities.size()), numLocalParticles * dimensions); + appendData(config_.posValueSetId, velocities, std::vector{1, numberLocalAtoms, dimensions}); +} + template void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const { diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index c25f90ea..dd133fd7 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -80,10 +80,30 @@ class 
DumpH5MDParallel hid_t chargesStepSetId; hid_t chargesTimeSetId; hid_t chargesValueSetId; + hid_t forceGroupId; + hid_t forceStepSetId; + hid_t forceTimeSetId; + hid_t forceValueSetId; + hid_t massGroupId; + hid_t massStepSetId; + hid_t massTimeSetId; + hid_t massValueSetId; hid_t posGroupId; hid_t posStepSetId; hid_t posTimeSetId; hid_t posValueSetId; + hid_t relativeMassGroupId; + hid_t relativeMassStepSetId; + hid_t relativeMassTimeSetId; + hid_t relativeMassValueSetId; + hid_t typeGroupId; + hid_t typeStepSetId; + hid_t typeTimeSetId; + hid_t typeValueSetId; + hid_t velGroupId; + hid_t velStepSetId; + hid_t velTimeSetId; + hid_t velValueSetId; hsize_t saveCount = 0; }; From ca749f1fb8109064c403f4fd6adaf8864c0557dd Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 18:00:11 +0100 Subject: [PATCH 14/29] hotfixes and added appendDataParallel to write many-particle info efficiently, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 58 +++++++++++++++++++++++++++++------- 1 file changed, 48 insertions(+), 10 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 39dd5fd3..0386bdbc 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -62,6 +62,8 @@ class DumpH5MDParallelImpl template void appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const; + template + void appendDataParallel(const hid_t datasetId, const std::vector& data, const std::vector& dims) const; void appendEdges(const idx_t& step, const real_t& dt, const data::Subdomain& subdomain) const; void appendCharges(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; void appendForces(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; @@ -142,7 +144,7 @@ void DumpH5MDParallelImpl::open(const std::string& filename, const data::Atoms& config_.typeGroupId = createGroup(config_.particleSubGroupId, "type"); config_.typeStepSetId = 
createChunkedDataset(config_.typeGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); config_.typeTimeSetId = createChunkedDataset(config_.typeGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.typeValueSetId = createChunkedDataset(config_.typeGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + config_.typeValueSetId = createChunkedDataset(config_.typeGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_INT64); config_.velGroupId = createGroup(config_.particleSubGroupId, "velocity"); config_.velStepSetId = createChunkedDataset(config_.velGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); @@ -304,13 +306,13 @@ void DumpH5MDParallelImpl::appendCharges(const idx_t& step, const real_t& dt, co charges.emplace_back(atoms.getCharge()(idx)); } MRMD_HOST_CHECK_EQUAL(int64_c(charges.size()), numLocalParticles * dimensions); - appendData(config_.chargesValueSetId, charges, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel(config_.chargesValueSetId, charges, std::vector{1, numberLocalAtoms, dimensions}); } void DumpH5MDParallelImpl::appendForces(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const { - appendData(config_.posStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.posTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData(config_.forceStepSetId, std::vector{step}, std::vector{1}); + appendData(config_.forceTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); hsize_t numberLocalAtoms = atoms.numLocalAtoms; constexpr int64_t dimensions = 3; std::vector positions; @@ -322,7 +324,7 @@ void DumpH5MDParallelImpl::appendForces(const idx_t& step, const real_t& dt, con positions.emplace_back(atoms.getForce()(idx, 2)); } MRMD_HOST_CHECK_EQUAL(int64_c(positions.size()), numLocalParticles * dimensions); - appendData(config_.posValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel(config_.forceValueSetId, positions, 
std::vector{1, numberLocalAtoms, dimensions}); } void DumpH5MDParallelImpl::appendMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const @@ -338,7 +340,7 @@ void DumpH5MDParallelImpl::appendMasses(const idx_t& step, const real_t& dt, con masses.emplace_back(atoms.getMass()(idx)); } MRMD_HOST_CHECK_EQUAL(int64_c(masses.size()), numLocalParticles * dimensions); - appendData(config_.chargesValueSetId, masses, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel(config_.massValueSetId, masses, std::vector{1, numberLocalAtoms, dimensions}); } void DumpH5MDParallelImpl::appendPositions(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const @@ -356,7 +358,7 @@ void DumpH5MDParallelImpl::appendPositions(const idx_t& step, const real_t& dt, positions.emplace_back(atoms.getPos()(idx, 2)); } MRMD_HOST_CHECK_EQUAL(int64_c(positions.size()), numLocalParticles * dimensions); - appendData(config_.posValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel(config_.posValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); } void DumpH5MDParallelImpl::appendRelativeMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const @@ -372,7 +374,7 @@ void DumpH5MDParallelImpl::appendRelativeMasses(const idx_t& step, const real_t& relativeMasses.emplace_back(atoms.getRelativeMass()(idx)); } MRMD_HOST_CHECK_EQUAL(int64_c(relativeMasses.size()), numLocalParticles * dimensions); - appendData(config_.chargesValueSetId, relativeMasses, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel(config_.relativeMassValueSetId, relativeMasses, std::vector{1, numberLocalAtoms, dimensions}); } void DumpH5MDParallelImpl::appendTypes(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const @@ -388,7 +390,7 @@ void DumpH5MDParallelImpl::appendTypes(const idx_t& step, const real_t& dt, cons types.emplace_back(atoms.getType()(idx)); } 
MRMD_HOST_CHECK_EQUAL(int64_c(types.size()), numLocalParticles * dimensions); - appendData(config_.chargesValueSetId, types, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel(config_.typeValueSetId, types, std::vector{1, numberLocalAtoms, dimensions}); } void DumpH5MDParallelImpl::appendVelocities(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const @@ -406,7 +408,43 @@ void DumpH5MDParallelImpl::appendVelocities(const idx_t& step, const real_t& dt, velocities.emplace_back(atoms.getVel()(idx, 2)); } MRMD_HOST_CHECK_EQUAL(int64_c(velocities.size()), numLocalParticles * dimensions); - appendData(config_.posValueSetId, velocities, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel(config_.velValueSetId, velocities, std::vector{1, numberLocalAtoms, dimensions}); +} + +template +void DumpH5MDParallelImpl::appendDataParallel(const hid_t datasetId, const std::vector& data, const std::vector& dims) const +{ + std::vector newSize = dims; + newSize[0] = config_.saveCount + 1; + H5Dset_extent(datasetId, newSize.data()); + + const auto fileSpace = H5Dget_space(datasetId); + + std::vector offset(dims.size(), 0); + offset[0] = config_.saveCount; + offset[1] = particleOffset; + std::vector stride(dims.size(), 1); + std::vector count(dims.size(), 1); + + CHECK_HDF5(H5Sselect_hyperslab( + fileSpace, H5S_SELECT_SET, offset.data(), stride.data(), count.data(), dims.data())); + + std::vector localOffset(dims.size(), 0); + const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), NULL); + CHECK_HDF5(H5Sselect_hyperslab(memorySpace, + H5S_SELECT_SET, + localOffset.data(), + stride.data(), + count.data(), + dims.data())); + + auto propertyList = CHECK_HDF5(H5Pcreate(H5P_DATASET_XFER)); + CHECK_HDF5(H5Pset_dxpl_mpio(propertyList, H5FD_MPIO_COLLECTIVE)); + CHECK_HDF5(H5Dwrite(datasetId, typeToHDF5(), memorySpace, fileSpace, propertyList, data.data())); + + CHECK_HDF5(H5Pclose(propertyList)); + H5Sclose(fileSpace); + 
H5Sclose(memorySpace); } template From 4cc5bca4e92cc86cab39975bcfda26aa1967d47e Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 18:04:07 +0100 Subject: [PATCH 15/29] changed test output to .h5md, Ref #038 --- mrmd/io/H5MD.test.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index b66656c7..f10ed4fd 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -128,7 +128,7 @@ TEST(H5MD, dumpMultipleSteps) auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); - dump.open("dummyMultipleSteps.hdf5", atoms1); + dump.open("dummyMultipleSteps.h5md", atoms1); for (idx_t step = 0; step < 10; ++step) { From 2c40240f35bd5f9f66c29f7faf51bf369bd5289e Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 18:08:18 +0100 Subject: [PATCH 16/29] fixed clang-format, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 326 +++++++++++++++++++++-------------- mrmd/io/DumpH5MDParallel.hpp | 9 +- mrmd/io/H5MD.test.cpp | 2 +- 3 files changed, 204 insertions(+), 133 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 0386bdbc..d4c90bc1 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -40,36 +40,44 @@ class DumpH5MDParallelImpl explicit DumpH5MDParallelImpl(DumpH5MDParallel& config) : config_(config) {} void open(const std::string& filename, const data::Atoms& atoms); - void dumpStep( - const data::Subdomain& subdomain, - const data::Atoms& atoms, - const idx_t step, - const real_t dt); - void close() const; + void dumpStep(const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t dt); + void close() const; void dump(const std::string& filename, - const data::Subdomain& subdomain, - const data::Atoms& atoms); + const data::Subdomain& subdomain, + const data::Atoms& atoms); private: hid_t createFile(const std::string& filename, const hid_t& propertyList) const; - void closeFile(const hid_t& fileId) 
const; + void closeFile(const hid_t& fileId) const; hid_t createGroup(const hid_t& parentElementId, const std::string& groupName) const; void closeGroup(const hid_t& groupId) const; void openBox() const; - hid_t createChunkedDataset(const hid_t& groupId, const std::vector& dims, const std::string& name, const hid_t& dtype) const; + hid_t createChunkedDataset(const hid_t& groupId, + const std::vector& dims, + const std::string& name, + const hid_t& dtype) const; void closeDataset(const hid_t& datasetId) const; template - void appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const; + void appendData(const hid_t datasetId, + const std::vector& data, + const std::vector& dims) const; template - void appendDataParallel(const hid_t datasetId, const std::vector& data, const std::vector& dims) const; + void appendDataParallel(const hid_t datasetId, + const std::vector& data, + const std::vector& dims) const; void appendEdges(const idx_t& step, const real_t& dt, const data::Subdomain& subdomain) const; void appendCharges(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; void appendForces(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; void appendMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; void appendPositions(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; - void appendRelativeMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; + void appendRelativeMasses(const idx_t& step, + const real_t& dt, + const data::HostAtoms& atoms) const; void appendTypes(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; void appendVelocities(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const; @@ -87,10 +95,10 @@ class DumpH5MDParallelImpl template void writeParallel(hid_t fileId, - const std::string& name, - const std::vector& globalDims, - const std::vector& localDims, - const 
std::vector& data); + const std::string& name, + const std::vector& globalDims, + const std::vector& localDims, + const std::vector& data); DumpH5MDParallel& config_; @@ -117,39 +125,64 @@ void DumpH5MDParallelImpl::open(const std::string& filename, const data::Atoms& openBox(); config_.chargesGroupId = createGroup(config_.particleSubGroupId, "charge"); - config_.chargesStepSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); - config_.chargesTimeSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.chargesValueSetId = createChunkedDataset(config_.chargesGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + config_.chargesStepSetId = createChunkedDataset( + config_.chargesGroupId, std::vector{1}, "step", H5T_NATIVE_INT64); + config_.chargesTimeSetId = createChunkedDataset( + config_.chargesGroupId, std::vector{1}, "time", H5T_NATIVE_DOUBLE); + config_.chargesValueSetId = createChunkedDataset(config_.chargesGroupId, + std::vector{1, atoms.size(), 1}, + "value", + H5T_NATIVE_DOUBLE); config_.forceGroupId = createGroup(config_.particleSubGroupId, "force"); - config_.forceStepSetId = createChunkedDataset(config_.forceGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); - config_.forceTimeSetId = createChunkedDataset(config_.forceGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.forceValueSetId = createChunkedDataset(config_.forceGroupId, std::vector {1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); - + config_.forceStepSetId = createChunkedDataset( + config_.forceGroupId, std::vector{1}, "step", H5T_NATIVE_INT64); + config_.forceTimeSetId = createChunkedDataset( + config_.forceGroupId, std::vector{1}, "time", H5T_NATIVE_DOUBLE); + config_.forceValueSetId = createChunkedDataset( + config_.forceGroupId, std::vector{1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); + config_.massGroupId = createGroup(config_.particleSubGroupId, "mass"); - 
config_.massStepSetId = createChunkedDataset(config_.massGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); - config_.massTimeSetId = createChunkedDataset(config_.massGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.massValueSetId = createChunkedDataset(config_.massGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + config_.massStepSetId = createChunkedDataset( + config_.massGroupId, std::vector{1}, "step", H5T_NATIVE_INT64); + config_.massTimeSetId = createChunkedDataset( + config_.massGroupId, std::vector{1}, "time", H5T_NATIVE_DOUBLE); + config_.massValueSetId = createChunkedDataset( + config_.massGroupId, std::vector{1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); config_.posGroupId = createGroup(config_.particleSubGroupId, "position"); - config_.posStepSetId = createChunkedDataset(config_.posGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); - config_.posTimeSetId = createChunkedDataset(config_.posGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.posValueSetId = createChunkedDataset(config_.posGroupId, std::vector {1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); + config_.posStepSetId = + createChunkedDataset(config_.posGroupId, std::vector{1}, "step", H5T_NATIVE_INT64); + config_.posTimeSetId = createChunkedDataset( + config_.posGroupId, std::vector{1}, "time", H5T_NATIVE_DOUBLE); + config_.posValueSetId = createChunkedDataset( + config_.posGroupId, std::vector{1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); config_.relativeMassGroupId = createGroup(config_.particleSubGroupId, "relativeMass"); - config_.relativeMassStepSetId = createChunkedDataset(config_.relativeMassGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); - config_.relativeMassTimeSetId = createChunkedDataset(config_.relativeMassGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.relativeMassValueSetId = createChunkedDataset(config_.relativeMassGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_DOUBLE); + 
config_.relativeMassStepSetId = createChunkedDataset( + config_.relativeMassGroupId, std::vector{1}, "step", H5T_NATIVE_INT64); + config_.relativeMassTimeSetId = createChunkedDataset( + config_.relativeMassGroupId, std::vector{1}, "time", H5T_NATIVE_DOUBLE); + config_.relativeMassValueSetId = createChunkedDataset(config_.relativeMassGroupId, + std::vector{1, atoms.size(), 1}, + "value", + H5T_NATIVE_DOUBLE); config_.typeGroupId = createGroup(config_.particleSubGroupId, "type"); - config_.typeStepSetId = createChunkedDataset(config_.typeGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); - config_.typeTimeSetId = createChunkedDataset(config_.typeGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.typeValueSetId = createChunkedDataset(config_.typeGroupId, std::vector {1, atoms.size(), 1}, "value", H5T_NATIVE_INT64); + config_.typeStepSetId = createChunkedDataset( + config_.typeGroupId, std::vector{1}, "step", H5T_NATIVE_INT64); + config_.typeTimeSetId = createChunkedDataset( + config_.typeGroupId, std::vector{1}, "time", H5T_NATIVE_DOUBLE); + config_.typeValueSetId = createChunkedDataset( + config_.typeGroupId, std::vector{1, atoms.size(), 1}, "value", H5T_NATIVE_INT64); config_.velGroupId = createGroup(config_.particleSubGroupId, "velocity"); - config_.velStepSetId = createChunkedDataset(config_.velGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); - config_.velTimeSetId = createChunkedDataset(config_.velGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.velValueSetId = createChunkedDataset(config_.velGroupId, std::vector {1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); + config_.velStepSetId = + createChunkedDataset(config_.velGroupId, std::vector{1}, "step", H5T_NATIVE_INT64); + config_.velTimeSetId = createChunkedDataset( + config_.velGroupId, std::vector{1}, "time", H5T_NATIVE_DOUBLE); + config_.velValueSetId = createChunkedDataset( + config_.velGroupId, std::vector{1, atoms.size(), 3}, "value", H5T_NATIVE_DOUBLE); } hid_t 
DumpH5MDParallelImpl::createFile(const std::string& filename, const hid_t& propertyList) const @@ -158,22 +191,17 @@ hid_t DumpH5MDParallelImpl::createFile(const std::string& filename, const hid_t& return fileId; } -void DumpH5MDParallelImpl::closeFile(const hid_t& fileId) const -{ - CHECK_HDF5(H5Fclose(fileId)); -} +void DumpH5MDParallelImpl::closeFile(const hid_t& fileId) const { CHECK_HDF5(H5Fclose(fileId)); } -hid_t DumpH5MDParallelImpl::createGroup(const hid_t& parentElementId, const std::string& groupName) const +hid_t DumpH5MDParallelImpl::createGroup(const hid_t& parentElementId, + const std::string& groupName) const { - auto groupId = - CHECK_HDF5(H5Gcreate(parentElementId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); + auto groupId = CHECK_HDF5( + H5Gcreate(parentElementId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)); return groupId; } -void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const -{ - CHECK_HDF5(H5Gclose(groupId)); -} +void DumpH5MDParallelImpl::closeGroup(const hid_t& groupId) const { CHECK_HDF5(H5Gclose(groupId)); } void DumpH5MDParallelImpl::close() const { @@ -216,31 +244,38 @@ void DumpH5MDParallelImpl::close() const } void DumpH5MDParallelImpl::openBox() const -{ - config_.boxGroupId = createGroup(config_.particleSubGroupId, "box"); +{ + config_.boxGroupId = createGroup(config_.particleSubGroupId, "box"); std::vector dims = {3}; - CHECK_HDF5( - H5LTset_attribute_int(config_.particleSubGroupId, "box", "dimension", dims.data(), dims.size())); + CHECK_HDF5(H5LTset_attribute_int( + config_.particleSubGroupId, "box", "dimension", dims.data(), dims.size())); auto boundaryType = H5Tcopy(H5T_C_S1); CHECK_HDF5(H5Tset_size(boundaryType, 8)); CHECK_HDF5(H5Tset_strpad(boundaryType, H5T_STR_NULLPAD)); std::vector boundaryDims = {3}; auto space = H5Screate_simple(int_c(boundaryDims.size()), boundaryDims.data(), nullptr); - auto att = H5Acreate(config_.boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, 
H5P_DEFAULT); + auto att = + H5Acreate(config_.boxGroupId, "boundary", boundaryType, space, H5P_DEFAULT, H5P_DEFAULT); CHECK_HDF5(H5Awrite(att, boundaryType, "periodicperiodicperiodic")); CHECK_HDF5(H5Aclose(att)); CHECK_HDF5(H5Sclose(space)); CHECK_HDF5(H5Tclose(boundaryType)); - + config_.edgesGroupId = createGroup(config_.boxGroupId, "edges"); - config_.edgesStepSetId = createChunkedDataset(config_.edgesGroupId, std::vector {1}, "step", H5T_NATIVE_INT64); - config_.edgesTimeSetId = createChunkedDataset(config_.edgesGroupId, std::vector {1}, "time", H5T_NATIVE_DOUBLE); - config_.edgesValueSetId = createChunkedDataset(config_.edgesGroupId, std::vector {1, 3}, "value", H5T_NATIVE_DOUBLE); + config_.edgesStepSetId = createChunkedDataset( + config_.edgesGroupId, std::vector{1}, "step", H5T_NATIVE_INT64); + config_.edgesTimeSetId = createChunkedDataset( + config_.edgesGroupId, std::vector{1}, "time", H5T_NATIVE_DOUBLE); + config_.edgesValueSetId = createChunkedDataset( + config_.edgesGroupId, std::vector{1, 3}, "value", H5T_NATIVE_DOUBLE); } -hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const std::vector& dims, const std::string& name, const hid_t& dtype) const +hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, + const std::vector& dims, + const std::string& name, + const hid_t& dtype) const { std::vector max_dims = dims; max_dims[0] = H5S_UNLIMITED; @@ -252,7 +287,8 @@ hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const std const std::vector chunk_dims = dims; H5Pset_chunk(plist, dims.size(), chunk_dims.data()); - auto datasetId = H5Dcreate(groupId, name.c_str(), dtype, fileSpace, H5P_DEFAULT, plist, H5P_DEFAULT); + auto datasetId = + H5Dcreate(groupId, name.c_str(), dtype, fileSpace, H5P_DEFAULT, plist, H5P_DEFAULT); H5Pclose(plist); H5Sclose(fileSpace); @@ -260,16 +296,12 @@ hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, const std return datasetId; } -void 
DumpH5MDParallelImpl::closeDataset(const hid_t& datasetId) const -{ - H5Dclose(datasetId); -} +void DumpH5MDParallelImpl::closeDataset(const hid_t& datasetId) const { H5Dclose(datasetId); } -void DumpH5MDParallelImpl::dumpStep( - const data::Subdomain& subdomain, - const data::Atoms& atoms, - const idx_t step, - const real_t dt) +void DumpH5MDParallelImpl::dumpStep(const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t dt) { data::HostAtoms h_atoms(atoms); // NOLINT @@ -286,19 +318,28 @@ void DumpH5MDParallelImpl::dumpStep( config_.saveCount += 1; } -void DumpH5MDParallelImpl::appendEdges(const idx_t& step, const real_t& dt, const data::Subdomain& subdomain) const +void DumpH5MDParallelImpl::appendEdges(const idx_t& step, + const real_t& dt, + const data::Subdomain& subdomain) const { appendData(config_.edgesStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.edgesTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); - appendData(config_.edgesValueSetId, std::vector{subdomain.diameter[0], subdomain.diameter[1], subdomain.diameter[2]}, std::vector{1, 3}); + appendData( + config_.edgesTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData( + config_.edgesValueSetId, + std::vector{subdomain.diameter[0], subdomain.diameter[1], subdomain.diameter[2]}, + std::vector{1, 3}); } -void DumpH5MDParallelImpl::appendCharges(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +void DumpH5MDParallelImpl::appendCharges(const idx_t& step, + const real_t& dt, + const data::HostAtoms& atoms) const { appendData(config_.chargesStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.chargesTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData( + config_.chargesTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); hsize_t numberLocalAtoms = atoms.numLocalAtoms; - constexpr int64_t dimensions = 1; + constexpr int64_t dimensions = 1; std::vector 
charges; charges.reserve(numLocalParticles * dimensions); for (idx_t idx = 0; idx < numLocalParticles; ++idx) @@ -306,13 +347,17 @@ void DumpH5MDParallelImpl::appendCharges(const idx_t& step, const real_t& dt, co charges.emplace_back(atoms.getCharge()(idx)); } MRMD_HOST_CHECK_EQUAL(int64_c(charges.size()), numLocalParticles * dimensions); - appendDataParallel(config_.chargesValueSetId, charges, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel( + config_.chargesValueSetId, charges, std::vector{1, numberLocalAtoms, dimensions}); } -void DumpH5MDParallelImpl::appendForces(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +void DumpH5MDParallelImpl::appendForces(const idx_t& step, + const real_t& dt, + const data::HostAtoms& atoms) const { appendData(config_.forceStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.forceTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData( + config_.forceTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); hsize_t numberLocalAtoms = atoms.numLocalAtoms; constexpr int64_t dimensions = 3; std::vector positions; @@ -324,15 +369,19 @@ void DumpH5MDParallelImpl::appendForces(const idx_t& step, const real_t& dt, con positions.emplace_back(atoms.getForce()(idx, 2)); } MRMD_HOST_CHECK_EQUAL(int64_c(positions.size()), numLocalParticles * dimensions); - appendDataParallel(config_.forceValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel( + config_.forceValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); } -void DumpH5MDParallelImpl::appendMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +void DumpH5MDParallelImpl::appendMasses(const idx_t& step, + const real_t& dt, + const data::HostAtoms& atoms) const { appendData(config_.massStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.massTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData( + 
config_.massTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); hsize_t numberLocalAtoms = atoms.numLocalAtoms; - constexpr int64_t dimensions = 1; + constexpr int64_t dimensions = 1; std::vector masses; masses.reserve(numLocalParticles * dimensions); for (idx_t idx = 0; idx < numLocalParticles; ++idx) @@ -340,13 +389,17 @@ void DumpH5MDParallelImpl::appendMasses(const idx_t& step, const real_t& dt, con masses.emplace_back(atoms.getMass()(idx)); } MRMD_HOST_CHECK_EQUAL(int64_c(masses.size()), numLocalParticles * dimensions); - appendDataParallel(config_.massValueSetId, masses, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel( + config_.massValueSetId, masses, std::vector{1, numberLocalAtoms, dimensions}); } -void DumpH5MDParallelImpl::appendPositions(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +void DumpH5MDParallelImpl::appendPositions(const idx_t& step, + const real_t& dt, + const data::HostAtoms& atoms) const { appendData(config_.posStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.posTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData( + config_.posTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); hsize_t numberLocalAtoms = atoms.numLocalAtoms; constexpr int64_t dimensions = 3; std::vector positions; @@ -358,15 +411,20 @@ void DumpH5MDParallelImpl::appendPositions(const idx_t& step, const real_t& dt, positions.emplace_back(atoms.getPos()(idx, 2)); } MRMD_HOST_CHECK_EQUAL(int64_c(positions.size()), numLocalParticles * dimensions); - appendDataParallel(config_.posValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel( + config_.posValueSetId, positions, std::vector{1, numberLocalAtoms, dimensions}); } -void DumpH5MDParallelImpl::appendRelativeMasses(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +void DumpH5MDParallelImpl::appendRelativeMasses(const idx_t& step, + const real_t& dt, + const 
data::HostAtoms& atoms) const { appendData(config_.relativeMassStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.relativeMassTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData(config_.relativeMassTimeSetId, + std::vector{real_c(step) * dt}, + std::vector{1}); hsize_t numberLocalAtoms = atoms.numLocalAtoms; - constexpr int64_t dimensions = 1; + constexpr int64_t dimensions = 1; std::vector relativeMasses; relativeMasses.reserve(numLocalParticles * dimensions); for (idx_t idx = 0; idx < numLocalParticles; ++idx) @@ -374,15 +432,20 @@ void DumpH5MDParallelImpl::appendRelativeMasses(const idx_t& step, const real_t& relativeMasses.emplace_back(atoms.getRelativeMass()(idx)); } MRMD_HOST_CHECK_EQUAL(int64_c(relativeMasses.size()), numLocalParticles * dimensions); - appendDataParallel(config_.relativeMassValueSetId, relativeMasses, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel(config_.relativeMassValueSetId, + relativeMasses, + std::vector{1, numberLocalAtoms, dimensions}); } -void DumpH5MDParallelImpl::appendTypes(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +void DumpH5MDParallelImpl::appendTypes(const idx_t& step, + const real_t& dt, + const data::HostAtoms& atoms) const { appendData(config_.typeStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.typeTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData( + config_.typeTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); hsize_t numberLocalAtoms = atoms.numLocalAtoms; - constexpr int64_t dimensions = 1; + constexpr int64_t dimensions = 1; std::vector types; types.reserve(numLocalParticles * dimensions); for (idx_t idx = 0; idx < numLocalParticles; ++idx) @@ -390,13 +453,17 @@ void DumpH5MDParallelImpl::appendTypes(const idx_t& step, const real_t& dt, cons types.emplace_back(atoms.getType()(idx)); } MRMD_HOST_CHECK_EQUAL(int64_c(types.size()), numLocalParticles * dimensions); - 
appendDataParallel(config_.typeValueSetId, types, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel( + config_.typeValueSetId, types, std::vector{1, numberLocalAtoms, dimensions}); } -void DumpH5MDParallelImpl::appendVelocities(const idx_t& step, const real_t& dt, const data::HostAtoms& atoms) const +void DumpH5MDParallelImpl::appendVelocities(const idx_t& step, + const real_t& dt, + const data::HostAtoms& atoms) const { appendData(config_.velStepSetId, std::vector{step}, std::vector{1}); - appendData(config_.velTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); + appendData( + config_.velTimeSetId, std::vector{real_c(step) * dt}, std::vector{1}); hsize_t numberLocalAtoms = atoms.numLocalAtoms; constexpr int64_t dimensions = 3; std::vector velocities; @@ -408,18 +475,21 @@ void DumpH5MDParallelImpl::appendVelocities(const idx_t& step, const real_t& dt, velocities.emplace_back(atoms.getVel()(idx, 2)); } MRMD_HOST_CHECK_EQUAL(int64_c(velocities.size()), numLocalParticles * dimensions); - appendDataParallel(config_.velValueSetId, velocities, std::vector{1, numberLocalAtoms, dimensions}); + appendDataParallel( + config_.velValueSetId, velocities, std::vector{1, numberLocalAtoms, dimensions}); } template -void DumpH5MDParallelImpl::appendDataParallel(const hid_t datasetId, const std::vector& data, const std::vector& dims) const +void DumpH5MDParallelImpl::appendDataParallel(const hid_t datasetId, + const std::vector& data, + const std::vector& dims) const { std::vector newSize = dims; newSize[0] = config_.saveCount + 1; H5Dset_extent(datasetId, newSize.data()); const auto fileSpace = H5Dget_space(datasetId); - + std::vector offset(dims.size(), 0); offset[0] = config_.saveCount; offset[1] = particleOffset; @@ -431,24 +501,23 @@ void DumpH5MDParallelImpl::appendDataParallel(const hid_t datasetId, const std:: std::vector localOffset(dims.size(), 0); const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), NULL); - 
CHECK_HDF5(H5Sselect_hyperslab(memorySpace, - H5S_SELECT_SET, - localOffset.data(), - stride.data(), - count.data(), - dims.data())); - - auto propertyList = CHECK_HDF5(H5Pcreate(H5P_DATASET_XFER)); - CHECK_HDF5(H5Pset_dxpl_mpio(propertyList, H5FD_MPIO_COLLECTIVE)); - CHECK_HDF5(H5Dwrite(datasetId, typeToHDF5(), memorySpace, fileSpace, propertyList, data.data())); - + CHECK_HDF5(H5Sselect_hyperslab( + memorySpace, H5S_SELECT_SET, localOffset.data(), stride.data(), count.data(), dims.data())); + + auto propertyList = CHECK_HDF5(H5Pcreate(H5P_DATASET_XFER)); + CHECK_HDF5(H5Pset_dxpl_mpio(propertyList, H5FD_MPIO_COLLECTIVE)); + CHECK_HDF5( + H5Dwrite(datasetId, typeToHDF5(), memorySpace, fileSpace, propertyList, data.data())); + CHECK_HDF5(H5Pclose(propertyList)); H5Sclose(fileSpace); H5Sclose(memorySpace); } template -void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const +void DumpH5MDParallelImpl::appendData(const hid_t datasetId, + const std::vector& data, + const std::vector& dims) const { const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), NULL); @@ -457,7 +526,7 @@ void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector start(dims.size(), 0); start[0] = config_.saveCount; std::vector count = dims; @@ -465,7 +534,7 @@ void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector(), memorySpace, fileSpace, H5P_DEFAULT, data.data()); - + H5Sclose(fileSpace); H5Sclose(memorySpace); } @@ -679,7 +748,8 @@ void DumpH5MDParallelImpl::writeForce(hid_t fileId, const data::HostAtoms& atoms using Datatype = real_t; constexpr int64_t dimensions = 3; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.forceDataset; + std::string groupName = + "/particles/" + config_.particleSubGroupName + "/" + config_.forceDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, 
H5P_DEFAULT); std::vector data; @@ -715,7 +785,8 @@ void DumpH5MDParallelImpl::writeType(hid_t fileId, const data::HostAtoms& atoms) using Datatype = idx_t; constexpr int64_t dimensions = 1; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.typeDataset; + std::string groupName = + "/particles/" + config_.particleSubGroupName + "/" + config_.typeDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -749,7 +820,8 @@ void DumpH5MDParallelImpl::writeMass(hid_t fileId, const data::HostAtoms& atoms) using Datatype = real_t; constexpr int64_t dimensions = 1; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.massDataset; + std::string groupName = + "/particles/" + config_.particleSubGroupName + "/" + config_.massDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -783,7 +855,8 @@ void DumpH5MDParallelImpl::writeCharge(hid_t fileId, const data::HostAtoms& atom using Datatype = real_t; constexpr int64_t dimensions = 1; ///< dimensions of the property - std::string groupName = "/particles/" + config_.particleSubGroupName + "/" + config_.chargeDataset; + std::string groupName = + "/particles/" + config_.particleSubGroupName + "/" + config_.chargeDataset; auto group = H5Gcreate(fileId, groupName.c_str(), H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); std::vector data; @@ -905,11 +978,10 @@ void DumpH5MDParallel::open(const std::string& filename, const data::Atoms& atom helper.open(filename, atoms); } -void DumpH5MDParallel::dumpStep( - const data::Subdomain& subdomain, - const data::Atoms& atoms, - const idx_t step, - const real_t dt) +void DumpH5MDParallel::dumpStep(const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t dt) { impl::DumpH5MDParallelImpl helper(*this); 
helper.dumpStep(subdomain, atoms, step, dt); @@ -941,11 +1013,11 @@ void DumpH5MDParallel::close(const hid_t& /*file_id*/); exit(EXIT_FAILURE); } void DumpH5MDParallel::dumpStep(const hid_t& /*file_id*/, - const data::Subdomain& /*subdomain*/, - const data::Atoms& /*atoms*/) + const data::Subdomain& /*subdomain*/, + const data::Atoms& /*atoms*/) { -MRMD_HOST_CHECK(false, "HDF5 Support not available!"); -exit(EXIT_FAILURE); + MRMD_HOST_CHECK(false, "HDF5 Support not available!"); + exit(EXIT_FAILURE); } void DumpH5MDParallel::dump(const std::string& /*filename*/, diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index dd133fd7..58ddeb52 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -35,11 +35,10 @@ class DumpH5MDParallel } void open(const std::string& filename, const data::Atoms& atoms); - void dumpStep( - const data::Subdomain& subdomain, - const data::Atoms& atoms, - const idx_t step, - const real_t dt); + void dumpStep(const data::Subdomain& subdomain, + const data::Atoms& atoms, + const idx_t step, + const real_t dt); void close(); diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index f10ed4fd..fb8697bc 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -129,7 +129,7 @@ TEST(H5MD, dumpMultipleSteps) auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); dump.open("dummyMultipleSteps.h5md", atoms1); - + for (idx_t step = 0; step < 10; ++step) { dump.dumpStep(subdomain1, atoms1, step, dt); From 3cf56de47da0785f9d4f2fde3d2aca3a832b069b Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 18:16:39 +0100 Subject: [PATCH 17/29] fixed errors for H5MD support switched off, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 13 +++++++------ mrmd/io/DumpH5MDParallel.hpp | 4 ++-- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index d4c90bc1..efafa864 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ 
b/mrmd/io/DumpH5MDParallel.cpp @@ -980,8 +980,8 @@ void DumpH5MDParallel::open(const std::string& filename, const data::Atoms& atom void DumpH5MDParallel::dumpStep(const data::Subdomain& subdomain, const data::Atoms& atoms, - const idx_t step, - const real_t dt) + const idx_t& step, + const real_t& dt) { impl::DumpH5MDParallelImpl helper(*this); helper.dumpStep(subdomain, atoms, step, dt); @@ -1001,7 +1001,7 @@ void DumpH5MDParallel::dump(const std::string& filename, helper.dump(filename, subdomain, atoms); } #else -void DumpH5MDParallel::open(const std::string& /*filename*/) +void DumpH5MDParallel::open(const std::string& /*filename*/, const data::Atoms& /*atoms*/) { MRMD_HOST_CHECK(false, "HDF5 Support not available!"); exit(EXIT_FAILURE); @@ -1012,9 +1012,10 @@ void DumpH5MDParallel::close(const hid_t& /*file_id*/); MRMD_HOST_CHECK(false, "HDF5 Support not available!"); exit(EXIT_FAILURE); } -void DumpH5MDParallel::dumpStep(const hid_t& /*file_id*/, - const data::Subdomain& /*subdomain*/, - const data::Atoms& /*atoms*/) +void DumpH5MDParallel::dumpStep(const data::Subdomain& /*subdomain*/, + const data::Atoms& /*atoms*/, + const idx_t& /*step*/, + const real_t& /*dt*/) { MRMD_HOST_CHECK(false, "HDF5 Support not available!"); exit(EXIT_FAILURE); diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index 58ddeb52..77d64330 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -37,8 +37,8 @@ class DumpH5MDParallel void dumpStep(const data::Subdomain& subdomain, const data::Atoms& atoms, - const idx_t step, - const real_t dt); + const idx_t& step, + const real_t& dt); void close(); From 7e1ea198f104d6ce4cf9daa0e04fc1abbdf66169 Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 18:30:07 +0100 Subject: [PATCH 18/29] hotfix, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 
efafa864..352fb769 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -1007,7 +1007,7 @@ void DumpH5MDParallel::open(const std::string& /*filename*/, const data::Atoms& exit(EXIT_FAILURE); } -void DumpH5MDParallel::close(const hid_t& /*file_id*/); +void DumpH5MDParallel::close(); { MRMD_HOST_CHECK(false, "HDF5 Support not available!"); exit(EXIT_FAILURE); From fca7c9febb529734f6e25c9a30adf290b01e7569 Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 18:37:38 +0100 Subject: [PATCH 19/29] hotfix, Ref #038 --- mrmd/io/DumpH5MDParallel.hpp | 74 ++++++++++++++++++------------------ 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index 77d64330..c5a3336d 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -67,43 +67,43 @@ class DumpH5MDParallel std::string author = "xxx"; std::string particleSubGroupName = "atoms"; - hid_t fileId; - hid_t particleGroupId; - hid_t particleSubGroupId; - hid_t boxGroupId; - hid_t edgesGroupId; - hid_t edgesStepSetId; - hid_t edgesTimeSetId; - hid_t edgesValueSetId; - hid_t chargesGroupId; - hid_t chargesStepSetId; - hid_t chargesTimeSetId; - hid_t chargesValueSetId; - hid_t forceGroupId; - hid_t forceStepSetId; - hid_t forceTimeSetId; - hid_t forceValueSetId; - hid_t massGroupId; - hid_t massStepSetId; - hid_t massTimeSetId; - hid_t massValueSetId; - hid_t posGroupId; - hid_t posStepSetId; - hid_t posTimeSetId; - hid_t posValueSetId; - hid_t relativeMassGroupId; - hid_t relativeMassStepSetId; - hid_t relativeMassTimeSetId; - hid_t relativeMassValueSetId; - hid_t typeGroupId; - hid_t typeStepSetId; - hid_t typeTimeSetId; - hid_t typeValueSetId; - hid_t velGroupId; - hid_t velStepSetId; - hid_t velTimeSetId; - hid_t velValueSetId; + int64_t fileId; + int64_t particleGroupId; + int64_t particleSubGroupId; + int64_t boxGroupId; + int64_t edgesGroupId; + int64_t edgesStepSetId; + int64_t 
edgesTimeSetId; + int64_t edgesValueSetId; + int64_t chargesGroupId; + int64_t chargesStepSetId; + int64_t chargesTimeSetId; + int64_t chargesValueSetId; + int64_t forceGroupId; + int64_t forceStepSetId; + int64_t forceTimeSetId; + int64_t forceValueSetId; + int64_t massGroupId; + int64_t massStepSetId; + int64_t massTimeSetId; + int64_t massValueSetId; + int64_t posGroupId; + int64_t posStepSetId; + int64_t posTimeSetId; + int64_t posValueSetId; + int64_t relativeMassGroupId; + int64_t relativeMassStepSetId; + int64_t relativeMassTimeSetId; + int64_t relativeMassValueSetId; + int64_t typeGroupId; + int64_t typeStepSetId; + int64_t typeTimeSetId; + int64_t typeValueSetId; + int64_t velGroupId; + int64_t velStepSetId; + int64_t velTimeSetId; + int64_t velValueSetId; - hsize_t saveCount = 0; + uint64_t saveCount = 0; }; } // namespace mrmd::io \ No newline at end of file From e9be17b9987753615a0a42c656310952222cdf08 Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 18:38:26 +0100 Subject: [PATCH 20/29] hotfix, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 352fb769..e4dc26d1 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -1007,7 +1007,7 @@ void DumpH5MDParallel::open(const std::string& /*filename*/, const data::Atoms& exit(EXIT_FAILURE); } -void DumpH5MDParallel::close(); +void DumpH5MDParallel::close() { MRMD_HOST_CHECK(false, "HDF5 Support not available!"); exit(EXIT_FAILURE); From 951f7d44861db7fcd094203ccd2d2b799340d0a7 Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 18:52:43 +0100 Subject: [PATCH 21/29] clang-tidy fixes, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index e4dc26d1..f90022b6 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ 
b/mrmd/io/DumpH5MDParallel.cpp @@ -500,7 +500,7 @@ void DumpH5MDParallelImpl::appendDataParallel(const hid_t datasetId, fileSpace, H5S_SELECT_SET, offset.data(), stride.data(), count.data(), dims.data())); std::vector localOffset(dims.size(), 0); - const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), NULL); + const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), nullptr); CHECK_HDF5(H5Sselect_hyperslab( memorySpace, H5S_SELECT_SET, localOffset.data(), stride.data(), count.data(), dims.data())); @@ -519,7 +519,7 @@ void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const { - const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), NULL); + const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), nullptr); std::vector newSize = dims; newSize[0] = config_.saveCount + 1; @@ -531,7 +531,7 @@ void DumpH5MDParallelImpl::appendData(const hid_t datasetId, start[0] = config_.saveCount; std::vector count = dims; count[0] = 1; - H5Sselect_hyperslab(fileSpace, H5S_SELECT_SET, start.data(), NULL, count.data(), NULL); + H5Sselect_hyperslab(fileSpace, H5S_SELECT_SET, start.data(), nullptr, count.data(), nullptr); H5Dwrite(datasetId, typeToHDF5(), memorySpace, fileSpace, H5P_DEFAULT, data.data()); From c7bc8b12616183d5a7f1840b9b4db441c984d8b4 Mon Sep 17 00:00:00 2001 From: julianhille Date: Mon, 10 Mar 2025 19:11:39 +0100 Subject: [PATCH 22/29] clang-tidy fixes, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index f90022b6..3cc8ffd1 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -279,13 +279,12 @@ hid_t DumpH5MDParallelImpl::createChunkedDataset(const hid_t& groupId, { std::vector max_dims = dims; max_dims[0] = H5S_UNLIMITED; - hid_t fileSpace = H5Screate_simple(dims.size(), dims.data(), max_dims.data()); + 
hid_t fileSpace = H5Screate_simple(int_c(dims.size()), dims.data(), max_dims.data()); hid_t plist = H5Pcreate(H5P_DATASET_CREATE); H5Pset_layout(plist, H5D_CHUNKED); - const std::vector chunk_dims = dims; - H5Pset_chunk(plist, dims.size(), chunk_dims.data()); + H5Pset_chunk(plist, int_c(dims.size()), dims.data()); auto datasetId = H5Dcreate(groupId, name.c_str(), dtype, fileSpace, H5P_DEFAULT, plist, H5P_DEFAULT); @@ -500,7 +499,7 @@ void DumpH5MDParallelImpl::appendDataParallel(const hid_t datasetId, fileSpace, H5S_SELECT_SET, offset.data(), stride.data(), count.data(), dims.data())); std::vector localOffset(dims.size(), 0); - const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), nullptr); + const hid_t memorySpace = H5Screate_simple(int_c(dims.size()), dims.data(), nullptr); CHECK_HDF5(H5Sselect_hyperslab( memorySpace, H5S_SELECT_SET, localOffset.data(), stride.data(), count.data(), dims.data())); @@ -519,7 +518,7 @@ void DumpH5MDParallelImpl::appendData(const hid_t datasetId, const std::vector& data, const std::vector& dims) const { - const hid_t memorySpace = H5Screate_simple(dims.size(), dims.data(), nullptr); + const hid_t memorySpace = H5Screate_simple(int_c(dims.size()), dims.data(), nullptr); std::vector newSize = dims; newSize[0] = config_.saveCount + 1; From 2864accc77ea984396deaa1d294f3d30ea619ecc Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 21 Mar 2025 10:50:17 +0100 Subject: [PATCH 23/29] renamed particleGroupName_ member to particleSubGroupName_, Ref #038 --- mrmd/io/RestoreH5MDParallel.cpp | 16 ++++++++-------- mrmd/io/RestoreH5MDParallel.hpp | 4 ++-- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/mrmd/io/RestoreH5MDParallel.cpp b/mrmd/io/RestoreH5MDParallel.cpp index 333ce8a3..49337f11 100644 --- a/mrmd/io/RestoreH5MDParallel.cpp +++ b/mrmd/io/RestoreH5MDParallel.cpp @@ -97,7 +97,7 @@ void RestoreH5MDParallel::restore(const std::string& filename, auto fileId = CHECK_HDF5(H5Fopen(filename.c_str(), 
H5F_ACC_RDONLY, plist)); - std::string groupName = "/particles/" + particleGroupName_ + "/box"; + std::string groupName = "/particles/" + particleSubGroupName_ + "/box"; CHECK_HDF5(H5LTget_attribute_double( fileId, groupName.c_str(), "minCorner", subdomain.minCorner.data())); CHECK_HDF5(H5LTget_attribute_double( @@ -109,48 +109,48 @@ void RestoreH5MDParallel::restore(const std::string& filename, std::vector pos; if (restorePos) { - readParallel(fileId, "/particles/" + particleGroupName_ + "/" + posDataset + "/value", pos); + readParallel(fileId, "/particles/" + particleSubGroupName_ + "/" + posDataset + "/value", pos); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, pos.size()); } std::vector vel; if (restoreVel) { - readParallel(fileId, "/particles/" + particleGroupName_ + "/" + velDataset + "/value", vel); + readParallel(fileId, "/particles/" + particleSubGroupName_ + "/" + velDataset + "/value", vel); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, vel.size()); } std::vector force; if (restoreForce) { readParallel( - fileId, "/particles/" + particleGroupName_ + "/" + forceDataset + "/value", force); + fileId, "/particles/" + particleSubGroupName_ + "/" + forceDataset + "/value", force); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, force.size()); } std::vector type; if (restoreType) { readParallel( - fileId, "/particles/" + particleGroupName_ + "/" + typeDataset + "/value", type); + fileId, "/particles/" + particleSubGroupName_ + "/" + typeDataset + "/value", type); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, type.size()); } std::vector mass; if (restoreMass) { readParallel( - fileId, "/particles/" + particleGroupName_ + "/" + massDataset + "/value", mass); + fileId, "/particles/" + particleSubGroupName_ + "/" + massDataset + "/value", mass); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, mass.size()); } std::vector charge; if (restoreCharge) { readParallel( - fileId, "/particles/" + particleGroupName_ + "/" + chargeDataset + "/value", charge); + fileId, "/particles/" + 
particleSubGroupName_ + "/" + chargeDataset + "/value", charge); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, charge.size()); } std::vector relativeMass; if (restoreRelativeMass) { readParallel(fileId, - "/particles/" + particleGroupName_ + "/" + relativeMassDataset + "/value", + "/particles/" + particleSubGroupName_ + "/" + relativeMassDataset + "/value", relativeMass); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, relativeMass.size()); } diff --git a/mrmd/io/RestoreH5MDParallel.hpp b/mrmd/io/RestoreH5MDParallel.hpp index f1866931..fe91212d 100644 --- a/mrmd/io/RestoreH5MDParallel.hpp +++ b/mrmd/io/RestoreH5MDParallel.hpp @@ -31,7 +31,7 @@ class RestoreH5MDParallel public: RestoreH5MDParallel(const std::shared_ptr& mpiInfo, const std::string& particleSubGroupName = "atoms") - : mpiInfo_(mpiInfo), particleGroupName_(particleSubGroupName) + : mpiInfo_(mpiInfo), particleSubGroupName_(particleSubGroupName) { } @@ -58,7 +58,7 @@ class RestoreH5MDParallel void readParallel(hid_t fileId, const std::string& dataset, std::vector& data); std::shared_ptr mpiInfo_; - std::string particleGroupName_; + std::string particleSubGroupName_; }; } // namespace mrmd::io \ No newline at end of file From e29cea087e0af20f1290f1c4cc4acbd0ad85642d Mon Sep 17 00:00:00 2001 From: julianhille Date: Fri, 21 Mar 2025 10:52:39 +0100 Subject: [PATCH 24/29] clang-format fix, Ref #038 --- mrmd/io/RestoreH5MDParallel.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/mrmd/io/RestoreH5MDParallel.cpp b/mrmd/io/RestoreH5MDParallel.cpp index 49337f11..d07bd088 100644 --- a/mrmd/io/RestoreH5MDParallel.cpp +++ b/mrmd/io/RestoreH5MDParallel.cpp @@ -109,13 +109,15 @@ void RestoreH5MDParallel::restore(const std::string& filename, std::vector pos; if (restorePos) { - readParallel(fileId, "/particles/" + particleSubGroupName_ + "/" + posDataset + "/value", pos); + readParallel( + fileId, "/particles/" + particleSubGroupName_ + "/" + posDataset + "/value", pos); 
MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, pos.size()); } std::vector vel; if (restoreVel) { - readParallel(fileId, "/particles/" + particleSubGroupName_ + "/" + velDataset + "/value", vel); + readParallel( + fileId, "/particles/" + particleSubGroupName_ + "/" + velDataset + "/value", vel); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, vel.size()); } std::vector force; From 7b619b5d27438c363eedde58d359e0c4f93e7d69 Mon Sep 17 00:00:00 2001 From: julianhille Date: Tue, 1 Apr 2025 15:40:55 +0200 Subject: [PATCH 25/29] added subdomain corners and ghostLayerThickness to open for step-wise H5MD output, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 32 ++++++++++++++++++++++++-------- mrmd/io/DumpH5MDParallel.hpp | 2 +- 2 files changed, 25 insertions(+), 9 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index 3cc8ffd1..dd125796 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -39,7 +39,7 @@ class DumpH5MDParallelImpl public: explicit DumpH5MDParallelImpl(DumpH5MDParallel& config) : config_(config) {} - void open(const std::string& filename, const data::Atoms& atoms); + void open(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms); void dumpStep(const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, @@ -55,7 +55,7 @@ class DumpH5MDParallelImpl void closeFile(const hid_t& fileId) const; hid_t createGroup(const hid_t& parentElementId, const std::string& groupName) const; void closeGroup(const hid_t& groupId) const; - void openBox() const; + void openBox(const data::Subdomain& subdomain) const; hid_t createChunkedDataset(const hid_t& groupId, const std::vector& dims, const std::string& name, @@ -108,7 +108,7 @@ class DumpH5MDParallelImpl int64_t particleOffset = -1; }; -void DumpH5MDParallelImpl::open(const std::string& filename, const data::Atoms& atoms) +void DumpH5MDParallelImpl::open(const std::string& filename, const data::Subdomain& subdomain, const
data::Atoms& atoms) { MPI_Info info = MPI_INFO_NULL; @@ -122,7 +122,7 @@ void DumpH5MDParallelImpl::open(const std::string& filename, const data::Atoms& config_.particleGroupId = createGroup(config_.fileId, "particles"); config_.particleSubGroupId = createGroup(config_.particleGroupId, config_.particleSubGroupName); writeHeader(config_.fileId); - openBox(); + openBox(subdomain); config_.chargesGroupId = createGroup(config_.particleSubGroupId, "charge"); config_.chargesStepSetId = createChunkedDataset( @@ -243,13 +243,29 @@ void DumpH5MDParallelImpl::close() const closeFile(config_.fileId); } -void DumpH5MDParallelImpl::openBox() const +void DumpH5MDParallelImpl::openBox(const data::Subdomain& subdomain) const { config_.boxGroupId = createGroup(config_.particleSubGroupId, "box"); std::vector dims = {3}; CHECK_HDF5(H5LTset_attribute_int( config_.particleSubGroupId, "box", "dimension", dims.data(), dims.size())); + CHECK_HDF5(H5LTset_attribute_double(config_.particleSubGroupId, + "box", + "minCorner", + subdomain.minCorner.data(), + subdomain.minCorner.size())); + CHECK_HDF5(H5LTset_attribute_double(config_.particleSubGroupId, + "box", + "maxCorner", + subdomain.maxCorner.data(), + subdomain.maxCorner.size())); + CHECK_HDF5(H5LTset_attribute_double(config_.particleSubGroupId, + "box", + "ghostLayerThickness", + &subdomain.ghostLayerThickness, + 1)); + auto boundaryType = H5Tcopy(H5T_C_S1); CHECK_HDF5(H5Tset_size(boundaryType, 8)); @@ -971,10 +987,10 @@ void DumpH5MDParallelImpl::dump(const std::string& filename, } } // namespace impl -void DumpH5MDParallel::open(const std::string& filename, const data::Atoms& atoms) +void DumpH5MDParallel::open(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms) { impl::DumpH5MDParallelImpl helper(*this); - helper.open(filename, atoms); + helper.open(filename, subdomain, atoms); } void DumpH5MDParallel::dumpStep(const data::Subdomain& subdomain, @@ -1000,7 +1016,7 @@ void 
DumpH5MDParallel::dump(const std::string& filename, helper.dump(filename, subdomain, atoms); } #else -void DumpH5MDParallel::open(const std::string& /*filename*/, const data::Atoms& /*atoms*/) +void DumpH5MDParallel::open(const std::string& /*filename*/, const data::Subdomain& /*subdomain*/, const data::Atoms& /*atoms*/) { MRMD_HOST_CHECK(false, "HDF5 Support not available!"); exit(EXIT_FAILURE); diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index c5a3336d..bce45853 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -33,7 +33,7 @@ class DumpH5MDParallel : mpiInfo(mpiInfoArg), author(authorArg), particleSubGroupName(particleSubGroupNameArg) { } - void open(const std::string& filename, const data::Atoms& atoms); + void open(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms); void dumpStep(const data::Subdomain& subdomain, const data::Atoms& atoms, From ca8173d39d25446022af52ee8f647db0c2e0d452 Mon Sep 17 00:00:00 2001 From: julianhille Date: Wed, 2 Apr 2025 16:30:21 +0200 Subject: [PATCH 26/29] enabled restoring any step of H5MD file and added two tests, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 4 +- mrmd/io/H5MD.test.cpp | 101 +++++++++++++++++++++++++++----- mrmd/io/RestoreH5MDParallel.cpp | 24 ++++---- mrmd/io/RestoreH5MDParallel.hpp | 4 +- 4 files changed, 105 insertions(+), 28 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index dd125796..b48ef94a 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -263,8 +263,8 @@ void DumpH5MDParallelImpl::openBox(const data::Subdomain& subdomain) const CHECK_HDF5(H5LTset_attribute_double(config_.particleSubGroupId, "box", "ghostLayerThickness", - &subdomain.ghostLayerThickness, - 1)); + subdomain.ghostLayerThickness.data(), + subdomain.ghostLayerThickness.size())); auto boundaryType = H5Tcopy(H5T_C_S1); diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index 
fb8697bc..4c282ff8 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -66,21 +66,40 @@ data::Atoms getAtoms(const std::shared_ptr& mpiInfo) return atoms; } -TEST(H5MD, dump) + +void shuffleAtoms(data::Subdomain& subdomain, data::Atoms& atoms, const idx_t& step) { - auto mpiInfo = std::make_shared(MPI_COMM_WORLD); + auto RNG = Kokkos::Random_XorShift1024_Pool<>(1234 * step); - auto subdomain1 = data::Subdomain({1_r, 2_r, 3_r}, {4_r, 6_r, 8_r}, 0.5_r); - auto atoms1 = getAtoms(mpiInfo); + auto pos = atoms.getPos(); + auto vel = atoms.getVel(); + auto force = atoms.getForce(); - auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); - dump.dump("dummy.h5md", subdomain1, atoms1); + auto policy = Kokkos::RangePolicy<>(0, atoms.numLocalAtoms); + auto kernel = KOKKOS_LAMBDA(const idx_t idx) + { + auto randGen = RNG.get_state(); + pos(idx, 0) = randGen.drand() * subdomain.diameter[0] + subdomain.minCorner[0]; + pos(idx, 1) = randGen.drand() * subdomain.diameter[1] + subdomain.minCorner[1]; + pos(idx, 2) = randGen.drand() * subdomain.diameter[2] + subdomain.minCorner[2]; - auto subdomain2 = data::Subdomain(); - auto atoms2 = data::Atoms(0); - auto restore = RestoreH5MDParallel(mpiInfo); - restore.restore("dummy.h5md", subdomain2, atoms2); + vel(idx, 0) = (randGen.drand() - 0.5_r); + vel(idx, 1) = (randGen.drand() - 0.5_r); + vel(idx, 2) = (randGen.drand() - 0.5_r); + force(idx, 0) = (randGen.drand() - 0.5_r); + force(idx, 1) = (randGen.drand() - 0.5_r); + force(idx, 2) = (randGen.drand() - 0.5_r); + + RNG.free_state(randGen); + }; + Kokkos::parallel_for("shuffle-atoms", policy, kernel); + Kokkos::fence(); +} + +void compareSystems(const data::Subdomain& subdomain1, const data::Atoms& atoms1, + const data::Subdomain& subdomain2, const data::Atoms& atoms2) +{ EXPECT_FLOAT_EQ(subdomain1.ghostLayerThickness[0], subdomain2.ghostLayerThickness[0]); EXPECT_FLOAT_EQ(subdomain1.ghostLayerThickness[1], subdomain2.ghostLayerThickness[1]); 
EXPECT_FLOAT_EQ(subdomain1.ghostLayerThickness[2], subdomain2.ghostLayerThickness[2]); @@ -118,6 +137,24 @@ TEST(H5MD, dump) } } +TEST(H5MD, dump) +{ + auto mpiInfo = std::make_shared(MPI_COMM_WORLD); + + auto subdomain1 = data::Subdomain({1_r, 2_r, 3_r}, {4_r, 6_r, 8_r}, 0.5_r); + auto atoms1 = getAtoms(mpiInfo); + + auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); + dump.dump("dummy.h5md", subdomain1, atoms1); + + auto subdomain2 = data::Subdomain(); + auto atoms2 = data::Atoms(0); + auto restore = RestoreH5MDParallel(mpiInfo); + restore.restore("dummy.h5md", subdomain2, atoms2); + + compareSystems(subdomain1, atoms1, subdomain2, atoms2); +} + TEST(H5MD, dumpMultipleSteps) { auto mpiInfo = std::make_shared(MPI_COMM_WORLD); @@ -126,16 +163,52 @@ TEST(H5MD, dumpMultipleSteps) auto atoms1 = getAtoms(mpiInfo); real_t dt = 0.002_r; - auto dump = DumpH5MDParallel(mpiInfo, "XzzX"); + auto dump = DumpH5MDParallel(mpiInfo, "J-Hizzle"); + + auto subdomain2 = data::Subdomain(); + auto atoms2 = data::Atoms(0); + auto restore = RestoreH5MDParallel(mpiInfo); + + dump.open("dummyMultipleSteps.h5md", subdomain1, atoms1); + dump.dumpStep(subdomain1, atoms1, 0, dt); + dump.close(); - dump.open("dummyMultipleSteps.h5md", atoms1); + restore.restore("dummyMultipleSteps.h5md", subdomain2, atoms2, 0); + compareSystems(subdomain1, atoms1, subdomain2, atoms2); +} + +TEST(H5MD, dumpConsistency) +{ + auto mpiInfo = std::make_shared(MPI_COMM_WORLD); - for (idx_t step = 0; step < 10; ++step) + auto subdomain0 = data::Subdomain({1_r, 2_r, 3_r}, {4_r, 6_r, 8_r}, 0.5_r); + auto atoms0 = getAtoms(mpiInfo); + real_t dt = 0.002_r; + + auto dump = DumpH5MDParallel(mpiInfo, "J-Hizzle"); + + auto subdomain1 = data::Subdomain(); + auto atoms1 = data::Atoms(0); + auto subdomain2 = data::Subdomain(); + auto atoms2 = data::Atoms(0); + auto restore = RestoreH5MDParallel(mpiInfo); + + dump.open("dummyConsistencyMultipleSteps.h5md", subdomain0, atoms0); + + for (idx_t step = 0; step < 5; ++step) { - 
dump.dumpStep(subdomain1, atoms1, step, dt); + shuffleAtoms(subdomain0, atoms0, step); + dump.dumpStep(subdomain0, atoms0, step, dt); } dump.close(); + + dump.dump("dummyConsistencyFinalStep.h5md", subdomain0, atoms0); + + restore.restore("dummyConsistencyMultipleSteps.h5md", subdomain1, atoms1, 4); + restore.restore("dummyConsistencyFinalStep.h5md", subdomain2, atoms2); + compareSystems(subdomain1, atoms1, subdomain2, atoms2); } + } // namespace io } // namespace mrmd \ No newline at end of file diff --git a/mrmd/io/RestoreH5MDParallel.cpp b/mrmd/io/RestoreH5MDParallel.cpp index d07bd088..d9db23b5 100644 --- a/mrmd/io/RestoreH5MDParallel.cpp +++ b/mrmd/io/RestoreH5MDParallel.cpp @@ -26,7 +26,8 @@ namespace mrmd::io template void RestoreH5MDParallel::readParallel(hid_t fileId, const std::string& dataset, - std::vector& data) + std::vector& data, + const idx_t& saveCount) { auto dset = CHECK_HDF5(H5Dopen(fileId, dataset.c_str(), H5P_DEFAULT)); auto dspace = CHECK_HDF5(H5Dget_space(dset)); @@ -53,6 +54,7 @@ void RestoreH5MDParallel::readParallel(hid_t fileId, // set up local part of the input file std::vector offset(globalDims.size(), 0); + offset[0] = saveCount; offset[1] = localOffset; std::vector stride(globalDims.size(), 1); std::vector count(globalDims.size(), 1); @@ -88,7 +90,8 @@ void RestoreH5MDParallel::readParallel(hid_t fileId, void RestoreH5MDParallel::restore(const std::string& filename, data::Subdomain& subdomain, - data::Atoms& atoms) + data::Atoms& atoms, + const idx_t& saveCount) { MPI_Info info = MPI_INFO_NULL; @@ -110,42 +113,42 @@ void RestoreH5MDParallel::restore(const std::string& filename, if (restorePos) { readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + posDataset + "/value", pos); + fileId, "/particles/" + particleSubGroupName_ + "/" + posDataset + "/value", pos, saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, pos.size()); } std::vector vel; if (restoreVel) { readParallel( - fileId, "/particles/" + 
particleSubGroupName_ + "/" + velDataset + "/value", vel); + fileId, "/particles/" + particleSubGroupName_ + "/" + velDataset + "/value", vel, saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, vel.size()); } std::vector force; if (restoreForce) { readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + forceDataset + "/value", force); + fileId, "/particles/" + particleSubGroupName_ + "/" + forceDataset + "/value", force, saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, force.size()); } std::vector type; if (restoreType) { readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + typeDataset + "/value", type); + fileId, "/particles/" + particleSubGroupName_ + "/" + typeDataset + "/value", type, saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, type.size()); } std::vector mass; if (restoreMass) { readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + massDataset + "/value", mass); + fileId, "/particles/" + particleSubGroupName_ + "/" + massDataset + "/value", mass, saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, mass.size()); } std::vector charge; if (restoreCharge) { readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + chargeDataset + "/value", charge); + fileId, "/particles/" + particleSubGroupName_ + "/" + chargeDataset + "/value", charge, saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, charge.size()); } std::vector relativeMass; @@ -153,7 +156,7 @@ void RestoreH5MDParallel::restore(const std::string& filename, { readParallel(fileId, "/particles/" + particleSubGroupName_ + "/" + relativeMassDataset + "/value", - relativeMass); + relativeMass, saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, relativeMass.size()); } @@ -202,7 +205,8 @@ void RestoreH5MDParallel::readParallel(hid_t /*fileId*/, void RestoreH5MDParallel::restore(const std::string& /*filename*/, data::Subdomain& /*subdomain*/, - data::Atoms& /*atoms*/) + data::Atoms& /*atoms*/, + const idx_t& /*step*/) { 
MRMD_HOST_CHECK(false, "HDF5 support not available!"); exit(EXIT_FAILURE); diff --git a/mrmd/io/RestoreH5MDParallel.hpp b/mrmd/io/RestoreH5MDParallel.hpp index fe91212d..4026a7c0 100644 --- a/mrmd/io/RestoreH5MDParallel.hpp +++ b/mrmd/io/RestoreH5MDParallel.hpp @@ -35,7 +35,7 @@ class RestoreH5MDParallel { } - void restore(const std::string& filename, data::Subdomain& subdomain, data::Atoms& atoms); + void restore(const std::string& filename, data::Subdomain& subdomain, data::Atoms& atoms, const idx_t& saveCount = 0); bool restorePos = true; bool restoreVel = true; @@ -55,7 +55,7 @@ class RestoreH5MDParallel private: template - void readParallel(hid_t fileId, const std::string& dataset, std::vector& data); + void readParallel(hid_t fileId, const std::string& dataset, std::vector& data, const idx_t& saveCount); std::shared_ptr mpiInfo_; std::string particleSubGroupName_; From 3331bf0b7c565c11f058cd09bac8615266d9be99 Mon Sep 17 00:00:00 2001 From: julianhille Date: Wed, 2 Apr 2025 16:37:40 +0200 Subject: [PATCH 27/29] clang-format fixes, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 32 +++++++++++++++++---------- mrmd/io/DumpH5MDParallel.hpp | 4 +++- mrmd/io/H5MD.test.cpp | 6 +++-- mrmd/io/RestoreH5MDParallel.cpp | 39 ++++++++++++++++++++----------- mrmd/io/RestoreH5MDParallel.hpp | 10 +++++++-- 5 files changed, 61 insertions(+), 30 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index b48ef94a..de28d6d5 100644 --- a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -39,7 +39,9 @@ class DumpH5MDParallelImpl public: explicit DumpH5MDParallelImpl(DumpH5MDParallel& config) : config_(config) {} - void open(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms); + void open(const std::string& filename, + const data::Subdomain& subdomain, + const data::Atoms& atoms); void dumpStep(const data::Subdomain& subdomain, const data::Atoms& atoms, const idx_t step, @@ -108,7 +110,9 @@ class
DumpH5MDParallelImpl int64_t particleOffset = -1; }; -void DumpH5MDParallelImpl::open(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms) +void DumpH5MDParallelImpl::open(const std::string& filename, + const data::Subdomain& subdomain, + const data::Atoms& atoms) { MPI_Info info = MPI_INFO_NULL; @@ -251,15 +255,15 @@ void DumpH5MDParallelImpl::openBox(const data::Subdomain& subdomain) const CHECK_HDF5(H5LTset_attribute_int( config_.particleSubGroupId, "box", "dimension", dims.data(), dims.size())); CHECK_HDF5(H5LTset_attribute_double(config_.particleSubGroupId, - "box", - "minCorner", - subdomain.minCorner.data(), - subdomain.minCorner.size())); + "box", + "minCorner", + subdomain.minCorner.data(), + subdomain.minCorner.size())); CHECK_HDF5(H5LTset_attribute_double(config_.particleSubGroupId, - "box", - "maxCorner", - subdomain.maxCorner.data(), - subdomain.maxCorner.size())); + "box", + "maxCorner", + subdomain.maxCorner.data(), + subdomain.maxCorner.size())); CHECK_HDF5(H5LTset_attribute_double(config_.particleSubGroupId, "box", "ghostLayerThickness", @@ -987,7 +991,9 @@ void DumpH5MDParallelImpl::dump(const std::string& filename, } } // namespace impl -void DumpH5MDParallel::open(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms) +void DumpH5MDParallel::open(const std::string& filename, + const data::Subdomain& subdomain, + const data::Atoms& atoms) { impl::DumpH5MDParallelImpl helper(*this); helper.open(filename, subdomain, atoms); @@ -1016,7 +1022,9 @@ void DumpH5MDParallel::dump(const std::string& filename, helper.dump(filename, subdomain, atoms); } #else -void DumpH5MDParallel::open(const std::string& /*filename*/, const data::Subdomain& /*subdomain*/, const data::Atoms& /*atoms*/) +void DumpH5MDParallel::open(const std::string& /*filename*/, + const data::Subdomain& /*subdomain*/, + const data::Atoms& /*atoms*/) { MRMD_HOST_CHECK(false, "HDF5 Support not available!"); 
exit(EXIT_FAILURE); diff --git a/mrmd/io/DumpH5MDParallel.hpp b/mrmd/io/DumpH5MDParallel.hpp index bce45853..3cc16112 100644 --- a/mrmd/io/DumpH5MDParallel.hpp +++ b/mrmd/io/DumpH5MDParallel.hpp @@ -33,7 +33,9 @@ class DumpH5MDParallel : mpiInfo(mpiInfoArg), author(authorArg), particleSubGroupName(particleSubGroupNameArg) { } - void open(const std::string& filename, const data::Subdomain& subdomain, const data::Atoms& atoms); + void open(const std::string& filename, + const data::Subdomain& subdomain, + const data::Atoms& atoms); void dumpStep(const data::Subdomain& subdomain, const data::Atoms& atoms, diff --git a/mrmd/io/H5MD.test.cpp b/mrmd/io/H5MD.test.cpp index 4c282ff8..0c918993 100644 --- a/mrmd/io/H5MD.test.cpp +++ b/mrmd/io/H5MD.test.cpp @@ -97,8 +97,10 @@ void shuffleAtoms(data::Subdomain& subdomain, data::Atoms& atoms, const idx_t& s Kokkos::fence(); } -void compareSystems(const data::Subdomain& subdomain1, const data::Atoms& atoms1, - const data::Subdomain& subdomain2, const data::Atoms& atoms2) +void compareSystems(const data::Subdomain& subdomain1, + const data::Atoms& atoms1, + const data::Subdomain& subdomain2, + const data::Atoms& atoms2) { EXPECT_FLOAT_EQ(subdomain1.ghostLayerThickness[0], subdomain2.ghostLayerThickness[0]); EXPECT_FLOAT_EQ(subdomain1.ghostLayerThickness[1], subdomain2.ghostLayerThickness[1]); diff --git a/mrmd/io/RestoreH5MDParallel.cpp b/mrmd/io/RestoreH5MDParallel.cpp index d9db23b5..512b66f7 100644 --- a/mrmd/io/RestoreH5MDParallel.cpp +++ b/mrmd/io/RestoreH5MDParallel.cpp @@ -112,43 +112,55 @@ void RestoreH5MDParallel::restore(const std::string& filename, std::vector pos; if (restorePos) { - readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + posDataset + "/value", pos, saveCount); + readParallel(fileId, + "/particles/" + particleSubGroupName_ + "/" + posDataset + "/value", + pos, + saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, pos.size()); } std::vector vel; if (restoreVel) { - readParallel( - 
fileId, "/particles/" + particleSubGroupName_ + "/" + velDataset + "/value", vel, saveCount); + readParallel(fileId, + "/particles/" + particleSubGroupName_ + "/" + velDataset + "/value", + vel, + saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, vel.size()); } std::vector force; if (restoreForce) { - readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + forceDataset + "/value", force, saveCount); + readParallel(fileId, + "/particles/" + particleSubGroupName_ + "/" + forceDataset + "/value", + force, + saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 3, force.size()); } std::vector type; if (restoreType) { - readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + typeDataset + "/value", type, saveCount); + readParallel(fileId, + "/particles/" + particleSubGroupName_ + "/" + typeDataset + "/value", + type, + saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, type.size()); } std::vector mass; if (restoreMass) { - readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + massDataset + "/value", mass, saveCount); + readParallel(fileId, + "/particles/" + particleSubGroupName_ + "/" + massDataset + "/value", + mass, + saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, mass.size()); } std::vector charge; if (restoreCharge) { - readParallel( - fileId, "/particles/" + particleSubGroupName_ + "/" + chargeDataset + "/value", charge, saveCount); + readParallel(fileId, + "/particles/" + particleSubGroupName_ + "/" + chargeDataset + "/value", + charge, + saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, charge.size()); } std::vector relativeMass; @@ -156,7 +168,8 @@ void RestoreH5MDParallel::restore(const std::string& filename, { readParallel(fileId, "/particles/" + particleSubGroupName_ + "/" + relativeMassDataset + "/value", - relativeMass, saveCount); + relativeMass, + saveCount); MRMD_HOST_CHECK_EQUAL(pos.size() / 3 * 1, relativeMass.size()); } diff --git a/mrmd/io/RestoreH5MDParallel.hpp 
b/mrmd/io/RestoreH5MDParallel.hpp index 4026a7c0..88f2f194 100644 --- a/mrmd/io/RestoreH5MDParallel.hpp +++ b/mrmd/io/RestoreH5MDParallel.hpp @@ -35,7 +35,10 @@ class RestoreH5MDParallel { } - void restore(const std::string& filename, data::Subdomain& subdomain, data::Atoms& atoms, const idx_t& saveCount = 0); + void restore(const std::string& filename, + data::Subdomain& subdomain, + data::Atoms& atoms, + const idx_t& saveCount = 0); bool restorePos = true; bool restoreVel = true; @@ -55,7 +58,10 @@ class RestoreH5MDParallel private: template - void readParallel(hid_t fileId, const std::string& dataset, std::vector& data, const idx_t& saveCount); + void readParallel(hid_t fileId, + const std::string& dataset, + std::vector& data, + const idx_t& saveCount); std::shared_ptr mpiInfo_; std::string particleSubGroupName_; From e943fa0ed9cee6a902c2838e211816089a705bef Mon Sep 17 00:00:00 2001 From: julianhille Date: Wed, 2 Apr 2025 17:55:38 +0200 Subject: [PATCH 28/29] bugfix, Ref 0#38 --- mrmd/io/RestoreH5MDParallel.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mrmd/io/RestoreH5MDParallel.cpp b/mrmd/io/RestoreH5MDParallel.cpp index 512b66f7..25153244 100644 --- a/mrmd/io/RestoreH5MDParallel.cpp +++ b/mrmd/io/RestoreH5MDParallel.cpp @@ -210,7 +210,8 @@ void RestoreH5MDParallel::restore(const std::string& filename, template void RestoreH5MDParallel::readParallel(hid_t /*fileId*/, const std::string& /*dataset*/, - std::vector& /*data*/) + std::vector& /*data*/, + const idx_t& /*saveCount*/) { MRMD_HOST_CHECK(false, "HDF5 support not available!"); exit(EXIT_FAILURE); From 2b43aaf4c65458c73e035b0063cba6f443a59af2 Mon Sep 17 00:00:00 2001 From: julianhille Date: Tue, 20 May 2025 18:07:38 +0200 Subject: [PATCH 29/29] clang-format fixes, Ref #038 --- mrmd/io/DumpH5MDParallel.cpp | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/mrmd/io/DumpH5MDParallel.cpp b/mrmd/io/DumpH5MDParallel.cpp index de28d6d5..73e8bb2f 100644 --- 
a/mrmd/io/DumpH5MDParallel.cpp +++ b/mrmd/io/DumpH5MDParallel.cpp @@ -265,11 +265,10 @@ void DumpH5MDParallelImpl::openBox(const data::Subdomain& subdomain) const subdomain.maxCorner.data(), subdomain.maxCorner.size())); CHECK_HDF5(H5LTset_attribute_double(config_.particleSubGroupId, - "box", - "ghostLayerThickness", - subdomain.ghostLayerThickness.data(), - subdomain.ghostLayerThickness.size())); - + "box", + "ghostLayerThickness", + subdomain.ghostLayerThickness.data(), + subdomain.ghostLayerThickness.size())); auto boundaryType = H5Tcopy(H5T_C_S1); CHECK_HDF5(H5Tset_size(boundaryType, 8));