Commit 9d944a43 authored by Dion Haefner

parallel RFG works now

parent 8ab2f6c3
@@ -5,11 +5,11 @@
* This requires the MPI version of HDF5.
*
* Overview of HDF5 methods:
-* h5g_pWrite : parallel write of grid data to HDF5 file
-* h5g_pRead : parallel read of grid data from HDF5 file
+* h5g_Write : parallel write of grid data to HDF5 file
+* h5g_Read : parallel read of grid data from HDF5 file
* h5_pAppend : parallel append of multi-dim array to existing HDF5 file
-* h5_pWrite1d: parallel write of local 1d arrays to a global 1d array on one continuous disjointly hyperslabbed HDF5 file
-* h5_pRead1d : parallel read of local 1d arrays from a global 1d array on one continuous disjointly hyperslabbed HDF5 file
+* h5_Write1d: parallel write of local 1d arrays to a global 1d array on one continuous disjointly hyperslabbed HDF5 file
+* h5_Read1d : parallel read of local 1d arrays from a global 1d array on one continuous disjointly hyperslabbed HDF5 file
*
*/
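For orientation, here is a minimal standalone sketch of the collective-write pattern these helpers wrap: a file-access property list with the MPI-IO driver, one global dataspace, a disjoint hyperslab per rank, and a collective transfer property list. It uses only the plain HDF5 C API plus MPI; the file name, dataset name, and the flat 1-d decomposition are illustrative and not taken from this header.

// Illustrative sketch only (not part of the commit): every rank writes its own
// contiguous slice of a global 1-d dataset, same MPI-IO / hyperslab pattern
// as the helpers in this file. Names like local_n and "/data" are made up.
#include <hdf5.h>
#include <mpi.h>
#include <vector>

int main(int argc, char** argv) {
  MPI_Init(&argc, &argv);
  int rank = 0, size = 1;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &size);

  const hsize_t local_n = 100;                 // cells owned by this rank
  std::vector<double> local_data(local_n, static_cast<double>(rank));

  // File-access property list with the MPI-IO driver (cf. H5Pset_fapl_mpio below).
  hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
  H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
  hid_t file = H5Fcreate("sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
  H5Pclose(fapl);

  // One global dataspace and one dataset, created collectively by all ranks.
  hsize_t global_n = local_n * static_cast<hsize_t>(size);
  hid_t filespace = H5Screate_simple(1, &global_n, NULL);
  hid_t dset = H5Dcreate(file, "/data", H5T_NATIVE_DOUBLE, filespace,
                         H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  H5Sclose(filespace);

  // Each rank selects its disjoint hyperslab in the file dataspace.
  hsize_t offset = local_n * static_cast<hsize_t>(rank);
  hsize_t count  = local_n;
  filespace = H5Dget_space(dset);
  H5Sselect_hyperslab(filespace, H5S_SELECT_SET, &offset, NULL, &count, NULL);
  hid_t memspace = H5Screate_simple(1, &count, NULL);

  // Collective transfer mode (cf. H5FD_MPIO_COLLECTIVE below).
  hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
  H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
  H5Dwrite(dset, H5T_NATIVE_DOUBLE, memspace, filespace, dxpl, local_data.data());

  H5Pclose(dxpl); H5Sclose(memspace); H5Sclose(filespace);
  H5Dclose(dset); H5Fclose(file);
  MPI_Finalize();
  return 0;
}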
@@ -51,7 +51,7 @@ namespace Dune {
*
*/
-/** h5g_pWrite(): Writing grid data in parallel mode to HDF5 file.
+/** h5g_Write(): Writing grid data in parallel mode to HDF5 file.
*
* \tparam [in] data: grid data from local partition of the grid to be written to the current hyperslab
* \tparam [in] filename: HDF5 filename
@@ -67,7 +67,7 @@ namespace Dune {
*
*/
template<typename GV,typename VEC>
-static void h5g_pWrite( const VEC &datavector,
+static void h5g_Write( const VEC &datavector,
const std::string& filename,
const std::string& groupname,
const GV& gv,
@@ -79,7 +79,7 @@ namespace Dune {
const bool preserve_structure=true
)
{
-std::cout << "h5g_pWrite: ... " << filename << std::endl;
+std::cout << "h5g_Write: ... " << filename << std::endl;
//get the communicator of the dune grid
//const Dune::MPIHelper::MPICommunicator &comm = gv.comm();
@@ -116,7 +116,7 @@ namespace Dune {
*/
-std::cout << "h5g_pWrite: gridsizes = "
+std::cout << "h5g_Write: gridsizes = "
<< gridsizes << std::endl;
for (LeafIterator it = gv.template begin<0,partitiontype> ()
@@ -150,20 +150,20 @@ namespace Dune {
// Set up file access property list with parallel I/O access
hid_t plist_id = H5Pcreate( H5P_FILE_ACCESS );
-//std::cout << "h5g_pWrite: H5Pcreate() done. plist_id = "
+//std::cout << "h5g_Write: H5Pcreate() done. plist_id = "
// << plist_id << std::endl;
assert( plist_id > -1 );
herr_t status = H5Pset_fapl_mpio( plist_id, gv.comm(), mpiInfo );
-//std::cout << "h5g_pWrite: H5Pset_fapl_mpio: status = "
+//std::cout << "h5g_Write: H5Pset_fapl_mpio: status = "
// << status << std::endl;
assert( status > -1 );
if( status < 0 )
std::cout << "Warning: H5Pset_fapl_mpio < 0" << std::endl;
// Create a new file collectively and release property list identifier.
-//std::cout << "h5g_pWrite: "
+//std::cout << "h5g_Write: "
// << " create file "
// << filename.c_str()
// << std::endl;
@@ -173,7 +173,7 @@ namespace Dune {
H5P_DEFAULT,
plist_id );
H5Pclose( plist_id );
-//std::cout << "h5g_pWrite: "
+//std::cout << "h5g_Write: "
// << filename.c_str()
// << " file created!" << std::endl;
assert( file_id > -1 );
@@ -194,7 +194,7 @@ namespace Dune {
// Create the dataspace for the dataset.
hid_t filespace = H5Screate_simple( dim, dims_global, NULL );
assert(filespace>-1);
-//std::cout << "h5g_pWrite: fileSPACE created!" << std::endl;
+//std::cout << "h5g_Write: fileSPACE created!" << std::endl;
// Create the dataset with default properties and close filespace.
@@ -205,7 +205,7 @@ namespace Dune {
H5P_DEFAULT );
H5Sclose(filespace);
assert(dset_id>-1);
-//std::cout<< "h5g_pWrite: dataset created!" << std::endl;
+//std::cout<< "h5g_Write: dataset created!" << std::endl;
// set the count in the different dimensions (determine the size of the hyperslab)
@@ -250,7 +250,7 @@ namespace Dune {
}
-//std::cout<< "h5g_pWrite: memspace created!" << std::endl;
+//std::cout<< "h5g_Write: memspace created!" << std::endl;
// Select hyperslab in the file.
filespace = H5Dget_space( dset_id );
@@ -262,7 +262,7 @@ namespace Dune {
count,
NULL // <==> block={1,1,1}
);
-//std::cout<< "h5g_pWrite: hyperslab selected!" << std::endl;
+//std::cout<< "h5g_Write: hyperslab selected!" << std::endl;
// Create property list for collective dataset write.
plist_id = H5Pcreate( H5P_DATASET_XFER );
@@ -271,10 +271,10 @@ namespace Dune {
H5Pset_dxpl_mpio( plist_id, H5FD_MPIO_COLLECTIVE );
//H5Pset_dxpl_mpio( plist_id, H5FD_MPIO_INDEPENDENT );
-//std::cout<< "h5g_pWrite: properties set!" << std::endl;
+//std::cout<< "h5g_Write: properties set!" << std::endl;
// finally write the data to the disk
-//std::cout<< "h5g_pWrite: writing ... " << std::endl;
+//std::cout<< "h5g_Write: writing ... " << std::endl;
if( nAllLocalCells != 0 ){
status = H5Dwrite( dset_id,
@@ -297,7 +297,7 @@ namespace Dune {
assert(status>-1);
}
-std::cout << "h5g_pWrite: .... done (writing) "
+std::cout << "h5g_Write: .... done (writing) "
<< filename
<< std::endl;
@@ -315,7 +315,7 @@ namespace Dune {
-/** h5g_pRead(): Reading grid data in parallel mode from HDF5 file.
+/** h5g_Read(): Reading grid data in parallel mode from HDF5 file.
*
* \tparam [out] local_data: data to be read to the current processor (current hyperslab = current grid partition)
* \tparam [in] filename: HDF5 filename
@@ -333,7 +333,7 @@ namespace Dune {
template<typename GV,
typename VEC,
Dune::PartitionIteratorType partitiontype = Dune::All_Partition>
-static void h5g_pRead( VEC& local_data,
+static void h5g_Read( VEC& local_data,
const std::string& filename,
const std::string& groupname,
const GV gv,
@@ -346,7 +346,7 @@ namespace Dune {
const int current_grid_level=0
){
-//std::cout << getDateAndTime() << "h5g_pRead: " << filename << std::endl;
+//std::cout << getDateAndTime() << "h5g_Read: " << filename << std::endl;
const Dune::MPIHelper::MPICommunicator &comm = gv.comm();
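The read path (h5g_Read, h5_Read) mirrors the write path: open the file with the same MPI-IO access list, select the rank-local hyperslab in the file dataspace, and read collectively into a local buffer. A minimal sketch under the same illustrative assumptions (rank, local_n, sketch.h5, /data) as the write sketch near the top of this page:

// Illustrative sketch only: collective read-back of the layout written above.
hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
hid_t file = H5Fopen("sketch.h5", H5F_ACC_RDONLY, fapl);
H5Pclose(fapl);

hid_t dset = H5Dopen(file, "/data", H5P_DEFAULT);
hid_t filespace = H5Dget_space(dset);
hsize_t offset = local_n * static_cast<hsize_t>(rank);   // this rank's slice
hsize_t count  = local_n;
H5Sselect_hyperslab(filespace, H5S_SELECT_SET, &offset, NULL, &count, NULL);
hid_t memspace = H5Screate_simple(1, &count, NULL);

std::vector<double> local_data(local_n);
hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
H5Dread(dset, H5T_NATIVE_DOUBLE, memspace, filespace, dxpl, local_data.data());

H5Pclose(dxpl); H5Sclose(memspace); H5Sclose(filespace);
H5Dclose(dset); H5Fclose(file);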
@@ -568,7 +568,7 @@ namespace Dune {
-/** h5_pWrite(): Write multi-dim. array in parallel mode to HDF5 file.
+/** h5_Write(): Write multi-dim. array in parallel mode to HDF5 file.
*
* \tparam [in] data: data which will be written to the file
* \tparam [in] filename: HDF5 filename
@@ -584,7 +584,7 @@ namespace Dune {
*
*/
template<typename VEC>
-static void h5_pWrite( const VEC& data
+static void h5_Write( const VEC& data
, const std::string& filename
, const std::string& groupname
, const Vector<UINT>& global_dim
@@ -598,7 +598,7 @@ namespace Dune {
MPI_Comm_rank( communicator, &mpi_rank );
if( mpi_rank == 0 )
-std::cout << "h5_pWrite: " << filename << std::endl;
+std::cout << "h5_Write: " << filename << std::endl;
//Info variable needed for HDF5
MPI_Info mpiInfo = MPI_INFO_NULL;
@@ -672,7 +672,7 @@ namespace Dune {
// Select hyperslab in the file.
filespace = H5Dget_space(dset_id);
H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);
-//std::cout<< "h5_pWrite: hyperslab selected!" << std::endl;
+//std::cout<< "h5_Write: hyperslab selected!" << std::endl;
// Create property list for collective dataset write.
plist_id = H5Pcreate(H5P_DATASET_XFER);
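To make the offset/count bookkeeping behind H5Sselect_hyperslab concrete, here is a small worked decomposition for the multi-dimensional h5_Write/h5_Read case; the numbers and the name local_offset are illustrative and not taken from the code:

// Illustrative decomposition of a 4 x 6 global array across two ranks,
// split along dimension 0:
//   global_dim = {4, 6}
//   rank 0: local_count = {2, 6}, local_offset = {0, 0}   -> rows 0..1
//   rank 1: local_count = {2, 6}, local_offset = {2, 0}   -> rows 2..3
// Each rank passes its offset/count pair to H5Sselect_hyperslab(), as in the
// call above, so the selections tile the file dataspace without overlap and
// the collective H5Dwrite/H5Dread can proceed on all ranks.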
@@ -727,7 +727,7 @@ namespace Dune {
-/** h5_pRead(): Read multi-dim. array in parallel mode from HDF5 file.
+/** h5_Read(): Read multi-dim. array in parallel mode from HDF5 file.
*
* \tparam [out] local_data: the data to be read that belongs to the current processor (current hyperslab)
* \tparam [in] filename: HDF5 filename
@@ -743,7 +743,7 @@ namespace Dune {
*
*/
template<typename VEC>
-static void h5_pRead( VEC& local_data
+static void h5_Read( VEC& local_data
, const std::string& filename
, const std::string& groupname
, const Vector<UINT>& local_count
@@ -755,7 +755,7 @@ namespace Dune {
int mpi_rank=0;
MPI_Comm_rank( communicator, &mpi_rank );
if( mpi_rank == 0 )
-std::cout << "h5_pRead: " << filename << std::endl;
+std::cout << "h5_Read: " << filename << std::endl;
/* setup file access template with parallel IO access. */
hid_t access_pList = H5Pcreate( H5P_FILE_ACCESS );
@@ -978,7 +978,7 @@ namespace Dune {
// ==========
// Take care:
// ==========
-// h5_pWrite1d() [write_vector_to_HDF5()] is used to store a DOF vector to a one-dimensional HDF5 file.
+// h5_Write1d() [write_vector_to_HDF5()] is used to store a DOF vector to a one-dimensional HDF5 file.
// For each process, it reserves a section which has the size of the local number of degrees of freedom.
// This implementation has the advantage that it works independently of the used FEM basis.
// Disadvantage:
@@ -990,13 +990,13 @@ namespace Dune {
// void read_vector_from_HDF5( )
//
template<typename NUM,typename GFS>
-static void h5_pWrite1d( std::vector<NUM>& vData,
+static void h5_Write1d( std::vector<NUM>& vData,
const std::string & filename,
const std::string & groupname,
const GFS &gfs
){
-std::cout << "h5_pWrite1d: " << filename << std::endl;
+std::cout << "h5_Write1d: " << filename << std::endl;
const typename GFS::Traits::GridViewType& gv = gfs.gridView();
@@ -1125,20 +1125,20 @@ namespace Dune {
// ==========
// Take care:
// ==========
-// h5_pRead1d() [read_vector_from_HDF5()] is used to read a DOF vector from a one-dimensional HDF5 file that was previously stored using the function
-// h5_pWrite1d( ). If the number of processes or the grid partitioning has changed since then, you will
+// h5_Read1d() [read_vector_from_HDF5()] is used to read a DOF vector from a one-dimensional HDF5 file that was previously stored using the function
+// h5_Write1d( ). If the number of processes or the grid partitioning has changed since then, you will
// get a mess!
//
-// void h5_pWrite1d( )
+// void h5_Write1d( )
template<typename NUM,typename GFS>
-static void h5_pRead1d( std::vector<NUM>& vData,
+static void h5_Read1d( std::vector<NUM>& vData,
const std::string & filename,
const std::string & groupname,
const GFS& gfs
){
-std::cout << "h5_pRead1d: " << filename << std::endl;
+std::cout << "h5_Read1d: " << filename << std::endl;
const typename GFS::Traits::GridViewType& gv = gfs.gridView();
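The one-dimensional layout described in the "Take care" comments requires each process to know where its block of DOFs starts in the global vector. One common way to obtain that offset is an exclusive prefix sum over the local sizes; the sketch below shows this with MPI_Exscan. It is an assumption for illustration only, since the offset computation actually used by h5_Write1d/h5_Read1d is elided from this diff.

// Illustrative sketch: this rank's starting offset in the global 1-d DOF
// dataset as the exclusive prefix sum of the local sizes. Not the code
// actually used by h5_Write1d; its offset computation is not shown here.
int rank = 0;
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
unsigned long long local_size = vData.size();   // local number of DOFs
unsigned long long offset = 0;
MPI_Exscan(&local_size, &offset, 1, MPI_UNSIGNED_LONG_LONG, MPI_SUM, MPI_COMM_WORLD);
if (rank == 0) offset = 0;                      // Exscan leaves rank 0's buffer undefined
unsigned long long global_size = 0;
MPI_Allreduce(&local_size, &global_size, 1, MPI_UNSIGNED_LONG_LONG, MPI_SUM, MPI_COMM_WORLD);
// offset and local_size then define this rank's hyperslab in a dataset of
// length global_size, which is why re-partitioning the grid or changing the
// number of processes makes a previously written file unreadable (see above).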
...