Commit ff94bcf2 authored by Dion Haefner

split random field generator in parallel and sequential components

parent 38d5508c
-configure_file(dorie.in dorie @ONLY)
-install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/dorie DESTINATION ${CMAKE_INSTALL_BINDIR})
+configure_file(dorie.in ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/dorie @ONLY)
+# copy the temporary file into the final destination, setting the permissions
+file(COPY ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/dorie
+     DESTINATION ${CMAKE_CURRENT_BINARY_DIR}
+     FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ
+                      GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
+install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/dorie DESTINATION ${CMAKE_INSTALL_BINDIR}
+        PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ
+                    GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
@@ -5,8 +5,8 @@ DORIEDIR=@CMAKE_BINARY_DIR@
 MPIRUN=@MPIEXEC@
 ############################
-DORIE_EXECUTABLE=$DORIEDIR/src/dorie
-PARAMETERDIR=$DORIEDIR/doc/parameters
+DORIE_EXECUTABLE=$DORIEDIR/dune/dorie/dorie
+PARAMETERDIR=$DORIEDIR/doc/default_files
 DORIE_PYTHON=$DORIEDIR/dune-env
 run_dorie_sequential()
@@ -30,8 +30,10 @@ initialize_folder()
 	echo "###################"
 	echo "INITIALIZING FOLDER"
 	echo "###################"
-	cp -iv $PARAMETERDIR/default.ini parameter.ini
-	cp -iv $PARAMETERDIR/default_bc.dat bc.dat
+	cp -iv $PARAMETERDIR/default.ini $PWD/config.ini
+	cp -iv $PARAMETERDIR/default_pf.ini $PWD/parfield.ini
+	cp -iv $PARAMETERDIR/2d_infiltr.bcdat $PWD/2d_infiltr.bcdat
+	cp -iv $PARAMETERDIR/3d_infiltr.bcdat $PWD/3d_infiltr.bcdat
 	echo "Initialization done"
 }
@@ -40,6 +42,11 @@ plot_vtk()
 	$DORIE_PYTHON plot_vtk.py --vtk $1 --var $2
 }
+
+pf_from_file()
+{
+	$DORIE_PYTHON pf_from_file.py --param $1 --debug
+}
 ###################
 # CHECK IF DORIE WAS INSTALLED
@@ -51,6 +58,7 @@ fi
 RUN=0
 PLOT=0
 CREATE=0
+PFG=0
 PARALLEL=0
# NO ARGUMENTS SUPPLIED
@@ -62,8 +70,7 @@ fi
 # PARSE EACH ARGUMENT
 case $1 in
-	run)
+	run) # run dorie
 	RUN=1
 	shift
 	if [[ $# > 0 ]]; then
@@ -97,6 +104,16 @@ case $1 in
 		esac
 	fi
+	if [[ -e $INIFILE && -f $INIFILE && -s $INIFILE ]]; then
+		if [[ $PARALLEL > 0 ]]; then
+			run_dorie_parallel $INIFILE $PARALLEL
+		else
+			run_dorie_sequential $INIFILE
+		fi
+	else
+		echo "Error: Invalid parameter file $INIFILE"
+	fi
+	exit
 	;;
 	plot)
@@ -125,20 +142,36 @@ case $1 in
 			shift
 		done
 	fi
+	plot_vtk "$VTKFILE" "$VARIABLES"
+	exit
 	;;
 	create)
-	CREATE=1
+	initialize_folder
+	exit
 	;;
+	pfg)
+	shift
+	if [[ $# = 1 ]]; then
+		pf_from_file $1
+		exit
+	else
+		echo "Usage: dorie pfg INIFILE. For more information run 'dorie help'"
+		exit
+	fi
+	;;
 	help)
 	echo -e "Usage: dorie COMMAND [COMMAND-OPTIONS]\n"
 	echo -e "COMMANDS:"
 	echo -e " 'run INIFILE [OPTIONS]' starts DORiE with parameter file INIFILE"
 	echo -e "   OPTIONS:"
 	echo -e "   '--parallel NPROC' starts DORiE in parallel with NPROC processes"
 	echo -e "   '--sequential' starts DORiE in sequential mode (default)"
 	echo -e " 'create' creates dummy parameter files in the current directory"
+	echo -e " 'pfg INIFILE' runs the parameter field generator"
 	echo -e " 'plot VTK [--var VAR]' plots the variables VAR (default: all variables) from the VTK file(s) VTK"
 	echo -e " 'help' displays this message"
@@ -150,31 +183,3 @@ case $1 in
 	exit
 	;;
 esac
-# RUN DORIE
-if [[ $RUN = 1 ]]; then
-	if [[ -e $INIFILE && -f $INIFILE && -s $INIFILE ]]; then
-		if [[ $PARALLEL > 0 ]]; then
-			run_dorie_parallel $INIFILE $PARALLEL
-		else
-			run_dorie_sequential $INIFILE
-		fi
-	else
-		echo "Error: Invalid parameter file $INIFILE"
-	fi
-	exit
-fi
-# INITIALIZE FOLDER
-if [[ $CREATE = 1 ]]; then
-	initialize_folder
-	exit
-fi
-# PLOT VTK FILES
-if [[ $PLOT = 1 ]]; then
-	plot_vtk "$VTKFILE" "$VARIABLES"
-	exit
-fi
 # File for module specific CMake tests.
 # find all required packages
-FIND_PACKAGE (HDF5 REQUIRED)
+FIND_PACKAGE (HDF5 REQUIRED COMPONENTS C)
+if(HDF5_IS_PARALLEL)
+  message(STATUS "Parallel HDF5 library found")
+  add_definitions(-DHDF5_PARALLEL)
+endif()
 FIND_PACKAGE (FFTW REQUIRED)
 FIND_PACKAGE (SuperLU REQUIRED)
 FIND_PACKAGE (MPI REQUIRED)
This source diff could not be displayed because it is too large.
/*
* File: HDF5Tools.hh
* Author: Adrian Ngo
*
* This requires the MPI version of HDF5.
*
* Overview of HDF5 methods:
* h5_Write : sequential write of multi-dim array to HDF5 file
* h5_Read : sequential read of multi-dim array from HDF5 file
*
*/
#ifndef DUNE_GEOINVERSION_HDF5_TOOLS_HH
#define DUNE_GEOINVERSION_HDF5_TOOLS_HH
#include <dune/pdelab/common/geometrywrapper.hh>
#include <assert.h>
#include <sstream>
namespace Dune {
namespace Dorie {
class H5Tools{
private:
H5Tools(){};
public:
/*
* Note that in hdf5, all array indices are ordered the other way round!
*
* So far, we had this:
*
* nCells[0] = number of cells in x-direction
* nCells[1] = number of cells in y-direction
* nCells[2] = number of cells in z-direction
*
* But storing data in hdf5 requires defining a vector 'dims' (in 3d) with
*
* dims[2] = (hsize_t) ( nCells[0] ); // x-direction
* dims[1] = (hsize_t) ( nCells[1] ); // y-direction
* dims[0] = (hsize_t) ( nCells[2] ); // z-direction
*
* and in 2d:
*
* dims[1] = (hsize_t) ( nCells[0] ); // x-direction
* dims[0] = (hsize_t) ( nCells[1] ); // y-direction
*
*/
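/*
* Illustration (hypothetical sizes, not part of the original file): a 3-d
* grid with nCells = {4, 3, 2} in (x, y, z) is therefore described by
* dims = {2, 3, 4}, i.e. dims[dim-1-i] = nCells[i], which is exactly the
* reversal loop used in h5_Write() below.
*/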
/** h5_Write(): Write multi-dim. array in sequential mode to HDF5 file.
*
* \param [in] data: data which will be written to the file
* \param [in] filename: HDF5 filename
* \param [in] groupname: HDF5 group
* \param [in] global_dim: the global dimension of the stored data (total size)
* \param [in] local_count: the size of the local data
* \param [in] local_offset: the offset of the data (in each direction)
* \param [in] communicator: MPI communicator
*
* See
* http://www.hdfgroup.org/HDF5/doc/RM/RM_H5F.html
* for documentation of the HDF5 API.
*
*/
template<typename VEC>
static void h5_Write( const VEC& data
, const std::string& filename
, const std::string& groupname
, const Vector<UINT>& global_dim
, const Vector<UINT>& local_count
, const Vector<UINT>& local_offset
, MPI_Comm communicator
)
{
std::cout << "h5_Write: " << filename << std::endl;
UINT dim = local_count.size();
//std::cout << std::endl << "h5_Write: dim = " << dim << std::endl;
std::cout << "h5_Write: groupname = " << groupname << std::endl;
/* Create a new file using default properties. */
hid_t file_id = H5Fcreate( filename.c_str() // name of the file to be created
, H5F_ACC_TRUNC // if the file already exists, it is truncated, i.e., all data previously stored in it is erased
, H5P_DEFAULT // default file creation property list
, H5P_DEFAULT // default file access property list
);
assert( file_id > -1 );
//std::cout << "h5_Write: h5-file created: " << filename.c_str() << std::endl;
hsize_t mdims[1];
mdims[0] = data.size();
hid_t memspace_id = H5Screate_simple( // H5Screate_simple creates a new simple dataspace and opens it for access, returning a dataspace identifier.
1 // rank=1 is the number of dimensions used in the dataspace
, mdims // mdims is a one-dimensional array of size rank specifying the size of each dimension of the dataspace.
, NULL // maxdims is an array of the same size specifying the upper limit on the size of each dimension. maxdims may be NULL, in which case maxdims == mdims.
); // must be released with H5Sclose or resource leaks will occur
assert( memspace_id > -1 );
//std::cout << "h5_Write: memspace_id created." << std::endl;
/* Create the dataspace for the dataset.
* The dataspace describes the dimensions of the dataset array. */
hsize_t mindims = 50000;
hsize_t dims[ dim ];
for(UINT i=0;i<dim;i++){
dims[dim-i-1] = (hsize_t) ( global_dim[i] );
mindims = std::min( mindims, dims[dim-i-1] );
}
//std::cout << "dims [0] = "<< dims[0]<<std::endl;
//std::cout << "dims [1] = "<< dims[1]<<std::endl;
hid_t dataspace_id = H5Screate_simple(
dim // number of dimensions = rank
, dims // vector containing sizes per dimension
, NULL // maxdims == dims
);
assert( dataspace_id > -1 );
//std::cout << "h5_Write: dataspace_id created." << std::endl;
hid_t plist_id = H5Pcreate( // The new property list is initialized with default values for the specified class.
H5P_DATASET_CREATE // Properties for dataset creation
);
assert( plist_id > -1 );
//std::cout << "h5_Write: plist_id created." << std::endl;
herr_t status;
hsize_t maxnchunks = 1;
hsize_t chunk_dims[ dim ];
for(UINT i=0;i<dim;i++)
chunk_dims[i] = (hsize_t) 1;
status = H5Pset_chunk(
plist_id
, dim // must be == rank of the dataset
, chunk_dims // The values of the chunk_dims array define the size of the chunks to store the dataset's raw data. The unit of measure for chunk_dims values is dataset elements.
);
assert( status > -1 );
//std::cout << "h5_Write: H5Pset_chunk() o.k." << std::endl;
status = H5Pset_shuffle( plist_id ); // Sets the shuffle filter, H5Z_FILTER_SHUFFLE, in the dataset creation property list. This re-orders data to simplify compression.
assert( status > -1 );
//std::cout << "h5_Write: H5Pset_shuffle() o.k." << std::endl;
status = H5Pset_deflate( plist_id, 1 ); // Sets deflate (GNU gzip) compression method and compression level. ( 0 < level < 9, lower = faster, but less compression )
assert( status > -1 );
//std::cout << "h5_Write: H5Pset_deflate() o.k." << std::endl;
/* Create the dataset. */
hid_t dataset_id = H5Dcreate1( file_id,
groupname.c_str(),
HDF5_DATA_TYPE, // 32-bit or 64-bit, "datatypes.hh"
dataspace_id,
plist_id
);
assert( dataset_id > -1 );
//std::cout << "h5_Write: dataset_id created." << std::endl;
/* Write the dataset. */
status = H5Dwrite( // Writes raw data from a buffer to a dataset.
dataset_id // dataset identifier
, H5T_NATIVE_DOUBLE // memory datatype id
, memspace_id // specifies the memory dataspace and the selection within it
, H5S_ALL // specifies the selection within the file dataset's dataspace. H5S_ALL indicates that the entire file dataspace, as defined by the current dimensions of the dataset, is to be selected
, H5P_DEFAULT // Identifier of a transfer property list for this I/O operation. H5P_DEFAULT: The default data transfer properties are used.
, &(data[0]) // application memory buffer
);
assert( status > -1 );
//std::cout << "h5_Write: H5Dwrite( " <<groupname.c_str()<<" ) o.k." << std::endl;
status = H5Sclose( dataspace_id );
assert( status > -1 );
//std::cout << "h5_Write: dataspace_id closed." << std::endl;
status = H5Sclose( memspace_id );
assert( status > -1 );
//std::cout << "h5_Write: memspace_id closed." << std::endl;
status = H5Dclose( dataset_id );
assert( status > -1 );
//std::cout << "h5_Write: dataset_id closed." << std::endl;
/* Close the property list. */
status = H5Pclose( plist_id );
assert( status > -1 );
//std::cout << "h5_Write: H5Pclose(plist_id) done." << std::endl;
/* Close the file. */
status = H5Fclose( file_id );
assert( status > -1 );
//std::cout << "h5_Write: H5Fclose( file_id ) done." << std::endl;
return;
}
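/* Example call (hypothetical names and sizes, for illustration only):
*
*   Vector<RF> field( 100*50, 0.0 );   // a 2-d field of 100 x 50 cells
*   Vector<UINT> global_dim(2); global_dim[0] = 100; global_dim[1] = 50;
*   Vector<UINT> count = global_dim;   // sequential write: full extent
*   Vector<UINT> offset(2, 0);
*   H5Tools::h5_Write( field, "YField.h5", "/YField",
*                      global_dim, count, offset, MPI_COMM_WORLD );
*/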
/** h5_Read(): Read multi-dim. array in sequential mode from HDF5 file.
*
* \param [out] local_data: the data to be read that belongs to the current processor (current hyperslab)
* \param [in] filename: HDF5 filename
* \param [in] groupname: HDF5 group
* \param [in] local_count: array sizes of the local data
* \param [in] local_offset: the offset of the data (in each direction)
* \param [in] communicator: MPI communicator
*
* See
* http://www.hdfgroup.org/HDF5/doc/RM/RM_H5F.html
* for documentation of the HDF5 API.
*
*/
template<typename VEC>
static void h5_Read( VEC& local_data
, const std::string& filename
, const std::string& groupname
, const Vector<UINT>& local_count
, const Vector<UINT>& local_offset
, MPI_Comm communicator
)
{
// open the file for reading
hid_t file_id= H5Fopen (filename.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
assert( file_id > -1 );
// open the dataset
hid_t dataset_id = H5Dopen1(file_id, groupname.c_str());
assert( dataset_id > -1 );
// get the dataspace
hid_t dataspace_id=H5Dget_space (dataset_id);
assert(dataspace_id>-1);
// some needed variables
herr_t status;
hsize_t dim,*dims;
// get the dimension (2-d or 3-d)
dim = H5Sget_simple_extent_ndims( dataspace_id );
// get the size of the problem
dims=(hsize_t*)malloc(dim * sizeof (hsize_t));
status = H5Sget_simple_extent_dims( dataspace_id , dims, 0);
assert( status > -1 );
if( status < 0 )
std::cout << "Warning: H5Sget_simple_extent_dims < 0" << std::endl;
UINT local_size=1;
hsize_t offset[dim];
for( UINT i=0; i<dim; i++ ) {
local_size*=dims[i];
offset[i]=0;
}
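// NB: the sequential reader takes its extents from the dataset itself and
// zeroes every offset, so it always reads the complete dataset; the
// local_count/local_offset arguments mirror the parallel interface and are
// not evaluated here.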
// create the memory space
hid_t memspace_id = H5Screate_simple (dim, dims, NULL);
// select the hyperslab
status = H5Sselect_hyperslab( memspace_id,
H5S_SELECT_SET,
offset,
NULL,
dims,
NULL );
assert(status>-1);
//resize the return data
if( local_data.size() != local_size )
local_data.resize( local_size );
/* set up the collective transfer properties list */
hid_t xferPropList = H5Pcreate (H5P_DATASET_XFER);
assert( xferPropList > -1 );
// finally the reading from the file
status = H5Dread( dataset_id
, H5T_NATIVE_DOUBLE //image.DataType
, memspace_id
, dataspace_id // H5S_ALL //
, xferPropList //H5P_DEFAULT //
, &( local_data[0] )
);
assert(status>-1);
// close the identifiers
H5Dclose (dataset_id);
H5Sclose (dataspace_id);
H5Sclose (memspace_id);
free(dims);
status = H5Fclose( file_id );
assert( status > -1 );
return;
};
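/* Example call (hypothetical names, for illustration only):
*
*   Vector<RF> data;
*   Vector<UINT> count(2, 0), offset(2, 0); // ignored by this sequential reader
*   H5Tools::h5_Read( data, "YField.h5", "/YField",
*                     count, offset, MPI_COMM_WORLD );
*/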
}; // class H5Tools
} // namespace Dorie
} // namespace Dune
#endif /* DUNE_GEOINVERSION_HDF5_TOOLS_HH */
@@ -29,7 +29,11 @@
 #include "datatypes.hh"
 #include "Vector.hh"
 #include "tools.hh"
-#include "H5Tools.hh"
+#if HDF5_PARALLEL
+#include "H5Tools_parallel.hh"
+#else
+#include "H5Tools_sequential.hh"
+#endif
 #include "FieldData.hh"
 #include "random_field_generator.hh"
@@ -254,7 +254,7 @@ namespace Dune {
 	*/
-	H5Tools::h5_pWrite( R_YY
+	H5Tools::h5_Write( R_YY
 		, fielddata.location + "/R_YY.h5"
 		, "/R_YY"
 		, nCells_ExtendedDomain
@@ -345,7 +345,7 @@ namespace Dune {
 	// save FFT of R_YY!
-	H5Tools::h5_pWrite( tmp_vec
+	H5Tools::h5_Write( tmp_vec
 		, fielddata.location + "/FFT_R_YY.h5"
 		, "/FFT_R_YY"
 		, nCells_ExtendedDomain
@@ -435,7 +435,7 @@ namespace Dune {
 	//loading in HDF5
 	Vector<RF> tmp_vec; // temporary vector for the loaded eigenvalues
-	H5Tools::h5_pRead( tmp_vec
+	H5Tools::h5_Read( tmp_vec
 		, fielddata.location + "/FFT_R_YY.h5"
 		, "/FFT_R_YY"
 		, local_count
@@ -608,7 +608,7 @@ namespace Dune {
 	fftw_free( KField );
 	// save the field to disc
-	H5Tools::h5_pWrite( tmp,
+	H5Tools::h5_Write( tmp,
 		fielddata.location + "/YField.h5",
 		"/YField",
 		fielddata.nCells,
@@ -620,7 +620,7 @@ namespace Dune {
 	l = tmp.size();
 	tmp.resize(0);
 	tmp.resize(l,1.0);
-	H5Tools::h5_pWrite( tmp,
+	H5Tools::h5_Write( tmp,
 		fielddata.location + "/matX.h5",
 		"/X",
 		fielddata.nCells,
@@ -631,8 +631,10 @@ namespace Dune {
 	if(comm_size==1){
 		H5Tools::h5_Read( YFieldVector,
 			fielddata.location + "/YField.h5",
-			"/YField"
-			);
+			"/YField",
+			local_count,
+			local_offset,
+			comm );
 	UINT N = YFieldVector.size();
@@ -645,11 +647,6 @@
}; // End of void generateY()
// If fielddata.newEV==false and fielddata.newField==false,
// we try to load an existing field.
bool load_from_file() {
@@ -672,9 +669,16 @@
 		<< std::endl;
 	if(comm_size==1){
+		Vector<UINT> local_count(dim,0);
+		Vector<UINT> local_offset(dim,0);
 		H5Tools::h5_Read( YFieldVector,
 			fielddata.location + "/YField.h5",
-			"/YField"
+			"/YField",
+			local_count,
+			local_offset,
+			comm
 			);
 	UINT N = YFieldVector.size();
@@ -689,134 +693,6 @@
 	}
-	//
-	// Read Y-field from HDF5 file.
-	//
-	void h5_Read( const std::string& filename, const std::string& groupname ){
-		Vector<REAL> yfield_vector;
-		H5Tools::h5_Read( yfield_vector,
-			filename,
-			groupname );
-		import_from_vector( yfield_vector );
-	}
-	void import_from_vector(const Vector<RF>& yfield_vector) {
-		UINT VectorSize = 0;
-		if (dim == 3) {
-			const UINT L = fielddata.nCells[2];
-			const UINT M = fielddata.nCells[1];
-			const UINT N = fielddata.nCells[0];
-			VectorSize = L * M * N;
-		}
-		else {
-			const UINT M = fielddata.nCells[1];
-			const UINT N = fielddata.nCells[0];
-			VectorSize = M * N;
-		}
-		if( yfield_vector.size() != VectorSize ){
-			// std::cout << "Warning: mismatch at import_from_vector: " << std::endl;
-			// std::cout << "yfield_vector.size() = " << yfield_vector.size() << std::endl;
-			// std::cout << "VectorSize = " << VectorSize << std::endl;
-			DUNE_THROW(RangeError, "Mismatch at import_from vector: \n"
-				<< "yfield_vector.size() = " << yfield_vector.size() << "\n"
-				<< "VectorSize = " << VectorSize << ".\n"
-				<< "Possible reason could be: different dimensions of domain and hdf5-file.");
-		}
-		YFieldVector = yfield_vector;
-		KFieldVector.resize( VectorSize );
-		for (UINT l=0; l<VectorSize; l++)
-			KFieldVector[l] = std::exp( yfield_vector[l] );
-	};
-	//
-	// Read Y-field from HDF5 file in parallel mode onto the grid.
-	//
-	template<typename GV>
-	void h5g_pRead( GV gv,
-		const std::string& filename,
-		const std::string& groupname
-		){
-		Vector<REAL> local_Yfield_vector;
-		Vector<UINT> local_count;
-		Vector<UINT> local_offset;
-		H5Tools::h5g_pRead( local_Yfield_vector,
-			filename,
-			groupname,
-			gv,