Commit 9383b363 authored by Santiago Ospina De Los Ríos's avatar Santiago Ospina De Los Ríos

Merge branch '101-couple-the-transportsimulation-with-richardssimulation' into...

Merge branch '101-couple-the-transportsimulation-with-richardssimulation' into 111-add-dg-local-operator-to-the-simulationtransport-class
parents 96f1efc6 1265ec85
......@@ -10,6 +10,8 @@ variables:
CMAKE_FLAGS:
-DDUNE_PYTHON_VIRTUALENV_SETUP=True
-DDUNE_PYTHON_ALLOW_GET_PIP=True
# Recursively load submodules by default
GIT_SUBMODULE_STRATEGY: recursive
MAKE_FLAGS:
-j $CPUS_MULTICORE
RUN_IN_DUNE_ENV: $CI_PROJECT_DIR/build-cmake/run-in-dune-env
......@@ -111,7 +113,7 @@ build:debug: &debug
script:
- CMAKE_FLAGS="
$CMAKE_FLAGS
-DCMAKE_BUILD_TYPE=Debug
-DCMAKE_BUILD_TYPE=Debug
-DCMAKE_CXX_FLAGS_DEBUG='-Werror'"
$DUNECONTROL --only=dorie configure
- $DUNECONTROL --only=dorie make $MAKE_FLAGS build_unit_tests
......@@ -122,7 +124,7 @@ build:debug-clang:
script:
- CMAKE_FLAGS="
$CMAKE_FLAGS
-DCMAKE_BUILD_TYPE=Debug
-DCMAKE_BUILD_TYPE=Debug
-DCMAKE_C_COMPILER=clang
-DCMAKE_CXX_COMPILER=clang++
-DCMAKE_CXX_FLAGS_DEBUG='-Werror'"
......@@ -161,12 +163,14 @@ test:unit-tests:
- ./run-in-dune-env gcovr
--root ../
--exclude ../dune/dorie/test
--exclude ../plugins
--html --html-details
-o dune/dorie/test/coverage/coverage.html
# run again for coverage report in GitLab
- ./run-in-dune-env gcovr
--root ../
--exclude ../dune/dorie/test
--exclude ../plugins
coverage: '/^TOTAL.*\s+(\d+\%)$/'
artifacts:
name: "$CI_JOB_NAME"
......@@ -226,4 +230,4 @@ deploy:sphinx-docs:
-P $CI_PROJECT_DIR/build-cmake/doc/html
environment:
name: sphinx-docs
url: https://dorie-doc.netlify.com/
\ No newline at end of file
url: https://dorie-doc.netlify.com/
[submodule "plugins/vendor/spdlog"]
path = plugins/vendor/spdlog
url = https://github.com/gabime/spdlog.git
......@@ -30,6 +30,9 @@
which solves the transport equation with a finite volume scheme for given
grid functions of water content and velocity.
* Grid function container: Container to support transient grid functions.
* Infrastructure for the input of Miller scaling fields. !110
* Add logging framework 'spdlog' as submodule !106
* Support input of boundary segmentations !119
### Changed
* `Simulation` is renamed `RichardsSimulation` and moved to
......@@ -127,9 +130,13 @@
It dumps its data into a YAML file which is then loaded for writing
the default configuration files and the cheat sheet.
* Use logging framework 'spdlog' for all terminal output !106
* DORiE now writes vertex data by default. #129
### Fixed
* Solver in `RichardsSimulation` was using the wrong time variable.
[!116](https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/merge_requests/116)
* Shape of input datasets was flipped when loading scaling factors !124
### Deprecated
* The configuration file key `[parameters.interpolation]` is deprecated due to
......@@ -143,7 +150,7 @@
### Removed
* The class `H5File::AttributeReader` was completely removed.
* Boundary conditions `evaporation` and `limitedInflux` !120
## 1.1.1 (2018-08-21)
......
......@@ -41,6 +41,7 @@ dune_enable_all_packages()
dune_require_cxx_standard(MODULE "dorie" VERSION 14)
# add subdirectories
add_subdirectory("plugins/vendor")
add_subdirectory("m4")
add_subdirectory("cmake/modules")
add_subdirectory("python")
......
# Welcome to DORiE!
(**D**UNE-**O**perated **Ri**chards equation solving **E**nvironment)
DORiE is a software suite for solving Richard's Equation. The core feature is a C++ PDE-solver powered by [DUNE](https://dune-project.org/) and the [DUNE-PDELab](https://dune-project.org/modules/dune-pdelab/) module. It implements a Discontinous Galerkin (DG) discretization scheme on structured rectangular / cubic and unstructured simplex grids in two and three spatial dimensions, and makes use of advanced features like adaptive grid refinement.
DORiE is a software suite for solving the Richards Equation. The core feature is a C++ PDE-solver powered by [DUNE](https://dune-project.org/) and the [DUNE-PDELab](https://dune-project.org/modules/dune-pdelab/) module. It implements a Discontinuous Galerkin (DG) discretization scheme on structured rectangular / cubic and unstructured simplex grids in two and three spatial dimensions, and makes use of advanced features like adaptive grid refinement.
The suite encapsulates a documentation and various tools for program setup, program testing, and output analysis, which are mostly written in Python.
......@@ -63,7 +63,7 @@ by CI tests.
| Software | Version/Branch | Comments |
| ---------| -------------- | -------- |
| CMake | 3.10.2 |
| GCC | 7.3 | Alternatively: LLVM Clang >=6
| GCC | 7.3 | Alternatively: LLVM Clang >=6, or Apple Clang 10
| git |
| pkg-config |
| HDF5 | 1.10 | with MPI support
......@@ -73,6 +73,7 @@ by CI tests.
| MPI | | Tested with OpenMPI 2.1.1
| SuperLU | 5.2 |
| [yaml-cpp](https://github.com/jbeder/yaml-cpp) | >= 5.2.0 |
| [spdlog](https://github.com/gabime/spdlog) | 1.1.0 | Included as Git Submodule
| [dune-common](https://gitlab.dune-project.org/core/dune-common) | releases/2.6
| [dune-geometry](https://gitlab.dune-project.org/core/dune-geometry) | releases/2.6
| [dune-grid](https://gitlab.dune-project.org/core/dune-grid) | releases/2.6
......@@ -102,6 +103,8 @@ If you installed [Anaconda](https://conda.io/docs/user-guide/install/download.ht
1. **macOS** users need to start by installing the Apple Command Line Tools by executing
xcode-select --install
Make sure you have no pending software updates for your respective version of macOS!
2. Install third party packages:
......@@ -132,14 +135,22 @@ If you installed [Anaconda](https://conda.io/docs/user-guide/install/download.ht
**Parallel runs without these two packages are possible but not supported!**
4. Clone the DUNE modules and DORiE into a suitable folder on your machine. Use `git checkout` to switch to the correct branches (see above). Enter the parent folder, and call
4. Clone the DUNE modules into a suitable folder on your machine.
Use `git checkout` to switch to the correct branches (see above).
5. Clone DORiE into the same folder.
DORiE includes
[Git Submodules](https://git-scm.com/book/en/v2/Git-Tools-Submodules),
which requires you to add the `--recurse-submodules` option to the
`git clone` command. Switch to the desired branch or tag.
6. Enter the parent folder, and call
CMAKE_FLAGS="-DDUNE_PYTHON_VIRTUALENV_SETUP=True -DDUNE_PYTHON_ALLOW_GET_PIP=True" ./dune-common/bin/dunecontrol all
to build all DUNE modules. Additionally, you can add `MAKE_FLAGS="-j X"` before the call to `dunecontrol` to compile on `X` processes in parallel.
**Warning:** Users of **macOS** with Apple Clang version >=10 need to append `-DDUNE_HAVE_CXX_OPTIONAL=Off` to the `CMAKE_FLAGS`.
If you installed software into paths not appended to your `PATH` variable, you will have to add `CMAKE_FLAGS` to the call to make sure that CMake finds all packages. Alternatively, you can add a custom options file. See the [DUNE Installation Instructions](https://dune-project.org/doc/installation/) for details. CMake will throw an error if required packages are not found.
**Warning:** Anaconda supplies its own version of HDF5 which is typically found first by CMake. If you have Anaconda installed on your machine, add
......@@ -298,8 +309,8 @@ it is recommended to build them in a `Release` environment.
| Test category | Build tests | Execute tests | Recommended build type |
| ------------- | ----------- | ------------- | ---------------------- |
| Unit tests | `make build_unit_tests` | `make_unit_tests` | `Debug`
| System tests | `make build_system_tests` | `make_system_tests` | `Release`
| Unit tests | `make build_unit_tests` | `make unit_tests` | `Debug`
| System tests | `make build_system_tests` | `make system_tests` | `Release`
The `make` commands are to be executed from within the `build-cmake` directory.
......
......@@ -45,6 +45,7 @@ dune_cmake_sphinx_doc(SPHINX_CONF ${CMAKE_CURRENT_SOURCE_DIR}/conf.py.in
man-installation.rst
man-config-file.rst
man-parameter-file.rst
man-grid.rst
MODULE_ONLY)
if(TARGET sphinx_html)
......
......@@ -116,9 +116,8 @@ function(create_default_config INPUT OUTPUT SOURCE_DIR CSS)
endif()
endfunction()
# copy BC files
file(COPY 2d_infiltr.bcdat DESTINATION .)
file(COPY 3d_infiltr.bcdat DESTINATION .)
# copy BC & parameter files
file(COPY 2d_infiltr.bcdat 3d_infiltr.bcdat param.yml DESTINATION .)
# Random field generator
scrape_parameters(
......
......@@ -89,36 +89,88 @@ adding an empty line, make text **bold** or ``monospaced``.
<values> int </values>
<suggestion> 0 </suggestion>
</parameter>
</category>
<parameter name="mappingFile">
<definition> The H5 file containing the mapping from cell to medium index.
Specify the dataset inside the file with the next key. Leave empty or
set to ``None`` for a global homogeneous medium. In this case,
``grid.globalIndex`` has to be specified.
<category name="grid.mapping">
<parameter name="file">
<definition> The H5 file containing all mapping datasets.
Leave empty or set to ``None`` for global mappings.
</definition>
<values> path </values>
<suggestion> None </suggestion>
<comment> This category is only used for 'rectangular' grids </comment>
</parameter>
<parameter name="mappingFileDataset">
<definition> The H5 dataset inside ``grid.mappingFile`` containing the
mapping from cell to medium index.
<parameter name="volume">
<definition> The H5 dataset containing the
mapping from cell to medium index. May specify a global index for the
entire volume if its value can be parsed as ``int``.
</definition>
<values> path </values>
<suggestion> 0 </suggestion>
<values> path or int </values>
</parameter>
<parameter name="globalIndex">
<definition> The medium index to use for all grid cells, if
``mappingFile`` is unset or ``None``.
<parameter name="boundaryLower">
<definition> The H5 dataset mapping the lower boundary faces to
boundary condition indices. May specify a global index for the
boundary if its value can be parsed as ``int``.
</definition>
<values> int </values>
<suggestion> 0 </suggestion>
<values> path or int </values>
</parameter>
<parameter name="boundaryUpper">
<definition> The H5 dataset mapping the upper boundary faces to
boundary condition indices. May specify a global index for the
boundary if its value can be parsed as ``int``.
</definition>
<suggestion> 1 </suggestion>
<values> path or int </values>
</parameter>
<parameter name="boundaryLeft">
<definition> The H5 dataset mapping the left boundary faces to
boundary condition indices. May specify a global index for the
boundary if its value can be parsed as ``int``.
</definition>
<suggestion> 2 </suggestion>
<values> path or int </values>
</parameter>
<parameter name="boundaryRight">
<definition> The H5 dataset mapping the right boundary faces to
boundary condition indices. May specify a global index for the
boundary if its value can be parsed as ``int``.
</definition>
<suggestion> 3 </suggestion>
<values> path or int </values>
</parameter>
<parameter name="boundaryFront">
<definition> The H5 dataset mapping the front boundary faces to
boundary condition indices (3D only). May specify a global index for
the boundary if its value can be parsed as ``int``.
</definition>
<suggestion> 4 </suggestion>
<values> path or int </values>
<comment> Only in 3D </comment>
</parameter>
<parameter name="boundaryBack">
<definition> The H5 dataset mapping the back boundary faces to
boundary condition indices (3D only). May specify a global index for
the boundary if its value can be parsed as ``int``.
</definition>
<suggestion> 5 </suggestion>
<values> path or int </values>
</parameter>
</category>
<category name="adaptivity">
<parameter name="policy">
<definition> Switches the target policy to do adaptive grid refinement
(h-adaptivity). If enabled, an unstructured grid manager with higher
computational cost is used when using rectangular / cubic grids.
<definition> Switches the target policy to do adaptive grid refinement
(h-adaptivity). If enabled, an unstructured grid manager with higher
computational cost is used when using rectangular / cubic grids.
</definition>
<values> none, waterFlux </values>
<suggestion> none </suggestion>
......@@ -188,10 +240,10 @@ adding an empty line, make text **bold** or ``monospaced``.
</category>
<category name="output">
<parameter name="verbose">
<definition> Overall verbosity of the program </definition>
<suggestion> 0 </suggestion>
<values> 0, 1, 2, 3 </values>
<parameter name="logLevel">
<definition> Logging level of the core functions. </definition>
<suggestion> info </suggestion>
<values> trace, debug, info, warning, error, critical </values>
</parameter>
</category>
......
......@@ -50,10 +50,12 @@ adding an empty line, make text **bold** or ``monospaced``.
<parameter name="converter">
<definition> Identifier of the random field converter. The converter is
applied after the random field is created and modifies it.
Use ``exponential`` for creating a suitable Miller scaling field.
</definition>
<values> none, binary </values>
<values> none, binary, exponential </values>
<suggestion> binary </suggestion>
<comment> none, binary </comment>
<comment> none, binary, exponential </comment>
</parameter>
<parameter name="tempDir">
......@@ -136,4 +138,17 @@ adding an empty line, make text **bold** or ``monospaced``.
<suggestion> 0 1 </suggestion>
</parameter>
</category>
<category name="converter.exponential">
<parameter name="varianceScaling">
<definition> Subtract the variance from the random field values before
applying the exponential. For a random field with ``gaussian``
covariance, this will result in a conductivity field with the same
macroscopic properties as a homogeneous field, in the sense that the
average water flux is the same (Roth 1995).
</definition>
<values> bool </values>
<suggestion> true </suggestion>
</parameter>
</category>
</dorie>
volumes:
sand:
index: 0
type: MvG
parameters:
alpha: -2.3
n: 4.17
k0: 2.2E-5
theta_r: 0.03
theta_s: 0.31
tau: -1.1
silt:
index: 1
type: MvG
parameters:
alpha: -0.7
n: 1.3
k0: 1.0E-5
theta_r: 0.01
theta_s: 0.41
tau: 0.0
scaling:
type: None
......@@ -32,10 +32,10 @@ adding an empty line, make text **bold** or ``monospaced``.
<dorie>
<category name="output">
<parameter name="verbose">
<definition> Verbosity of the Richards simulation </definition>
<suggestion> 0 </suggestion>
<values> 0, 1, 2, 3 </values>
<parameter name="logLevel">
<definition> Logging level of the Richards solver. </definition>
<suggestion> info </suggestion>
<values> trace, debug, info, warning, error, critical </values>
</parameter>
<parameter name="outputPath">
......@@ -58,7 +58,7 @@ adding an empty line, make text **bold** or ``monospaced``.
cell-centered data.
</definition>
<values> true, false </values>
<suggestion> false </suggestion>
<suggestion> true </suggestion>
</parameter>
<parameter name="subsamplingLevel">
......@@ -218,10 +218,22 @@ adding an empty line, make text **bold** or ``monospaced``.
<parameter name="fluxReconstruction">
<definition> Apply the flux reconstruction method to the solved matric
head and obtain conservative gradients. </definition>
head and obtain conservative gradients. It always computes (internally)
the local lifting, independently of whether the ``lifting`` keyword is
active or not.
</definition>
<suggestion> true </suggestion>
<values> true, false </values>
</parameter>
<parameter name="lifting">
<definition> Compute the local lifting for the discrete gradient. It serves
as a measure of the influence of the symmetry term on the interior degrees
of freedom.
</definition>
<suggestion> false </suggestion>
<values> true, false </values>
</parameter>
</category>
<category name="NewtonParameters">
......
......@@ -38,10 +38,10 @@ adding an empty line, make text **bold** or ``monospaced``.
<values> endOfStep, all, none </values>
</parameter>
<parameter name="verbose">
<definition> Overall verbosity of the program </definition>
<suggestion> 0 </suggestion>
<values> 0, 1, 2, 3 </values>
<parameter name="logLevel">
<definition> Logging level of the Richards solver. </definition>
<suggestion> info </suggestion>
<values> trace, debug, info, warning, error, critical </values>
</parameter>
<parameter name="outputPath">
......@@ -64,7 +64,7 @@ adding an empty line, make text **bold** or ``monospaced``.
cell-centered data.
</definition>
<values> true, false </values>
<suggestion> false </suggestion>
<suggestion> true </suggestion>
</parameter>
<parameter name="subsamplingLevel">
......
......@@ -21,5 +21,4 @@ USE_MATHJAX = YES
# EXCLUDE += @top_srcdir@/...
# Include documents with other extensions
FILE_PATTERNS += *.dox
FILE_PATTERNS += *.dox
\ No newline at end of file
spatial_resolution_north 0
spatial_resolution_south 0
spatial_resolution_west -1
spatial_resolution_east -1
number_BC_change_times 1
0 evaporation 3.55e-8 -6.0 dirichlet 0
\ No newline at end of file
spatial_resolution_north_we 0
spatial_resolution_north_fb 0
spatial_resolution_south_we 0
spatial_resolution_south_fb 0
spatial_resolution_west_sn -1
spatial_resolution_west_fb -1
spatial_resolution_east_sn -1
spatial_resolution_east_fb -1
spatial_resolution_front_sn -1
spatial_resolution_front_we -1
spatial_resolution_back_sn -1
spatial_resolution_back_we -1
number_BC_change_times 1
0 evaporation 3.55e-8 -6.0 dirichlet 0
\ No newline at end of file
......@@ -48,6 +48,7 @@ Manual
man-installation
python-dorie-wrapper
man-config-file
man-grid
man-bcfile
man-cookbook
man-parameter-file
......
......@@ -30,20 +30,6 @@ Neumann
Set a fixed volumetric flux [m/s] at the boundary. Positive values imply fluxes out of the domain, negative values imply fluxes into the domain.
Limited Influx
++++++++++++++
.. object:: limited_influx <flux>
Set a volumetric flux [m/s] at the boundary (Neumann BC). Fluxes into the domain are stopped as soon as the matric head at the respective boundary segment reaches :math:`h_m \geq 0.0 \, \text{m}`.
Evaporation
+++++++++++
.. object:: evaporation <flux> <head>
Set a volumetric flux [m/s] (Neumann BC) and a fallback matric head [m] (Dirichlet BC) at the boundary. The Neumann BC is invoked as long as the water content inside the soil can sustain the flux. Before every time step, the numeric flux caused by the given Dirichlet BC is estimated. If it is lower than the given Neumann flux, the Dirichlet BC is applied.
Datafile Structure
==================
......@@ -96,9 +82,8 @@ These lines follow a simple grammar:
.. productionlist:: bc
bc_line: time { group }*
time: `float`
group: ( bc_type1 `float` ) | ( bc_type2 `float` `float` )
bc_type1: "neumann" | "dirichlet" | "limited_influx"
bc_type2: "evaporation"
group: ( bc_type `float` )
bc_type: "neumann" | "dirichlet"
The boundary conditions defined here are parsed in the same order as the boundary segments have been specified. In 3D, the rectangular boundary segments are parsed in a tabular fashion, where columns run faster than rows. Columns are defined along the first direction specified in the `Boundary Segmentation`_, and rows are defined along the second direction.
......
......@@ -111,8 +111,29 @@ From the description above one can infer that one has to distinguish between *wa
Flux reconstruction
-------------------
The flux reconstruction is as projection of the fluxes used in the Discontinuous Galerkin method into a vector field function. Using correct elements, it can ensure that the fluxes in normal direction to the entity are *equivalent* to those computed by the Discontinuous Galerkin method, and most imprtantly, it can also ensure the continuity of them. This procedure makes possible to use the results of DORiE useful compute other problems that rely on the fluxes of the water (i.e. solute transport).
The flux reconstruction is a projection of the fluxes used in the Discontinuous Galerkin method into a vector field function. Using correct elements, this procedure can ensure that fluxes in normal direction to the element are *equivalent* to those computed by the Discontinuous Galerkin method, and most importantly, it can also ensure their continuity. Hence, the resulting vector field is useful for computing other problems that rely on the fluxes of the water (i.e. solute transport).
The flux reconstruction technique always uses Raviart-Thomas finite elements of one degree less than the one set for the Richards model. It can be identified in the vtk file by the name ``flux_RT{k-1}``, where ``k`` is the finite element order set for the Richards model. Flux reconstruction is not available for non-conforming grids (i.e. Cube-adaptive grids).
+---------------------------+---+---+---+
| Richards FEorder | 1 | 2 | 3 |
+============+====+=========+===+===+===+
| | 2D | Simplex | ✓ | ✓ | ✓ |
| | +---------+---+---+---+
| | | Cube | ✓ | ✓ | ✓ |
| Non-adapt. +----+---------+---+---+---+
| | 3D | Simplex | ✓ | ✓ | ✓ |
| | +---------+---+---+---+
| | | Cube | ✓ | ✓ | |
+------------+----+---------+---+---+---+
| | 2D | Simplex | ✓ | ✓ | ✓ |
| | +---------+---+---+---+
| | | Cube | | | |
| Adapt. +----+---------+---+---+---+
| | 3D | Simplex | ✓ | ✓ | ✓ |
| | +---------+---+---+---+
| | | Cube | | | |
+------------+----+---------+---+---+---+
Examples
========
......
Grid Creation and Mapping
=========================
To guarantee numeric accuracy, :doc:`boundary conditions <man-bcfile>`
and :doc:`parameterizations <man-parameter-file>` must exactly map to grid
boundary faces and grid cells, respectively. DORiE therefore not only requires
a specification of the actual grid, but also of these mappings.
DORiE supports two types of grid input, which are controlled via the config
file parameter ``grid.gridType``:
1. Building a rectangular grid on the fly (``rectangular``).
Depending on whether grid adaptivity is enabled or not, DORiE will use
a structured or an unstructured grid manager. This does not change the input
scheme. The user has to specify the config parameters for ``[grid]``
``dimensions``, spatial ``extensions``, and grid ``cells`` in each
direction. For the mapping of boundary conditions and parameters,
:ref:`mapping datasets <man-grid_mapping-dataset>` are required.
2. Building a grid according to a GMSH grid file (``gmsh``).
This mode always uses an unstructured grid manager. The user only has to
specify the ``[grid]`` ``dimensions`` and the GMSH ``gridFile``.
The mapping is read from this file and must be considered when
:ref:`building the mesh <man-grid_gmsh>`.
.. _man-grid_mapping-dataset:
Mapping Datasets
----------------
Mappings are realized as HDF5_ datasets, where every value is mapped to a
single grid entity. Changing the grid configuration requires adapting these
datasets! All config keys of this part refer to the config section
``[grid.mapping]``. There is one dataset for the mapping of the soil
architecture (``volume``) and one dataset for each segment of the boundary
(``boundaryXYZ``). The datasets contain *integer* indices, which are then
identified with certain boundary conditions or parameterizations given by
the respective YAML input files. All mapping datasets must be stored inside
the same H5 file (``file``). Use the Python module ``h5py`` for easily
creating these data files.
The dataset layout follows the C memory layout, with the primary dimension
running fastest. The coordinate system is (right-hand) cartesian, with the
x-axis running to the right in both 2D and 3D.
All datasets follow the general layout ``array[z, y, x]``, where unused
dimensions are simply omitted. The first dimension is always the vertical axis.
The ``volume`` dataset must have the same dimension as the grid itself
(``grid.dimensions``), whereas the ``boundaryXYZ`` datasets are interpreted as
projections of the boundary in their respective normal direction, reducing the
dimension by one. The ``boundaryLower`` dataset would have the layout
``array[x]`` for a 2D grid, and ``array[y, x]`` for a 3D grid. Therefore,
``boundaryLower`` and ``boundaryUpper`` are "seen from above"
(3D-layout: ``array[y, x]``), and ``boundaryLeft`` and ``boundaryRight`` are
"seen from the right" (3D-layout: ``array[z, y]``).
The following Python code creates a volume mapping dataset ``layered`` in a
file ``mapping.h5``, where the lower half of the domain maps to medium 0,
and the upper half to medium 1. The 3D domain contains 10 cells in each
direction.
.. code-block:: python
import numpy as np
import h5py
# Create dataset. Mind the data type!
size = 10
layered = np.zeros((size, size, size), dtype=np.int_)
layered[5:, ...] = 1
# Write dataset to file in 'append' mode
with h5py.File("mapping.h5", 'a') as file:
file.create_dataset("layered", data=layered)
Homogeneous Mappings
^^^^^^^^^^^^^^^^^^^^
If the entire volume, or a complete boundary, should be mapped to a single
index, no dataset is required. Instead, you can set the value for the
respective config file parameter to the desired index. If no dataset should be
read at all, set ``file`` to ``none``.
Even with ``file`` set to a valid H5 file, DORiE will *always* try to
interpret the input values for ``grid.mapping`` as integers. If this succeeds,
the value is interpreted as "global" mapping index for the respective part
of the grid. Therefore, **do not use dataset names starting with digits!**
.. _man-grid_gmsh:
Mapping Soil Layers with GMSH
-----------------------------
GMSH_ supports mapping lines, surfaces, and volumes to *physical* entities.
These entities may combine multiple of the aforementioned *geometric*
entities. Physical entities are assigned a non-negative index upon creation.
These indices can be set by the user in any of the GMSH frontends.
Indices may not occur multiple times, even if they are assigned to different
types of physical entities.
The following ``.geo`` GMSH input file creates a rectangular domain that is
split in half. The lower part is mapped to index 1, the upper part to index 2.
Additionally, a different index for every boundary orientation is set. Notice
that the left and right boundaries consist of two *geometric* lines each.
This code can directly be transferred to a Python script using
the pygmsh_ module. It writes a ``.geo`` file while checking for a correct
syntax within your script. It is readily installed into the
:doc:`virtual environment <python-dorie-wrapper>`.
.. code-block:: default
// define geometric entities
Point(1) = {0, 0, 0, 0.1};
Point(2) = {2, 0, 0, 0.1};
Point(3) = {2, 2, 0, 0.1};
Point(4) = {0, 2, 0, 0.1};
Point(5) = {0, 1, 0, 0.1};
Point(6) = {2, 1, 0, 0.1};
Line(1) = {1, 2};
Line(2) = {2, 6};
Line(3) = {6, 3};
Line(4) = {3, 4};
Line(5) = {4, 5};
Line(6) = {5, 1};
Line(7) = {5, 6};
Line Loop(1) = {1, 2, -7, 6};
Plane Surface(1) = {1};
Line Loop(2) = {7, 3, 4, 5};
Plane Surface(2) = {2};
// define physical entities, index in round brackets
Physical Surface(1) = {1}; // lower
Physical Surface(2) = {2}; // upper
// entire set of physical entities must always be defined!
Physical Line(3) = {1}; // bottom
Physical Line(4) = {2, 3}; // right
Physical Line(5) = {4}; // top
Physical Line(6) = {5, 6}; // left
A ``.geo`` file is the basis for creating the actual mesh in GMSH. You can
load it into the GMSH GUI, or perform the meshing directly using the
`GMSH command line interface
<http://gmsh.info/doc/texinfo/gmsh.html#Non_002dinteractive-mode>`_:
gmsh <geo-file> -<dim>
Replace ``<geo-file>`` with the appropriate file, and ``dim`` with the
spatial dimension of the intended mesh.
.. _HDF5: https://www.h5py.org/
.. _GMSH: http://gmsh.info/
.. _pygmsh: https://pypi.org/project/pygmsh/
This diff is collapsed.
......@@ -5,15 +5,15 @@ if(dune-testtools_FOUND)
endif()
add_executable("richards" richards.cc)
dune_target_link_libraries(richards richards-impl ${DUNE_LIBS})
dune_target_link_libraries(richards richards-impl ${DUNE_LIBS} spdlog)
add_executable("transport" transport.cc)
dune_target_link_libraries(transport richards-impl ${DUNE_LIBS})
dune_target_link_libraries(transport transport-impl ${DUNE_LIBS})
dune_target_link_libraries(transport richards-impl ${DUNE_LIBS} spdlog)
dune_target_link_libraries(transport transport-impl ${DUNE_LIBS} spdlog)
add_custom_target("dorie" DEPENDS richards transport)
# enable setting operator scheme from config file
if(EXPERIMENTAL_DG_FEATURES)
target_compile_definitions("dorie" PUBLIC -DEXPERIMENTAL_DG_FEATURES)
endif()
\ No newline at end of file
endif()
......@@ -120,7 +120,7 @@ namespace Dorie{