...
 
Commits (113)
......@@ -3,8 +3,9 @@ build-cmake/
# Exclude generated files
doc/manual/config-file.rst
doc/default_files/config.ini
doc/cookbook/1-infiltration-sand/config.ini
python/dorie/wrapper/pf_from_file.py
python/dorie/wrapper/test_dorie.py
python/dorie/dorie/cli/cmds.py
test/maps/cell_ids.h5
......
variables:
DUNE_CONTROL_PATH: /opt/dune:$CI_PROJECT_DIR
DUNE_DIR: /opt/dune
DUNE_CONTROL_PATH: $DUNE_DIR:$CI_PROJECT_DIR
DUNECONTROL: ./dune-common/bin/dunecontrol
CPUS_MULTICORE: 4
CPUS_DIND: 2
......@@ -10,7 +11,7 @@ variables:
BASE_IMAGE: dorie/dune-env
# Use semantic versioning (not the version of DUNE) and bump according to
# whether changes are backwards-compatible or not.
IMAGE_VERSION: "1.2"
IMAGE_VERSION: "1.3"
DUNE_ENV_IMAGE: ${BASE_IMAGE}:img-v${IMAGE_VERSION}
CMAKE_FLAGS:
......@@ -22,10 +23,38 @@ variables:
-j $CPUS_MULTICORE
RUN_IN_DUNE_ENV: $CI_PROJECT_DIR/build-cmake/run-in-dune-env
# Documentation server configuration
DOC_HOST: root@hermes.iup.uni-heidelberg.de
DOC_PORT: 2345 # ... forwards to utopia_doc_server container. root ok ;)
DOC_REMOTE_BASE_DIR: /var/dorie_doc
DOC_REMOTE_PATH: $DOC_REMOTE_BASE_DIR/$CI_COMMIT_REF_SLUG
image: $DUNE_ENV_IMAGE
# Move into top-level DUNE directory
before_script:
- cd /opt/dune
- cd $DUNE_DIR
# Provide SSH access via the SSH_PRIVATE_KEY and SSH_KNOWN_HOSTS variables
.ssh-access: &ssh_access
before_script:
# Run ssh-agent (inside the build environment)
- eval $(ssh-agent -s)
# Add the SSH key stored in SSH_PRIVATE_KEY variable to the agent store
# We're using tr to fix line endings which makes ed25519 keys work
# without extra base64 encoding.
# https://gitlab.com/gitlab-examples/ssh-private-key/issues/1#note_48526556
- echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - > /dev/null
# Create the SSH directory and give it the right permissions
- mkdir -p ~/.ssh
- chmod 700 ~/.ssh
# Add the known hosts lists to ensure this ssh connection is the right one
- echo "$SSH_KNOWN_HOSTS" > ~/.ssh/known_hosts
- chmod 644 ~/.ssh/known_hosts
stages:
- setup
......@@ -95,12 +124,14 @@ prep:update-dune-clang:
# --- Build jobs ---
build:system-tests: &build-tests
stage: build
# Reduce concurrent jobs due to RAM limitations
variables:
MAKE_FLAGS: -j 2
script:
- CMAKE_FLAGS="$CMAKE_FLAGS"
- CMAKE_FLAGS="$CMAKE_FLAGS -DCOVERAGE_REPORT=On"
$DUNECONTROL --only=dorie configure
- $DUNECONTROL --only=dorie make $MAKE_FLAGS dorie-rfg
- $DUNECONTROL --only=dorie make $MAKE_FLAGS build_system_tests
- $DUNECONTROL --only=dorie make doc
artifacts:
name: "$CI_JOB_NAME"
paths:
......@@ -110,27 +141,32 @@ build:system-tests: &build-tests
build:unit-tests:
<<: *build-tests
script:
- CMAKE_FLAGS="$CMAKE_FLAGS
-DCMAKE_BUILD_TYPE=Debug"
- CMAKE_FLAGS="$CMAKE_FLAGS -DCOVERAGE_REPORT=On
-DCMAKE_BUILD_TYPE=None"
$DUNECONTROL --only=dorie configure
- $DUNECONTROL --only=dorie make $MAKE_FLAGS build_unit_tests
build:debug: &debug
stage: build
allow_failure: true
script:
before_script:
- cd $DUNE_DIR
- CMAKE_FLAGS="
$CMAKE_FLAGS
-DCMAKE_BUILD_TYPE=Debug
-DCMAKE_CXX_FLAGS_DEBUG='-Werror'"
$DUNECONTROL --only=dorie configure
script:
- $DUNECONTROL --only=dorie make $MAKE_FLAGS dorie-rfg
- $DUNECONTROL --only=dorie make $MAKE_FLAGS build_unit_tests
- $DUNECONTROL --only=dorie make $MAKE_FLAGS richards_d2_r1 transport_d2_r0_t0
build:debug-clang:
<<: *debug
image: ${DUNE_ENV_IMAGE}-clang
script:
before_script:
- cd $DUNE_DIR
# Choose Clang compiler explicitly
- CMAKE_FLAGS="
$CMAKE_FLAGS
-DCMAKE_BUILD_TYPE=Debug
......@@ -138,59 +174,71 @@ build:debug-clang:
-DCMAKE_CXX_COMPILER=clang++
-DCMAKE_CXX_FLAGS_DEBUG='-Werror'"
$DUNECONTROL --only=dorie configure
- $DUNECONTROL --only=dorie make $MAKE_FLAGS dorie-rfg
- $DUNECONTROL --only=dorie make $MAKE_FLAGS build_unit_tests
build:docs:
stage: build
script:
- CMAKE_FLAGS="$CMAKE_FLAGS"
$DUNECONTROL --only=dorie configure
- $DUNECONTROL --only=dorie make doc
artifacts:
name: "$CI_JOB_NAME"
paths:
- $CI_PROJECT_DIR/build-cmake/doc
expire_in: 1 day
# --- Tests ---
test:system-tests: &test
stage: test
dependencies:
- build:system-tests
needs: ["build:system-tests"]
variables:
GCOVR_PATHS:
--root $CI_PROJECT_DIR
--exclude $CI_PROJECT_DIR/dune/dorie/test
--exclude $CI_PROJECT_DIR/plugins
script:
- $DUNECONTROL --only=dorie configure
- $DUNECONTROL --only=dorie make system_tests
# Run coverage utility (will also run after failed job)
after_script:
- source $CI_PROJECT_DIR/build-cmake/activate
- pip3 install gcovr
- cd $CI_PROJECT_DIR/build-cmake
- mkdir coverage
- gcovr $GCOVR_PATHS --html --html-details -o coverage/report.html
# run again for coverage report in GitLab
- gcovr $GCOVR_PATHS
coverage: '/^TOTAL.*\s+(\d+\%)$/'
artifacts:
name: "$CI_JOB_NAME"
paths:
- $CI_PROJECT_DIR/build-cmake/test
- $CI_PROJECT_DIR/build-cmake/coverage
expire_in: 1 day
test:unit-tests:
<<: *test
dependencies:
- build:unit-tests
needs: ["build:unit-tests"]
script:
# install coverage reporter
- $DUNECONTROL --only=dorie bexec
"./run-in-dune-env python3 -m pip install gcovr"
# perform the tests
- $DUNECONTROL --only=dorie configure
- $DUNECONTROL --only=dorie make unit_tests
# report coverage
- cd $CI_PROJECT_DIR/build-cmake
- mkdir -p dune/dorie/test/coverage
- ./run-in-dune-env gcovr
--root ../
--exclude ../dune/dorie/test
--exclude ../plugins
--html --html-details
-o dune/dorie/test/coverage/coverage.html
# run again for coverage report in GitLab
- ./run-in-dune-env gcovr
--root ../
--exclude ../dune/dorie/test
--exclude ../plugins
coverage: '/^TOTAL.*\s+(\d+\%)$/'
artifacts:
name: "$CI_JOB_NAME"
paths:
- $CI_PROJECT_DIR/build-cmake/dune/dorie/test
- $CI_PROJECT_DIR/build-cmake/coverage
expire_in: 1 day
test:python-tests:
<<: *test
dependencies: []
# TODO: Make empty once https://gitlab.com/gitlab-org/gitlab/issues/30631
# is resolved
needs: ["build:docs"] # Any quick job from previous stage
script:
- $DUNECONTROL --only=dorie configure
- $DUNECONTROL --only=dorie make test_python
......@@ -212,9 +260,12 @@ deploy:dockerhub-devel: &deploy
- $DOCKER_LOGIN
script:
- docker build -f docker/dorie.dockerfile
--build-arg DUNE_ENV_IMAGE=$DUNE_ENV_IMAGE --build-arg PROCNUM=$CPUS_DIND
--build-arg BASE_IMG_VERSION=$IMAGE_VERSION --build-arg PROCNUM=$CPUS_DIND
-t dorie/dorie:devel .
- docker push dorie/dorie:devel
environment:
name: docker/devel
url: https://hub.docker.com/r/dorie/dorie
deploy:dockerhub-stable:
<<: *deploy
......@@ -222,29 +273,55 @@ deploy:dockerhub-stable:
- tags@dorie/dorie
script:
- docker build -f docker/dorie.dockerfile
--build-arg DUNE_ENV_IMAGE=$DUNE_ENV_IMAGE --build-arg PROCNUM=$CPUS_DIND
--build-arg BASE_IMG_VERSION=$IMAGE_VERSION --build-arg PROCNUM=$CPUS_DIND
-t dorie/dorie:$CI_COMMIT_TAG .
- docker push dorie/dorie:$CI_COMMIT_TAG
environment:
name: docker/$CI_COMMIT_TAG
url: https://hub.docker.com/r/dorie/dorie
deploy:sphinx-docs:
deploy:docs:
stage: deploy
only:
- tags@dorie/dorie
- branches@dorie/dorie
dependencies:
- build:system-tests
before_script:
# install the Netlify CLI
- apt-get install -y golang-go golang-glide
- go get -d github.com/netlify/netlifyctl
- cd $HOME/go/src/github.com/netlify/netlifyctl/
- make deps build
- go install
- cd $HOME/go/bin
- build:docs
needs: ["build:docs"]
<<: *ssh_access
script:
# Create the directory on the remote, removing any prior version
- echo "Creating remote directory $DOC_REMOTE_PATH ..."
- ssh -p $DOC_PORT $DOC_HOST "rm -rf $DOC_REMOTE_PATH"
- ssh -p $DOC_PORT $DOC_HOST "mkdir -p $DOC_REMOTE_PATH/doxygen/"
# Copy sphinx & doxygen HTML documentation to remote
- cd build-cmake/doc
- echo "Uploading documentation to $DOC_REMOTE_PATH/ ..."
- scp -P $DOC_PORT -pr html $DOC_HOST:$DOC_REMOTE_PATH/
- scp -P $DOC_PORT -pr doxygen/html $DOC_HOST:$DOC_REMOTE_PATH/doxygen/
environment:
name: docs/$CI_COMMIT_REF_NAME
url: https://hermes.iup.uni-heidelberg.de/dorie_doc/$CI_COMMIT_REF_SLUG/html/
on_stop: deploy:stop_docs
# This job is called when the environment is stopped, which automatically
# happens when the respective branch is deleted
deploy:stop_docs:
stage: deploy
when: manual
variables:
# Stop GitLab from checking out the commit again (branch is deleted)
GIT_STRATEGY: none
dependencies: []
needs: ["build:docs"]
<<: *ssh_access
script:
- ./netlifyctl deploy
-A $NETFLY_DEPLOY_TOKEN
-s $NETFLY_SITE_ID
-P $CI_PROJECT_DIR/build-cmake/doc/html
- echo "Removing remote directory $DOC_REMOTE_PATH ..."
- ssh -p $DOC_PORT $DOC_HOST "rm -rf $DOC_REMOTE_PATH"
environment:
name: sphinx-docs
url: https://dorie-doc.netlify.com/
name: docs/$CI_COMMIT_REF_NAME
action: stop
(One-sentence description of what kind of bug you would like to report)
_One-sentence description of what kind of bug you would like to report_
#### Summary
(Summarise the encountered bug concisely)
### Summary
_Summarise the encountered bug concisely_
#### Steps to reproduce
(How can this be reproduced? If you can, point to specific files/configurations where the bug occurs)
### Steps to reproduce
_How can this be reproduced? If you can, point to specific files/configurations where the bug occurs_
#### What is the current _bug_ behaviour?
(More about the behaviour of the bug)
### What is the current _bug_ behaviour?
_More about the behaviour of the bug_
#### What is the expected _correct_ behaviour?
(Which behaviour would you have expected?)
### What is the expected _correct_ behaviour?
_Which behaviour would you have expected?_
#### Relevant logs, screenshots, files...?
(Anything that helps reproducing the bug)
### Relevant logs, screenshots, files...?
_Anything that helps reproducing the bug_
<!-- Put very long log outputs within the <pre></pre> tags below -->
<!-- If this doesn't apply, delete the whole <details></details> block -->
......@@ -28,8 +28,21 @@
</pre>
</details>
#### Reproducing input
_Do you have input files reproducing the problem? Insert them here:_
#### Ideas how to fix this?
(Add them here, if you have any.)
| Input data | |
| - | - |
| Simulation Case | _Description goes here_ |
| PFG config file | _if any_ |
| Grid mapping file | _if any_ |
| GMSH grid file | _if any_ |
| Boundary Condition file | |
| Parameterization file | |
| Run config file | |
/label ~bug
\ No newline at end of file
### Ideas how to fix this?
_Add them here, if you have any._
/label ~Bug
<!-- _Set the title to: "Patch Release: X.Y.Z" -->
<!-- Replace X.Y.Z with the actual version numbers everywhere -->
We're releasing patch version `X.Y.Z` for branch `X.Y-stable`! :tada:
### 1 — List of MRs to be Included
List the MRs and the commit SHA of their respective merge commits here.
Placing them in chronological order here ensures fewer issues when
cherry-picking them!
The MRs are indicated by the ~"Pick into X.Y" label.
| MR | Merge Commit SHA |
| -- | ---------------- |
| ! | ... |
### 2 — On GitLab
Use the "patch-release" template for creating a new Merge Request.
- [ ] [Create a branch][new branch] `X.Y-patch` from `X.Y-stable`
- [ ] [Create a Merge Request][new mr] with source branch `X.Y-patch` and
target branch `X.Y-stable`: !
- [ ] Merge this MR: !
- [ ] [Create tag][new tag] `X.Y.Z` from branch `X.Y-stable`
- Message:
```
Version X.Y.Z (YYYY-MM-DD)
```
- Release Notes:
```
# Version X.Y.Z (YYYY-MM-DD)
-> Copy appropriate entries from MR changelog here <-
```
- [ ] Update "Release" [project badge][badge] <!-- only if applicable -->
### 3 — On Docker Hub
- [ ] Update [description on DockerHub][DockerHub description]
- [ ] Push new `latest` tag to DockerHub <!-- only if applicable -->
### 4 — All done? :white_check_mark:
Close this issue!
/label ~Release
[new branch]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/-/branches/new
[new mr]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/merge_requests/new
[new tag]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/-/tags/new
[DockerHub description]: https://hub.docker.com/r/dorie/dorie
[badge]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/edit
......@@ -8,7 +8,7 @@
### Related issues
See #...
See #
<!--
PLEASE READ THIS
......
<!-- _Set the title to: "Release: X.Y.0" -->
<!-- Replace X.Y with the actual version numbers everywhere -->
We're rolling out version `X.Y.0`! :tada:
### 1 — In the Code
- [ ] `master`: Update version numbers in `VERSION`, `CHANGELOG.md`,
`dune.module` to `X.Y.0`.
- [ ] `master`: Update all `version="unreleased"` tags in the default config
file XML sources to `version="X.Y.0"`.
- [ ] `master`: Update all [version directives][sphinx-ver-dir] with version
`unreleased` in the user docs to `X.Y.0`.
### 2 — On GitLab
- [ ] [Create branch][new branch] `X.Y-stable` from `master`
- [ ] [Create label][new label] ~"Pick into X.Y"
- [ ] [Create tag][new tag] `X.Y.0` from branch `X.Y-stable`
- Message:
```
Version X.Y.0 (YYYY-MM-DD)
```
- Release Notes:
```
# Version X.Y.0 (YYYY-MM-DD)
-> Insert version changelog here! <-
```
Shortcut to the most up-to-date [CHANGELOG.md][changelog]
- [ ] Update "Release" [project badge][badge] <!-- only if applicable -->
### 3 — In the Code
- [ ] `master`: Update version numbers in `VERSION`, `CHANGELOG.md`,
`dune.module` to `X.Y+1-pre`.
### 4 — On Docker Hub
- [ ] Update [description on DockerHub][DockerHub description]
- [ ] Push new `latest` tag to DockerHub <!-- only if applicable -->
### 5 — All done? :white_check_mark:
Close this issue!
/label ~Release
[new branch]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/-/branches/new
[new tag]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/-/tags/new
[changelog]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/blob/master/CHANGELOG.md
[new label]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/-/labels/new
[DockerHub description]: https://hub.docker.com/r/dorie/dorie
[badge]: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/edit
[sphinx-ver-dir]: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-versionadded
### What does this MR do?
_Fill this in_
### Is there something that needs to be double checked?
<!-- Is there something a reviewer should look out for _especially_? -->
_Fill this in_
### Can this MR be accepted?
- [ ] Implemented ...
- [ ] Added test to ...
- ...
- [ ] Pipeline passing
- [ ] ...
- [ ] Added/Updated tests:
- [ ] ...
- [ ] Added/Updated documentation
- [ ] Pipeline passing <!-- please check for new warnings -->
- [ ] Squash option set <!-- unless there's a good reason -->
- [ ] Delete branch option set <!-- unless there's a good reason -->
- [ ] Added entry to `CHANGELOG.md`
_**Assignee:** If the Squash option is set, check/update the commit message right before merging!_
### Related issues
Closes #, # and #
Closes #
<!-- For automatic closing, do not forget the commas between issue numbers-->
......
<!-- _Set the title to: "Resolve "Patch Release: X.Y.Z"" -->
<!-- Replace X.Y.Z with the actual version numbers everywhere -->
We're releasing patch version `X.Y.Z` for branch `X.Y-stable`! :tada:
### Release Issue
<!-- DO NOT use automatic Issue resolution here! -->
The MRs to be considered in this update are listed in #
### Tasks
- [ ] Cherry-pick the listed commits into `X.Y-patch`
- [ ] `CHANGELOG.md`: Move appropriate entries into new section
- [ ] Update version numbers in `VERSION`, `CHANGELOG.md`, `dune.module`
- [ ] Update `version="unreleased"` tags in the default config file XML
sources <!-- only if applicable -->
- [ ] Update Sphinx version directives with version `unreleased` in the user
docs. <!-- only if applicable -->
#### Help on Cherry-Picking
Cherry-picking merge commits requires specifying the "mainline" parent, which
should always be number 1. Append a line indicating a cherry-pick to the commit
message with the `-x` argument. The following should do the trick:
```bash
git cherry-pick -m 1 -x <commits>
```
Replace `<commits>` with the Commit SHAs listed above, separated by a single
whitespace each. Make sure they are in chronological order to reduce the
number of merge conflicts! Fix such conflicts as unintrusively as possible.
### Can this MR be merged?
- [ ] Pipeline passing <!-- please check for new warnings -->
- [ ] Delete branch option set <!-- unless there's a good reason -->
### Related Issues
[submodule "plugins/vendor/spdlog"]
path = plugins/vendor/spdlog
url = https://github.com/gabime/spdlog.git
[submodule "plugins/vendor/googletest"]
path = plugins/vendor/googletest
url = https://github.com/google/googletest.git
This diff is collapsed.
cmake_minimum_required(VERSION 3.10)
project(dorie C CXX)
# Configuration options
option(COVERAGE_REPORT "Enable coverage compiler flags")
# set build type
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE "Release")
set(CMAKE_BUILD_TYPE "Release")
endif()
string(TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UPPER)
if(CMAKE_BUILD_TYPE_UPPER MATCHES DEBUG)
set(CMAKE_VERBOSE_MAKEFILE ON)
set(CMAKE_VERBOSE_MAKEFILE ON)
endif()
# add extra flags to debug compiler flags
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -Wall")
# option to change DG scheme via config file
option(EXPERIMENTAL_DG_FEATURES
"Enable experimental DG settings through the config file"
OFF
)
#
if(NOT (dune-common_DIR OR dune-common_ROOT OR
"${CMAKE_PREFIX_PATH}" MATCHES ".*dune-common.*"))
......@@ -40,6 +37,10 @@ dune_project()
dune_enable_all_packages()
dune_require_cxx_standard(MODULE "dorie" VERSION 14)
# Cache the executable path
set(DORIE_EXE_PATH ${PROJECT_BINARY_DIR}/dune/dorie/
CACHE STRING "Path to the directory containing the executables")
# add subdirectories
add_subdirectory("plugins/vendor")
add_subdirectory("m4")
......@@ -48,8 +49,9 @@ add_subdirectory("python")
add_subdirectory("doc")
add_subdirectory("dune")
add_subdirectory("lib")
if(dune-testtools_FOUND)
if(DORIE_TESTING)
add_subdirectory("test")
add_subdirectory("doc/cookbook")
endif()
# finalize the dune project, e.g. generating config.h etc.
......
# Contribution Guide
# Contributing to DORiE
We encourage everyone to contribute to the development and improvement of DORiE.
**Thank you for taking your time and contributing to DORiE!** :+1:
Please notice our included [Code of Conduct](CODE_OF_CONDUCT.md).
## Code of Conduct
### Restricted Access to Repository
Everybody participating in and contributing to this project is expected to
uphold our attached [Code of Conduct](CODE_OF_CONDUCT.md). Report any
unacceptable behavior to the [DORiE Developers][mailinglist]!
Free access to the [GitLab instance](https://ts-gitlab.iup.uni-heidelberg.de) of
the [TS-CCEES group](http://ts.iup.uni-heidelberg.de/) at the
[Institute of Environmental Physics](http://www.iup.uni-heidelberg.de/) is
restricted to members of said group.
Non-members are left with read-only access to public repositories.
## How to Contribute
### How to Contribute
DORiE is open source software. We strive for making every stage of development
public, sharing our advances, and incorporating community contributions. We
therefore prefer public contributions of any kind via GitLab.
If you would like to _actively contribute_ to DORiE, and you are affiliated
with [Heidelberg University](http://www.uni-heidelberg.de/) or an associated
research institute, you can become a member of the DORiE development team.
### GitLab Account
Otherwise, we encourage you to contribute bug reports, feature requests, or
improvement suggestions by contacting the development team
[via mail](mailto:dorieteam@iup.uni-heidelberg.de).
The DORiE repository is hosted on the private GitLab instance of the
[TS-CCEES](http://ts.iup.uni-heidelberg.de/) research group at the
[Institute of Environmental Physics (IUP) Heidelberg](http://www.iup.uni-heidelberg.de/).
As we want to keep most of our projects private, we disabled the regular
sign-up procedure. If you are not a member of said research group, we encourage
you to request an account [via mail][mailinglist]. Notice that you will only
receive an account flagged as
"[External User](https://docs.gitlab.com/ee/user/permissions.html#external-users-core-only)"
in this case, with access to the DORiE repository only.
Notice, however, that resources for development and support are stretched thin:
_Your mileage may vary._
\ No newline at end of file
### Issues and Merge Requests
Report bugs, suggest features, or plan implementations in GitLab Issues. We
provide several Description Templates you may find useful for structuring
your Issue description and providing the required information.
Any changes to source code should be accompanied by a (unit) test verifying the
functionality. Designing this test ideally happens in the planning phase of
a change.
After a proper discussion of the Issue, and the resulting implementation, open
a Merge Request. Again, we encourage you to use one of the Description
Templates. Provide information on how your code changes and additions solve the
problem or implement the feature. Make sure that your MR fulfills the
criteria for being merged.
### Old-Fashioned Email
Of course, you can always contact the developers directly
[via mail][mailinglist].
[mailinglist]: mailto:dorieteam@iup.uni-heidelberg.de
This diff is collapsed.
# Default DORiE model targets: `richards` and `transport` aggregate the
# default executables of each model; `dorie` (built by ALL) bundles both.
add_custom_target(richards)
add_custom_target(transport)
add_custom_target(dorie ALL)
add_dependencies(dorie richards transport)
# Maximum polynomial orders of Richards model for available targets.
# The _2/_3 suffix is the spatial dimension (looked up via
# DORIE_MAX_RORDER_${ARGS_DIMENSION} in dorie_compile_instance).
set(DORIE_MAX_RORDER_2 6)
set(DORIE_MAX_RORDER_3 6)
# Maximum polynomial orders of transport model for available targets
set(DORIE_MAX_TORDER_2 3)
set(DORIE_MAX_TORDER_3 3)
# Maximum polynomial orders of Richards model for default targets
# (instances within these limits are attached to the targets above)
set(DORIE_MAX_DEFAULT_RORDER_2 3)
set(DORIE_MAX_DEFAULT_RORDER_3 1)
# Maximum polynomial orders of transport model for default targets
set(DORIE_MAX_DEFAULT_TORDER_2 3)
set(DORIE_MAX_DEFAULT_TORDER_3 1)
#
# .. cmake_function:: dorie_compile_instance
#
# Adds an executable and library for the specified model.
#
# The parameters specify static settings for the model instance. If these
# settings comply to the limits of the default variables, the instance is
# added to the global "richards" or "transport" targets, depending on which
# MODEL type is built.
#
# In case of "transport", the appropriate "richards" library must be available.
# Otherwise, it is also defined by this function.
#
# A sanity check for the input variables is not performed by CMake, but by
# the C++ code during compile-time.
#
# This function takes the following arguments:
#
# - MODEL: Name of the model. Accepts "richards" or "transport".
# - DIMENSION: Spatial dimension.
# - RORDER: Finite element polynomial order of the Richards module.
# - TORDER: Finite element polynomial order of the Transport module.
# - CREATED_LIB: Variable to store the created library target name in.
#
function(dorie_compile_instance)
    # Parse the (single-value) function arguments
    set(SINGLE_ARGS MODEL DIMENSION RORDER TORDER CREATED_LIB)
    cmake_parse_arguments(ARGS "" "${SINGLE_ARGS}" "" ${ARGN})
    if (ARGS_UNPARSED_ARGUMENTS)
        # Fixed: the warning previously named the wrong function
        # ('dorie_create_executable) and had an unbalanced quote.
        message(WARNING "Unparsed arguments when calling "
                        "'dorie_compile_instance': "
                        "${ARGS_UNPARSED_ARGUMENTS}")
    endif ()

    # set dimension string
    set(DIM_STR "d${ARGS_DIMENSION}")
    # set option string
    set(OPTION_STR "r${ARGS_RORDER}")

    # Select the library source and finalize the option string per model
    if (ARGS_MODEL STREQUAL "richards")
        set (lib_src ${PROJECT_SOURCE_DIR}/dune/dorie/model/richards/impl/impl.cc)
        # issue warning if transport order is given for 'richards'
        if (ARGS_TORDER)
            message(WARNING "Ignoring argument TORDER for MODEL "
                            "'richards'")
        endif ()
    elseif (ARGS_MODEL STREQUAL "transport")
        set (lib_src ${PROJECT_SOURCE_DIR}/dune/dorie/model/coupling/impl/impl.cc)
        # append transport order to option string
        string(APPEND OPTION_STR "_t${ARGS_TORDER}")
    else ()
        # unknown model
        message(SEND_ERROR "Unsupported model: ${ARGS_MODEL}. "
                           "Must be either 'richards' or 'transport'")
    endif ()

    # Derive the target names.
    # NOTE: exe_name is computed unconditionally so the transport->richards
    # link further below also works when the library target already exists
    # (previously exe_name was only set inside the 'if (NOT TARGET ...)'
    # branch and expanded empty on repeated invocations).
    set(lib_name "dorie_${ARGS_MODEL}_${DIM_STR}_${OPTION_STR}")
    set(exe_name "${ARGS_MODEL}_${DIM_STR}_${OPTION_STR}")

    # register the library and executable (only once per configure run)
    if (NOT TARGET ${lib_name})
        add_library(${lib_name} EXCLUDE_FROM_ALL STATIC ${lib_src})
        # link to dependencies
        target_link_libraries(${lib_name}
            PUBLIC
                spdlog
                muparser::muparser
                hdf5
                yaml-cpp
                ${DUNE_LIBS}
        )

        # register the executable
        set(src_file ${CMAKE_SOURCE_DIR}/dune/dorie/${ARGS_MODEL}.cc)
        add_executable(${exe_name} EXCLUDE_FROM_ALL ${src_file})
        target_link_libraries(${exe_name} PUBLIC ${lib_name})

        # Coverage links if enabled
        if(COVERAGE_REPORT)
            target_compile_options(${exe_name} PUBLIC --coverage)
            target_link_libraries(${exe_name} PUBLIC --coverage)
        endif()

        # Add the executable to the default targets if its orders comply
        # with the DORIE_MAX_DEFAULT_* limits for this dimension
        if (ARGS_RORDER LESS_EQUAL DORIE_MAX_DEFAULT_RORDER_${ARGS_DIMENSION})
            if (ARGS_MODEL STREQUAL "richards")
                add_dependencies(${ARGS_MODEL} ${exe_name})
            elseif ((ARGS_TORDER LESS_EQUAL DORIE_MAX_DEFAULT_TORDER_${ARGS_DIMENSION})
                    AND (ARGS_RORDER EQUAL ARGS_TORDER))
                add_dependencies(${ARGS_MODEL} ${exe_name})
            endif()
        endif()

        # set compile definitions (sanity checks happen in C++ at compile time)
        target_compile_definitions(${lib_name}
            PUBLIC
                DORIE_DIM=${ARGS_DIMENSION}
                DORIE_RORDER=${ARGS_RORDER})
        if (ARGS_MODEL STREQUAL "transport")
            target_compile_definitions(${lib_name}
                PUBLIC DORIE_TORDER=${ARGS_TORDER})
        endif ()
    endif()

    # If we build a transport model, build the Richards library as well ...
    if (ARGS_MODEL STREQUAL "transport")
        dorie_compile_instance(MODEL "richards"
                               DIMENSION ${ARGS_DIMENSION}
                               RORDER ${ARGS_RORDER}
                               CREATED_LIB richards_lib
        )
        # ... and link to it!
        target_link_libraries(${exe_name} PUBLIC ${richards_lib})
    endif()

    # Report the created library target name to the caller
    if (ARGS_CREATED_LIB)
        set(${ARGS_CREATED_LIB} ${lib_name} PARENT_SCOPE)
    endif ()
endfunction()
# --- DEPENDENCIES --- #
# These macros check for the following packages, yielding the respective
# targets
#
......@@ -47,5 +48,17 @@ message (STATUS "DUNE Libraries: ${DUNE_LIBS}")
# Remove CMake policy stack
cmake_policy(POP)
# Add DORiE testing functions
include(DorieTesting)
# --- CMAKE MODULES --- #
# Include the CMake modules used in the project
include(DorieCompileInstance)
# Check if testing is enabled
if (dune-testtools_FOUND)
message(STATUS "Testing enabled: dune-testtools found.")
set(DORIE_TESTING TRUE)
# include the DORiE testing macros
include(DorieTesting)
else()
message(STATUS "Testing disabled: dune-testtools not found.")
endif()
......@@ -18,6 +18,11 @@ add_custom_target(prepare_testing
add_dependencies(system_tests prepare_testing)
add_dependencies(unit_tests prepare_testing)
# Create a fake library target to satisfy dune-testtools
add_library(dorie_test UNKNOWN IMPORTED)
set_property(TARGET dorie_test
PROPERTY IMPORTED_LOCATION ${PROJECT_BINARY_DIR}/activate)
#
# .. cmake_function:: add_coverage_links
#
......@@ -48,6 +53,12 @@ endfunction()
# The target this test applies to. This is only required if no SOURCES
# are specified.
#
# .. cmake_param:: CUSTOM_MAIN
# :option:
#
# Write a custom `main()` function for the unit test executables instead
# of generating a default one automatically.
#
# This function serves as wrapper around the function `dune_add_test` which
# registers test for existing targets or adds new test executables from the
# given source files. This function additionally registers the tests as unit
......@@ -61,7 +72,8 @@ endfunction()
#
function(dorie_add_unit_test)
set(SINGLE NAME TARGET)
cmake_parse_arguments(UNIT_TEST "" "${SINGLE}" "" ${ARGN})
set(OPTION CUSTOM_MAIN)
cmake_parse_arguments(UNIT_TEST "${OPTION}" "${SINGLE}" "" ${ARGN})
# use name prefix for test
if(NOT UNIT_TEST_NAME)
......@@ -81,7 +93,23 @@ function(dorie_add_unit_test)
# add to build target and employ compile options
target_link_libraries(${UNIT_TEST_TARGET}
muparser::muparser hdf5 yaml-cpp spdlog)
add_coverage_links(${UNIT_TEST_TARGET})
# add_coverage_links(${UNIT_TEST_TARGET})
target_compile_definitions(${UNIT_TEST_TARGET}
PUBLIC
GTEST
)
if(COVERAGE_REPORT)
add_coverage_links(${UNIT_TEST_TARGET})
endif()
if (UNIT_TEST_CUSTOM_MAIN)
target_link_libraries(${UNIT_TEST_TARGET} gtest)
else ()
target_link_libraries(${UNIT_TEST_TARGET} gtest_main)
endif()
add_dependencies(build_unit_tests ${UNIT_TEST_TARGET})
endfunction()
......@@ -94,6 +122,13 @@ endfunction()
# Registers the created tests as unit tests, including coverage flags.
# If not specified, the tests are registered as system tests.
#
# .. cmake_param:: CUSTOM_MAIN
# :option:
#
# Write a custom `main()` function for the unit test executables instead
# of generating a default one automatically. Only applies if UNIT_TEST
# is enabled.
#
# .. cmake_param:: TARGET
# :single:
#
......@@ -139,7 +174,7 @@ endfunction()
# executable will be linked to the libraries DORiE depends on.
#
function(dorie_add_metaini_test)
set(OPTIONS UNIT_TEST)
set(OPTIONS UNIT_TEST CUSTOM_MAIN)
set(SINGLE TARGET METAINI SCRIPT BASENAME CREATED_TARGETS)
cmake_parse_arguments(SYSTEM_TEST "${OPTIONS}" "${SINGLE}" "" ${ARGN})
......@@ -147,6 +182,11 @@ function(dorie_add_metaini_test)
message(SEND_ERROR "No meta ini file given!")
endif()
if(SYSTEM_TEST_CUSTOM_MAIN AND NOT SYSTEM_TEST_UNIT_TEST)
message(WARNING "Ignoring option CUSTOM_MAIN because option UNIT_TEST "
"was not enabled")
endif()
# configure meta ini file or just copy.
get_filename_component(metaini-name ${SYSTEM_TEST_METAINI} NAME_WE)
get_filename_component(metaini-extension ${SYSTEM_TEST_METAINI} EXT)
......@@ -190,16 +230,32 @@ function(dorie_add_metaini_test)
# report created targets to parent scope
set(${SYSTEM_TEST_CREATED_TARGETS} ${created_targets} PARENT_SCOPE)
# Link to dependencies
# Set properties for new target
if(NOT SYSTEM_TEST_TARGET)
# Link to dependencies
target_link_libraries(${created_targets}
muparser::muparser hdf5 yaml-cpp spdlog)
# Add coverage flags if enabled
if(COVERAGE_REPORT)
add_coverage_links(${created_targets})
endif()
endif()
# add dependencies and flags
if(SYSTEM_TEST_UNIT_TEST)
add_coverage_links(${created_targets})
add_dependencies(build_unit_tests ${created_targets})
target_compile_definitions(${created_targets}
PUBLIC
GTEST
)
if (SYSTEM_TEST_CUSTOM_MAIN)
target_link_libraries(${created_targets} gtest)
else ()
target_link_libraries(${created_targets} gtest_main)
endif()
else()
add_dependencies(build_system_tests ${created_targets})
endif()
......
......@@ -46,7 +46,7 @@ configure_file(manual/config-file.rst.in
configure_file(conf.py.in conf.py)
add_custom_target(sphinx_html
COMMAND ${SPHINX_EXECUTABLE}
COMMAND ${CMAKE_BINARY_DIR}/run-in-dune-env ${SPHINX_EXECUTABLE}
-T -b html
-c ${CMAKE_CURRENT_BINARY_DIR} # conf.py dir
-d ${CMAKE_CURRENT_BINARY_DIR}/_doctrees # doctree pickles dir
......
......@@ -213,9 +213,14 @@ breathe_default_project = "dorie"
# -- Pass special options to build setup -------------------------------------
# Config: dune-testtools module was found by CMake
meta_ini_available = "@DORIE_TESTING@".lower() in ["1", "true", "yes", "on"]
# Apply custom configuration to the Sphinx app
def setup(app):
    """Sphinx extension hook: register recommonmark options, the
    ``meta_ini_available`` config value, and the AutoStructify transform."""
    markdown_options = {
        'enable_math': True,
        'enable_inline_math': True,
    }
    app.add_config_value('recommonmark_config', markdown_options, True)
    app.add_config_value('meta_ini_available', False, 'env')
    app.add_transform(AutoStructify)
# Register the cookbook tutorial as a system test from its meta-ini template
dorie_add_metaini_test(TARGET dorie_test
METAINI tutorial-1.mini.in)
# Mirror the expanded ini back into the source tree as the tutorial's
# config.ini (kept in sync with the doc sources)
configure_file(${CMAKE_CURRENT_BINARY_DIR}/tutorial-1.ini
${CMAKE_CURRENT_SOURCE_DIR}/config.ini
COPYONLY)
# Copy files needed for the test to run
configure_file(${CMAKE_BINARY_DIR}/doc/default_files/richards_bc.yml
richards_bc.yml
COPYONLY)
configure_file(${CMAKE_BINARY_DIR}/doc/default_files/richards_param.yml
richards_param.yml
COPYONLY)
include ${CMAKE_BINARY_DIR}/doc/default_files/config.ini
_test_command = run
__name = tutorial-1
# WARNING: Any change in the line ordering of this file will affect the
# tutorial view on this file
[simulation]
mode = richards
[grid]
gridType = rectangular
dimensions = 2
extensions = 1 4
cells = 1 200
[grid.mapping]
volume = 0
[richards.parameters]
file = richards_param.yml
[richards.initial]
type = analytic
quantity = matricHead
equation = -h
[richards.boundary]
file = richards_bc.yml
[richards.output]
fileName = infiltr_homogeneous_sand_2D
outputPath = ./
**********************************
Infiltration in Homogeneous Medium
**********************************
One of the simplest DORiE simulations is the case of constant infiltration on
a 2D homogeneous medium. In particular, we will create a similar simulation to
the one conducted by Kurt Roth in his
`Soil Physics Lecture Notes <http://ts.iup.uni-heidelberg.de/teaching/#c520>`_,
Chapter 6.3.1.
Study Case
----------
Consider a uniform and isotropic soil with constant water table
(:math:`h_m = 0\,\text{m}`) at height :math:`y=0\,\text{m}` and vertical flux
in the vadose zone. We choose the :math:`y`-axis to point upwards.
For times :math:`t < 0`, the water phase is in static equilibrium, i.e.,
:math:`j_w = 0\,\text{ms}^{-1}` in the entire domain. The soil surface is
located at :math:`y=4\,\text{m}`. For :math:`t \geq 0`, the water flux through
the surface boundary is set to
:math:`j_w = 5.56 \cdot 10^{-6}\,\text{ms}^{-1} = 20\,\text{mm}\,\text{h}^{-1}`,
equivalent to heavy rainfall.
Configure DORiE Input Files
---------------------------
In what follows, we will set up input files and run the simulation step-by-step
reviewing the most important parts of the DORiE work-flow.
Simulation Mode
^^^^^^^^^^^^^^^
The first decision to make is to choose the mode of your simulation. In this
case, we are only interested in the water flow movement. Hence, the richards
mode in the configuration file is well suited for our purpose.
.. literalinclude:: tutorial-1.mini.in
:language: ini
:lines: 9-10
:caption: config.ini
Grid Creation
^^^^^^^^^^^^^
For any simulation, the :doc:`grid settings </manual/grid>` have to be
set up exactly once, since all models share the same grid.
In this case, we will create a ``rectangular`` grid by specifying the number
of ``cells`` and the ``extensions`` of the domain. Grids of this type are
directly created within DORiE, thus, the keyword ``gridFile`` is ignored.
.. note::
The original simulation is one dimensional, but DORiE only supports two and
three dimensions. Hence, we use a 2D simulation with 1 cell for the
:math:`x`-direction, as the simulation is symmetrical along this axis.
We set ``1 4`` as the ``extensions`` of the domain. This means that the
rectangular grid will be generated with an extension of :math:`1\,\text{m}` in
the :math:`x`-direction and an extension of :math:`4\,\text{m}` in
the :math:`y`-direction. Notice that the :math:`x`-direction points to the
right, and the :math:`y`-direction upwards. The point of origin in DORiE's
reference frame is located at the lower left point.
Now we have to fill a domain of this size with rectangular grid cells by
specifying the number of cells into each direction. For the
:math:`x`-direction, we will set this to ``1``. For the :math:`y`-direction,
we choose a reasonable resolution of :math:`2\,\text{cm}` per cell, meaning
that we need ``200`` cells in total. That is, we set the pair ``1 200`` of
``cells`` in the config file.
.. literalinclude:: tutorial-1.mini.in
:language: ini
:lines: 12-16
:caption: config.ini
Soil Parameterization
^^^^^^^^^^^^^^^^^^^^^
First, in the configuration file we set the
:ref:`parameterization file <man-parameter_file>` that we want to use. In this
case, we select the file ``richards_param.yml`` provided by the
``dorie create`` command.
.. literalinclude:: tutorial-1.mini.in
:language: ini
:lines: 21-22
:caption: config.ini
Now, for homogeneous materials, the key ``grid.mapping.volume`` in the config
file refers to the keyword ``index`` to use in the parameterization file for
the whole domain. That said, if the parameterization file looks like this:
.. literalinclude:: ../../default_files/richards_param.yml
:emphasize-lines: 2-3,14-15
:language: yaml
:caption: richards_param.yml
then, a ``volume`` set to ``0`` will have a parameterization for ``sand`` while
a ``volume`` set to ``1`` will have a parameterization for
``silt``. For now, let's say we want to simulate a homogeneous sand.
.. literalinclude:: tutorial-1.mini.in
:language: ini
:lines: 18-19
:caption: config.ini
Initial Condition
^^^^^^^^^^^^^^^^^
The :doc:`initial condition </manual/initial>` can be fed with HDF5 data
files or with analytic functions. In this case, we set an initial
condition with the water table at :math:`y = 0\,\text{m}` with a fluid phase in
hydrostatic equilibrium. This can be represented by an ``analytic`` function
where the ``matricHead`` is simply set to ``-h``. See the documentation of
:ref:`analytic initial conditions <man-initial-types>` for details.
.. literalinclude:: tutorial-1.mini.in
:language: ini
:lines: 24-27
:caption: config.ini
Boundary Condition
^^^^^^^^^^^^^^^^^^
The :doc:`boundary conditions file </manual/bcfile>` has to be specified by the
keyword ``richards.boundary.file`` in the configuration file. For now, we will
use the boundary condition file ``richards_bc.yml`` provided by the command
``dorie create``. By default, this file sets a constant infiltration of
:math:`j_w = -5.55\cdot 10^{-6}\,\text{ms}^{-1}` at the top, a constant matric
head of :math:`h_m = 0\,\text{m}` at the bottom, and a no-flux condition on the
sides of the domain.
.. literalinclude:: tutorial-1.mini.in
:language: ini
:lines: 29-30
:caption: config.ini
Output
^^^^^^
Finally, we give a name and a path to the
:doc:`output files </introduction/data-io>` of the simulation:
.. literalinclude:: tutorial-1.mini.in
:language: ini
:lines: 32-34
:caption: config.ini
Run DORiE
---------
Once everything is set up, we :doc:`run DORiE </manual/cli>` by calling the
command ``dorie run`` followed by the configuration file ``config.ini``:
.. code-block:: bash
dorie run config.ini
By doing this, the simulation should start and provide information about the
status of the simulation, depending on the ``logLevel`` keyword in the
configuration file. A typical DORiE simulation has the following output:
.. code-block:: none
[18:27:32.762 I] Starting DORiE
[18:27:32.762 I] Reading configuration file: config.ini
[18:27:32.768 I] Creating output directory: ./
[18:27:32.768 I] Creating a rectangular grid in 2D
[18:27:32.776 I] [RIC] Loading parameter data file: richards_param.yml
[18:27:32.779 I] [RIC] Reading boundary condition data file: richards_bc.yml
[18:27:32.784 I] [RIC] Setup complete
[18:27:33.296 I] [RIC] Time Step 0: 0.00e+00 + 1.00e+01 -> 1.00e+01
[18:27:33.581 I] [RIC] Time Step 1: 1.00e+01 + 1.50e+01 -> 2.50e+01
[18:27:33.899 I] [RIC] Time Step 2: 2.50e+01 + 2.25e+01 -> 4.75e+01
[18:27:34.177 I] [RIC] Time Step 3: 4.75e+01 + 3.38e+01 -> 8.12e+01
...
[18:27:46.863 I] [RIC] Time Step 51: 7.67e+05 + 1.00e+05 -> 8.67e+05
[18:27:46.894 I] [RIC] Time Step 52: 8.67e+05 + 1.00e+05 -> 9.67e+05
[18:27:46.923 I] [RIC] Time Step 53: 9.67e+05 + 3.34e+04 -> 1.00e+06
[18:27:46.938 I] DORiE finished after 1.47e+01s :)
Results
-------
The :ref:`results <intro-io-output>` should have been written in several
:file:`.vtu` files, one for each time step, and gathered by a :file:`.pvd`
file. By opening the latter in Paraview_ (or VisIt_) it is possible to
visualize the dynamics of the matric head, water content, and water flux as
shown below.
.. _Paraview: http://www.paraview.org/
.. _VisIt: https://visit.llnl.gov/
.. image:: result.gif
.. admonition:: Input files
============= ======================================================================
Configuration :download:`config.ini <config.ini>`
Boundary :download:`richards_bc.yml </default_files/richards_bc.yml>`
Parameters :download:`richards_param.yml </default_files/richards_param.yml>`
============= ======================================================================
**************************
ParaView for DORiE Results
**************************
`ParaView`_ is a powerful data analysis and visualization application. Its many
features make for a rather overwhelming GUI. For analyzing the output of DORiE,
we typically only need a small subset of the available tools. This is a quick
introduction on how to use ParaView for first-time users.
Opening an Output File
======================
DORiE prints the output of every time step into a separate ``.vtu`` file
containing the grid information and the data on it. We typically do not want to
open single output files but the time series ``.pvd`` files. This file
references all output files and stores their respective simulation time stamp.
.. note::
In case of a parallel run, there is one ``.vtu`` output file for each
processor per time stamp. Additionally, a single ``.pvtu`` file collects
all ``.vtu`` files for displaying the complete data set. The ``.pvd`` file
will then reference the ``.pvtu`` files, so opening it gives you the data
just like for a sequential run.
If you have ParaView installed and associated with the appropriate file types,
you can simply open the file with it by double-clicking the file in the folder
overview. Alternatively, open ParaView, choose *File* > *Open...*, and
select the file, or use the *Open* symbol (1).
The file will appear in the *Pipeline Browser* (2), where you can click on the
eye symbol to enable or disable the display of any object in your currently
selected *View*. You may then need to click *Apply* (3) in the
*Properties View* right below to actually load the data.
.. image:: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/wikis/uploads/bb836687637b4e2763379b04a857c932/1-load_data.png
:alt: ParaView screenshot on how to load data.
Working with the Direct Data Visualization
==========================================
Inside the *Render View*, the content of your file is now displayed. The 2D or
3D render is interactive: You can drag the view around and rotate it. With the
symbols in the second toolbar (1) you can reset the view direction along
certain axes.
Choosing Datasets
-----------------
Select the dataset you want to evaluate using the dropdown menu (2). If the
dataset is non-scalar (like e.g., ``flux``) you can choose to evaluate certain
components or the magnitude of the quantity.
The visualization type can be chosen with the adjacent dropdown menu (2). Most
appropriate for 2D and 3D datasets are *Surface* and *Volume*, respectively.
You can additionally superimpose the grid structure onto the surface
visualization by choosing *Surface with Edges*.
Color Maps
----------
Use the *Rescale* buttons (3) to rescale the color map to the current data
range, the data range across all time steps, the visible data range, and a
custom range.
Select *Edit Color Map* (4) to open the *Color Map Editor* (5). Here, you can
tick *Use log scale when mapping [...]* to enable a logarithmic color map.
Switch between many preset color maps by clicking on the *Choose preset* button
(7), choosing a color map, and clicking *Apply*. The current color map can be
inverted by clicking the *Invert the transfer functions* button (6).
.. image:: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/wikis/uploads/2a84972a3796dffb4a3cad40b205b622/2-visualization.png
:alt: ParaView screenshot on selecting datasets and color maps.
Animations
==========
All data displayed typically relates to a single dataset describing a single
moment in simulation time. Use the time controls (1) in the topmost toolbar to
step through the time sequence and play an animation. By default, the animation
mode is set to *Snap To TimeSteps* which displays every dataset for the same
amount of time, independently from the respective time stamp.
To visualize the sequence of datasets on a time axis, open the *Animation View*
(2) by selecting *View* > *Animation View*. For a real time sequence, choose
*RealTime* from the *Mode* dropdown menu. This mode will display the datasets
in the fraction of animation duration corresponding to their respective
simulation time. You can enter the desired total animation duration in seconds
in the *Duration* field (3).
.. image:: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/wikis/uploads/80da25e52379dccb97f1c075fea41be8/3-animation.png
:alt: ParaView screenshot with animation view opened.
Line Plots
==========
The *Plot Over Line* filter evaluates the dataset across a line and displays
the data in a line plot. Select the filter from the *Filter* menu or the button
(1) in the third toolbar.
You can choose simple locations in the *Properties* window with the
*PlotOverLine* pipeline selected in the *Pipeline Browser*. You can also set
the endpoint coordinates explicitly here. Additionally, the line indicating the
evaluation locations can be dragged around in the *Render View*.
Applying the *Plot Over Line* filter opens a new *Line Chart View* (2) where
the data is displayed. The distance along the line is given on the x-axis and
the respective dataset values are displayed on the y-axis.
With the *Line Chart View* of the line plot and the *PlotOverLine* pipeline
**both** selected, you can choose the variables displayed in the line plot, and
modify their colors and legend names (3).
.. image:: https://ts-gitlab.iup.uni-heidelberg.de/dorie/dorie/wikis/uploads/5b1e19c051bfaa13c883626769ef9651/4-line_plot.png
:alt: ParaView screenshot displaying a Plot Over Line pipeline.
.. tip::
You can display multiple *Plot Over Line* pipelines inside a single *Line
Chart View* by selecting the target view and enabling the desired pipelines
with the eye symbol in the *Pipeline Browser*.
Exporting Line Plot Data
------------------------
If you want to further analyze the data displayed by a Line Plot, you can
export it into a CSV file. To do so, select the *LineChartView* displaying the
desired data and then select *File* > *Save Data...*. Choose *Comma or Tab
Delimited Files* in the *Files of type* dropdown menu, a destination
file name, and a directory, and click *OK*. If desired, you can change the
output floating point precision in the now opening *Configure Writer* window.
Confirming with *OK* will write the file.
.. tip::
The resulting CSV file can be loaded into a ``numpy`` data array using the
`numpy.loadtxt`_ function.
Visualizing Fluxes
==================
A useful tool for visualizing fluxes in a transient situation is the *Glyph*
filter. Apply it by first selecting the dataset in the *Pipeline Browser* and
then choosing the filter from the *Filter* menu or the symbol (1) in the third
toolbar. The glyphs will be superimposed onto the visualization in the *Render