diff --git a/.clang-tidy b/.clang-tidy
new file mode 100644
index 000000000000..5b4e0fd7781a
--- /dev/null
+++ b/.clang-tidy
@@ -0,0 +1,9 @@
+# Settings file automatically used by clang-tidy
+#
+# See ./contrib/utilities/run_clang_tidy.sh for details
+
+# We disable performance-inefficient-string-concatenation because we don't care about "a"+to_string(5)+...
+
+Checks: "-*,cppcoreguidelines-pro-type-static-cast-downcast,google-readability-casting,modernize-*,-modernize-pass-by-value,-modernize-raw-string-literal,-modernize-use-auto,-modernize-use-override,-modernize-use-default-member-init,-modernize-use-transparent-functors,use-emplace,mpi-*,performance-*,-performance-inefficient-string-concatenation"
+
+WarningsAsErrors: '*'
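
Side note on the disabled check: a minimal, hypothetical C++ sketch (not part of this patch; names are made up) of the pattern performance-inefficient-string-concatenation targets, and the cheaper form the check suggests:

    #include <string>
    #include <vector>

    std::string join(const std::vector<std::string> &parts)
    {
      std::string result;
      for (const auto &p : parts)
        result = result + p; // flagged: builds a temporary string every iteration
                             // (the check suggests "result += p;" instead)
      return result;
    }

The comment above refers to one-off chains such as "a" + std::to_string(5) + ..., which the project considers harmless, hence the check is switched off.
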
diff --git a/Jenkinsfile b/Jenkinsfile
index 09370d793b2a..2f6d440b15bd 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,11 +1,25 @@
#!groovy
+// load library https://github.com/tjhei/jenkins-stuff to provide
+// killold.killOldBuilds() function:
+@Library('tjhei') _
+
pipeline
{
agent none
stages
{
+ stage("abort old")
+ {
+ agent none
+ steps
+ {
+ // kill older builds in this PR:
+ script { killold.killOldBuilds() }
+ }
+ }
+
stage("check")
{
agent
@@ -40,8 +54,17 @@ pipeline
stage("indent")
{
+ post {
+ failure {
+ githubNotify context: 'indent', description: 'failed', status: 'FAILURE'
+ }
+ }
+
steps
{
+ // we are finally running, so we can mark the 'ready' context from Jenkinsfile.mark as success:
+ githubNotify context: 'ready', description: ':-)', status: 'SUCCESS'
+
// We can not use 'indent' because we are missing the master branch:
// "fatal: ambiguous argument 'master': unknown revision or path"
sh '''
@@ -73,7 +96,23 @@ pipeline
image 'tjhei/candi:v9.0.1-r4'
}
}
- post { cleanup { cleanWs() } }
+
+ post {
+ always {
+ sh "cp /home/dealii/build/Testing/*/*.xml $WORKSPACE/serial.xml || true"
+ xunit tools: [CTest(pattern: '*.xml')]
+ sh "cp /home/dealii/build/detailed.log $WORKSPACE/detailed-serial.log || true"
+ archiveArtifacts artifacts: 'detailed-serial.log', fingerprint: true
+ }
+
+ cleanup {
+ cleanWs()
+ }
+
+ failure {
+ githubNotify context: 'CI', description: 'serial build failed', status: 'FAILURE'
+ }
+ }
steps
{
@@ -81,9 +120,9 @@ pipeline
{
sh "echo \"building on node ${env.NODE_NAME}\""
sh '''#!/bin/bash
+ set -e
export NP=`grep -c ^processor /proc/cpuinfo`
- export TEST_TIME_LIMIT=1200
- echo $NP
+ export TEST_TIME_LIMIT=1200
mkdir -p /home/dealii/build
cd /home/dealii/build
cmake -G "Ninja" \
@@ -93,8 +132,9 @@ pipeline
-D DEAL_II_UNITY_BUILD=ON \
$WORKSPACE/
time ninja -j $NP
+ time ninja test # quicktests
time ninja setup_tests
- time ctest --output-on-failure -DDESCRIPTION="CI-$JOB_NAME" -j $NP
+ time ctest --output-on-failure -DDESCRIPTION="CI-$JOB_NAME" -j $NP --no-compress-output -T test
'''
}
}
@@ -110,7 +150,23 @@ pipeline
image 'tjhei/candi:v9.0.1-r4'
}
}
- post { cleanup { cleanWs() } }
+
+ post {
+ always {
+ sh "cp /home/dealii/build/Testing/*/*.xml $WORKSPACE/mpi.xml || true"
+ xunit tools: [CTest(pattern: '*.xml')]
+ sh "cp /home/dealii/build/detailed.log $WORKSPACE/detailed-mpi.log || true"
+ archiveArtifacts artifacts: 'detailed-mpi.log', fingerprint: true
+ }
+
+ cleanup {
+ cleanWs()
+ }
+
+ failure {
+ githubNotify context: 'CI', description: 'mpi build failed', status: 'FAILURE'
+ }
+ }
steps
{
@@ -118,6 +174,7 @@ pipeline
{
sh "echo \"building on node ${env.NODE_NAME}\""
sh '''#!/bin/bash
+ set -e
export NP=`grep -c ^processor /proc/cpuinfo`
mkdir -p /home/dealii/build
cd /home/dealii/build
@@ -128,11 +185,13 @@ pipeline
-D DEAL_II_UNITY_BUILD=OFF \
$WORKSPACE/
time ninja -j $NP
+ time ninja test # quicktests
time ninja setup_tests
- time ctest -R "all-headers|multigrid/transfer" --output-on-failure -DDESCRIPTION="CI-$JOB_NAME" -j $NP
+ time ctest -R "all-headers|multigrid/transfer" --output-on-failure -DDESCRIPTION="CI-$JOB_NAME" -j $NP --no-compress-output -T test
'''
}
}
+
}
}
@@ -145,6 +204,8 @@ pipeline
steps
{
githubNotify context: 'CI', description: 'OK', status: 'SUCCESS'
+ // In case the Jenkinsfile.mark job started after we did, make sure we don't leave a pending status around:
+ githubNotify context: 'ready', description: ':-)', status: 'SUCCESS'
}
}
}
diff --git a/README.md b/README.md
index 733e289308c7..ba69add07c7f 100644
--- a/README.md
+++ b/README.md
@@ -45,7 +45,7 @@ on how to set this up.
License:
--------
-Please see the file [./LICENSE](LICENSE) for details
+Please see the file [./LICENSE.md](LICENSE.md) for details
Further information:
--------------------
diff --git a/bundled/.clang-tidy b/bundled/.clang-tidy
new file mode 100644
index 000000000000..eb40a98a77a0
--- /dev/null
+++ b/bundled/.clang-tidy
@@ -0,0 +1,6 @@
+# Disable all checks because we don't want to fix things inside
+# bundled/. Clang-tidy fails if nothing is chosen, so we just enable a single
+# harmless warning:
+
+Checks: "-*,misc-sizeof-container"
+
diff --git a/bundled/boost-1.62.0/include/boost/algorithm/cxx14/equal.hpp b/bundled/boost-1.62.0/include/boost/algorithm/cxx14/equal.hpp
index f1539f885cee..9f97be1d626d 100644
--- a/bundled/boost-1.62.0/include/boost/algorithm/cxx14/equal.hpp
+++ b/bundled/boost-1.62.0/include/boost/algorithm/cxx14/equal.hpp
@@ -13,7 +13,6 @@
#define BOOST_ALGORITHM_EQUAL_HPP
#include <algorithm>    // for std::equal
-#include <functional>   // for std::binary_function
#include
namespace boost { namespace algorithm {
@@ -21,7 +20,7 @@ namespace boost { namespace algorithm {
namespace detail {
template <class T1, class T2>
-    struct eq : public std::binary_function<T1, T2, bool> {
+    struct eq {
bool operator () ( const T1& v1, const T2& v2 ) const { return v1 == v2 ;}
};
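
The change above is the first instance of a pattern repeated throughout the bundled Boost headers in this patch: std::unary_function and std::binary_function were deprecated in C++11 and removed in C++17, so inheriting from them no longer compiles in C++17 mode. A hedged sketch of the transformation, with illustrative names only:

    // Before: the base class supplied the argument/result typedefs.
    //   template <class T1, class T2>
    //   struct eq : public std::binary_function<T1, T2, bool> { ... };
    //
    // After: drop the base; where other code may still query the typedefs,
    // spell them out explicitly so the interface stays source-compatible.
    template <class T1, class T2>
    struct eq_sketch
    {
      typedef T1   first_argument_type;
      typedef T2   second_argument_type;
      typedef bool result_type;

      bool operator()(const T1 &v1, const T2 &v2) const { return v1 == v2; }
    };
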
diff --git a/bundled/boost-1.62.0/include/boost/algorithm/string/detail/case_conv.hpp b/bundled/boost-1.62.0/include/boost/algorithm/string/detail/case_conv.hpp
index 42621c74f06e..233912ca0f20 100644
--- a/bundled/boost-1.62.0/include/boost/algorithm/string/detail/case_conv.hpp
+++ b/bundled/boost-1.62.0/include/boost/algorithm/string/detail/case_conv.hpp
@@ -30,8 +30,10 @@ namespace boost {
// a tolower functor
template<typename CharT>
-    struct to_lowerF : public std::unary_function<CharT, CharT>
+    struct to_lowerF
{
+ typedef CharT argument_type;
+ typedef CharT result_type;
// Constructor
to_lowerF( const std::locale& Loc ) : m_Loc( &Loc ) {}
@@ -50,8 +52,10 @@ namespace boost {
// a toupper functor
template<typename CharT>
-    struct to_upperF : public std::unary_function<CharT, CharT>
+    struct to_upperF
{
+ typedef CharT argument_type;
+ typedef CharT result_type;
// Constructor
to_upperF( const std::locale& Loc ) : m_Loc( &Loc ) {}
diff --git a/bundled/boost-1.62.0/include/boost/algorithm/string/detail/util.hpp b/bundled/boost-1.62.0/include/boost/algorithm/string/detail/util.hpp
index cf4a8b1c8cd4..7844b6723cc6 100644
--- a/bundled/boost-1.62.0/include/boost/algorithm/string/detail/util.hpp
+++ b/bundled/boost-1.62.0/include/boost/algorithm/string/detail/util.hpp
@@ -89,9 +89,10 @@ namespace boost {
template<
typename SeqT,
typename IteratorT=BOOST_STRING_TYPENAME SeqT::const_iterator >
- struct copy_iterator_rangeF :
- public std::unary_function< iterator_range, SeqT >
+ struct copy_iterator_rangeF
{
+ typedef iterator_range argument_type;
+ typedef SeqT result_type;
SeqT operator()( const iterator_range& Range ) const
{
return copy_range(Range);
diff --git a/bundled/boost-1.62.0/include/boost/config/compiler/visualc.hpp b/bundled/boost-1.62.0/include/boost/config/compiler/visualc.hpp
index 200d39df5e13..3a68c4d6bd9f 100644
--- a/bundled/boost-1.62.0/include/boost/config/compiler/visualc.hpp
+++ b/bundled/boost-1.62.0/include/boost/config/compiler/visualc.hpp
@@ -294,10 +294,10 @@
#endif
//
-// masterleinad: upgrade supported MSVC version to 19.16 (last checked 19.16.27024.1)
+// masterleinad: upgrade supported MSVC version to 19.20 (last checked 19.20.27404)
// Boost repo has only 19.10:
// last known and checked version is 19.10.24629 (VC++ 2017 RC):
-#if (_MSC_VER > 1916)
+#if (_MSC_VER > 1920)
# if defined(BOOST_ASSERT_CONFIG)
# error "Unknown compiler version - please run the configure tests and report the results"
# else
diff --git a/bundled/boost-1.62.0/include/boost/config/stdlib/dinkumware.hpp b/bundled/boost-1.62.0/include/boost/config/stdlib/dinkumware.hpp
index af8ddda528da..293a17675f5c 100644
--- a/bundled/boost-1.62.0/include/boost/config/stdlib/dinkumware.hpp
+++ b/bundled/boost-1.62.0/include/boost/config/stdlib/dinkumware.hpp
@@ -180,7 +180,7 @@
// If _HAS_AUTO_PTR_ETC is defined to 0, std::auto_ptr is not available.
// See https://www.visualstudio.com/en-us/news/vs2015-vs.aspx#C++
// and http://blogs.msdn.com/b/vcblog/archive/2015/06/19/c-11-14-17-features-in-vs-2015-rtm.aspx
-# if defined(_HAS_AUTO_PTR_ETC) && (_HAS_AUTO_PTR_ETC == 0)
+# if defined(_HAS_AUTO_PTR_ETC) && (_HAS_AUTO_PTR_ETC == 0) && !defined BOOST_NO_AUTO_PTR
# define BOOST_NO_AUTO_PTR
# endif
#endif
diff --git a/bundled/boost-1.62.0/include/boost/function/function_template.hpp b/bundled/boost-1.62.0/include/boost/function/function_template.hpp
index 82c81d769733..7984c8323fed 100644
--- a/bundled/boost-1.62.0/include/boost/function/function_template.hpp
+++ b/bundled/boost-1.62.0/include/boost/function/function_template.hpp
@@ -656,17 +656,6 @@ namespace boost {
BOOST_FUNCTION_TEMPLATE_PARMS
>
class BOOST_FUNCTION_FUNCTION : public function_base
-
-#if BOOST_FUNCTION_NUM_ARGS == 1
-
- , public std::unary_function
-
-#elif BOOST_FUNCTION_NUM_ARGS == 2
-
- , public std::binary_function
-
-#endif
-
{
public:
#ifndef BOOST_NO_VOID_RETURNS
diff --git a/bundled/boost-1.62.0/include/boost/functional.hpp b/bundled/boost-1.62.0/include/boost/functional.hpp
index b618485c102e..731e4e1f1c97 100644
--- a/bundled/boost-1.62.0/include/boost/functional.hpp
+++ b/bundled/boost-1.62.0/include/boost/functional.hpp
@@ -18,6 +18,36 @@
namespace boost
{
+ namespace functional
+ {
+ namespace detail {
+#if defined(_HAS_AUTO_PTR_ETC) && !_HAS_AUTO_PTR_ETC
+ // std::unary_function and std::binary_function were both removed
+ // in C++17.
+
+ template
+ struct unary_function
+ {
+ typedef Arg1 argument_type;
+ typedef Result result_type;
+ };
+
+ template
+ struct binary_function
+ {
+ typedef Arg1 first_argument_type;
+ typedef Arg2 second_argument_type;
+ typedef Result result_type;
+ };
+#else
+ // Use the standard objects when we have them.
+
+ using std::unary_function;
+ using std::binary_function;
+#endif
+ }
+ }
+
#ifndef BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION
// --------------------------------------------------------------------------
// The following traits classes allow us to avoid the need for ptr_fun
@@ -147,7 +177,7 @@ namespace boost
// --------------------------------------------------------------------------
template
class unary_negate
- : public std::unary_function::argument_type,bool>
+ : public boost::functional::detail::unary_function::argument_type,bool>
{
public:
explicit unary_negate(typename unary_traits::param_type x)
@@ -181,7 +211,7 @@ namespace boost
// --------------------------------------------------------------------------
template
class binary_negate
- : public std::binary_function::first_argument_type,
+ : public boost::functional::detail::binary_function::first_argument_type,
typename binary_traits::second_argument_type,
bool>
{
@@ -218,7 +248,7 @@ namespace boost
// --------------------------------------------------------------------------
template
class binder1st
- : public std::unary_function::second_argument_type,
+ : public boost::functional::detail::unary_function::second_argument_type,
typename binary_traits::result_type>
{
public:
@@ -264,7 +294,7 @@ namespace boost
// --------------------------------------------------------------------------
template
class binder2nd
- : public std::unary_function::first_argument_type,
+ : public boost::functional::detail::unary_function::first_argument_type,
typename binary_traits::result_type>
{
public:
@@ -309,7 +339,7 @@ namespace boost
// mem_fun, etc
// --------------------------------------------------------------------------
template
- class mem_fun_t : public std::unary_function
+ class mem_fun_t : public boost::functional::detail::unary_function
{
public:
explicit mem_fun_t(S (T::*p)())
@@ -325,7 +355,7 @@ namespace boost
};
template
- class mem_fun1_t : public std::binary_function
+ class mem_fun1_t : public boost::functional::detail::binary_function
{
public:
explicit mem_fun1_t(S (T::*p)(A))
@@ -341,7 +371,7 @@ namespace boost
};
template
- class const_mem_fun_t : public std::unary_function
+ class const_mem_fun_t : public boost::functional::detail::unary_function
{
public:
explicit const_mem_fun_t(S (T::*p)() const)
@@ -357,7 +387,7 @@ namespace boost
};
template
- class const_mem_fun1_t : public std::binary_function
+ class const_mem_fun1_t : public boost::functional::detail::binary_function
{
public:
explicit const_mem_fun1_t(S (T::*p)(A) const)
@@ -402,7 +432,7 @@ namespace boost
// mem_fun_ref, etc
// --------------------------------------------------------------------------
template
- class mem_fun_ref_t : public std::unary_function
+ class mem_fun_ref_t : public boost::functional::detail::unary_function
{
public:
explicit mem_fun_ref_t(S (T::*p)())
@@ -418,7 +448,7 @@ namespace boost
};
template
- class mem_fun1_ref_t : public std::binary_function
+ class mem_fun1_ref_t : public boost::functional::detail::binary_function
{
public:
explicit mem_fun1_ref_t(S (T::*p)(A))
@@ -434,7 +464,7 @@ namespace boost
};
template
- class const_mem_fun_ref_t : public std::unary_function
+ class const_mem_fun_ref_t : public boost::functional::detail::unary_function
{
public:
explicit const_mem_fun_ref_t(S (T::*p)() const)
@@ -451,7 +481,7 @@ namespace boost
};
template
- class const_mem_fun1_ref_t : public std::binary_function
+ class const_mem_fun1_ref_t : public boost::functional::detail::binary_function
{
public:
explicit const_mem_fun1_ref_t(S (T::*p)(A) const)
@@ -497,7 +527,7 @@ namespace boost
// ptr_fun
// --------------------------------------------------------------------------
template
- class pointer_to_unary_function : public std::unary_function
+ class pointer_to_unary_function : public boost::functional::detail::unary_function
{
public:
explicit pointer_to_unary_function(Result (*f)(Arg))
@@ -521,7 +551,7 @@ namespace boost
}
template
- class pointer_to_binary_function : public std::binary_function
+ class pointer_to_binary_function : public boost::functional::detail::binary_function
{
public:
explicit pointer_to_binary_function(Result (*f)(Arg1, Arg2))
diff --git a/bundled/boost-1.62.0/include/boost/functional/hash/extensions.hpp b/bundled/boost-1.62.0/include/boost/functional/hash/extensions.hpp
index eafaefe85dce..9f8fe4d65da1 100644
--- a/bundled/boost-1.62.0/include/boost/functional/hash/extensions.hpp
+++ b/bundled/boost-1.62.0/include/boost/functional/hash/extensions.hpp
@@ -254,8 +254,9 @@ namespace boost
#if !defined(BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION)
template struct hash
- : std::unary_function
{
+ typedef T argument_type;
+ typedef std::size_t result_type;
#if !defined(BOOST_NO_FUNCTION_TEMPLATE_ORDERING)
std::size_t operator()(T const& val) const
{
@@ -271,8 +272,9 @@ namespace boost
#if BOOST_WORKAROUND(__DMC__, <= 0x848)
template struct hash
- : std::unary_function
{
+ typedef T[n] argument_type;
+ typedef std::size_t result_type;
std::size_t operator()(const T* val) const
{
return boost::hash_range(val, val+n);
@@ -296,8 +298,9 @@ namespace boost
{
template
struct inner
- : std::unary_function
{
+ typedef T argument_type;
+ typedef std::size_t result_type;
#if !defined(BOOST_NO_FUNCTION_TEMPLATE_ORDERING)
std::size_t operator()(T const& val) const
{
diff --git a/bundled/boost-1.62.0/include/boost/functional/hash/hash.hpp b/bundled/boost-1.62.0/include/boost/functional/hash/hash.hpp
index 0a8ceeb4742e..56941c964116 100644
--- a/bundled/boost-1.62.0/include/boost/functional/hash/hash.hpp
+++ b/bundled/boost-1.62.0/include/boost/functional/hash/hash.hpp
@@ -419,8 +419,9 @@ namespace boost
#define BOOST_HASH_SPECIALIZE(type) \
template <> struct hash \
- : public std::unary_function \
{ \
+ typedef type argument_type; \
+ typedef std::size_t result_type; \
std::size_t operator()(type v) const \
{ \
return boost::hash_value(v); \
@@ -429,8 +430,9 @@ namespace boost
#define BOOST_HASH_SPECIALIZE_REF(type) \
template <> struct hash \
- : public std::unary_function \
{ \
+ typedef type argument_type; \
+ typedef std::size_t result_type; \
std::size_t operator()(type const& v) const \
{ \
return boost::hash_value(v); \
@@ -483,8 +485,9 @@ namespace boost
template
struct hash
- : public std::unary_function
{
+ typedef T* argument_type;
+ typedef std::size_t result_type;
std::size_t operator()(T* v) const
{
#if !BOOST_WORKAROUND(__SUNPRO_CC, <= 0x590)
@@ -516,8 +519,9 @@ namespace boost
{
template
struct inner
- : public std::unary_function
{
+ typedef T argument_type;
+ typedef std::size_t result_type;
std::size_t operator()(T val) const
{
#if !BOOST_WORKAROUND(__SUNPRO_CC, <= 590)
diff --git a/bundled/boost-1.62.0/include/boost/graph/astar_search.hpp b/bundled/boost-1.62.0/include/boost/graph/astar_search.hpp
index 435ccf03b53a..95795f27c2d3 100644
--- a/bundled/boost-1.62.0/include/boost/graph/astar_search.hpp
+++ b/bundled/boost-1.62.0/include/boost/graph/astar_search.hpp
@@ -46,11 +46,12 @@ namespace boost {
template <class Graph, class CostType>
-  class astar_heuristic : public std::unary_function<
-    typename graph_traits<Graph>::vertex_descriptor, CostType>
+  class astar_heuristic
{
public:
    typedef typename graph_traits<Graph>::vertex_descriptor Vertex;
+    typedef Vertex argument_type;
+    typedef CostType result_type;
    astar_heuristic() {}
    CostType operator()(Vertex u) { return static_cast<CostType>(0); }
};
diff --git a/bundled/boost-1.62.0/include/boost/graph/detail/geodesic.hpp b/bundled/boost-1.62.0/include/boost/graph/detail/geodesic.hpp
index adcb17f0ae64..0f2f2024fa2a 100644
--- a/bundled/boost-1.62.0/include/boost/graph/detail/geodesic.hpp
+++ b/bundled/boost-1.62.0/include/boost/graph/detail/geodesic.hpp
@@ -82,8 +82,11 @@ namespace detail {
// Similar to std::plus, but maximizes parameters
// rather than adding them.
template <typename T>
-    struct maximize : public std::binary_function<T, T, T>
+    struct maximize
{
+ typedef T result_type;
+ typedef T first_argument_type;
+ typedef T second_argument_type;
T operator ()(T x, T y) const
{ BOOST_USING_STD_MAX(); return max BOOST_PREVENT_MACRO_SUBSTITUTION (x, y); }
};
@@ -93,11 +96,10 @@ namespace detail {
// types, but should be specialized for those types that have
// discrete notions of reciprocals.
template <typename T>
-    struct reciprocal : public std::unary_function<T, T>
+    struct reciprocal
{
-      typedef std::unary_function<T, T> function_type;
-      typedef typename function_type::result_type result_type;
-      typedef typename function_type::argument_type argument_type;
+ typedef T result_type;
+ typedef T first_argument_type;
T operator ()(T t)
{ return T(1) / t; }
};
diff --git a/bundled/boost-1.62.0/include/boost/graph/distributed/crauser_et_al_shortest_paths.hpp b/bundled/boost-1.62.0/include/boost/graph/distributed/crauser_et_al_shortest_paths.hpp
index 060cbf9cabe9..70090ce4cfbe 100644
--- a/bundled/boost-1.62.0/include/boost/graph/distributed/crauser_et_al_shortest_paths.hpp
+++ b/bundled/boost-1.62.0/include/boost/graph/distributed/crauser_et_al_shortest_paths.hpp
@@ -95,8 +95,10 @@ namespace detail {
template
struct min_in_distance_compare
- : std::binary_function
{
+ typedef Vertex first_argument_type;
+ typedef Vertex second_argument_type;
+ typedef bool result_type;
min_in_distance_compare(DistanceMap d, MinInWeightMap m,
Combine combine, Compare compare)
: distance_map(d), min_in_weight(m), combine(combine),
@@ -119,9 +121,11 @@ namespace detail {
template
- struct min_out_distance_compare
- : std::binary_function
+  struct min_out_distance_compare
{
+ typedef Vertex first_argument_type;
+ typedef Vertex second_argument_type;
+ typedef bool result_type;
min_out_distance_compare(DistanceMap d, MinOutWeightMap m,
Combine combine, Compare compare)
: distance_map(d), min_out_weight(m), combine(combine),
diff --git a/bundled/boost-1.62.0/include/boost/graph/parallel/algorithm.hpp b/bundled/boost-1.62.0/include/boost/graph/parallel/algorithm.hpp
index eed9bf8769a0..21ad7cc29588 100644
--- a/bundled/boost-1.62.0/include/boost/graph/parallel/algorithm.hpp
+++ b/bundled/boost-1.62.0/include/boost/graph/parallel/algorithm.hpp
@@ -26,20 +26,29 @@ namespace boost { namespace parallel {
};
template
- struct minimum : std::binary_function
+ struct minimum
{
+ typedef T first_argument_type;
+ typedef T second_argument_type;
+ typedef T result_type;
const T& operator()(const T& x, const T& y) const { return x < y? x : y; }
};
template
- struct maximum : std::binary_function
+ struct maximum
{
+ typedef T first_argument_type;
+ typedef T second_argument_type;
+ typedef T result_type;
const T& operator()(const T& x, const T& y) const { return x < y? y : x; }
};
template
- struct sum : std::binary_function
+ struct sum
{
+ typedef T first_argument_type;
+ typedef T second_argument_type;
+ typedef T result_type;
const T operator()(const T& x, const T& y) const { return x + y; }
};
diff --git a/bundled/boost-1.62.0/include/boost/graph/transitive_closure.hpp b/bundled/boost-1.62.0/include/boost/graph/transitive_closure.hpp
index 4f81349bf2e1..c8c1629f455a 100644
--- a/bundled/boost-1.62.0/include/boost/graph/transitive_closure.hpp
+++ b/bundled/boost-1.62.0/include/boost/graph/transitive_closure.hpp
@@ -41,8 +41,9 @@ namespace boost
{
template < typename TheContainer, typename ST = std::size_t,
typename VT = typename TheContainer::value_type >
- struct subscript_t:public std::unary_function < ST, VT >
+ struct subscript_t
{
+ typedef ST& argument_type;
typedef VT& result_type;
subscript_t(TheContainer & c):container(&c)
diff --git a/bundled/boost-1.62.0/include/boost/iostreams/chain.hpp b/bundled/boost-1.62.0/include/boost/iostreams/chain.hpp
index bc0fab58ea12..78434f393aed 100644
--- a/bundled/boost-1.62.0/include/boost/iostreams/chain.hpp
+++ b/bundled/boost-1.62.0/include/boost/iostreams/chain.hpp
@@ -14,7 +14,6 @@
#include
#include
-#include <functional>        // unary_function.
#include <iterator>          // advance.
#include
#include <memory>            // allocator, auto_ptr.
@@ -286,7 +285,9 @@ class chain_base {
static void set_auto_close(streambuf_type* b, bool close)
{ b->set_auto_close(close); }
-    struct closer : public std::unary_function<streambuf_type*, void> {
+ struct closer {
+ typedef streambuf_type* argument_type;
+ typedef void result_type;
closer(BOOST_IOS::openmode m) : mode_(m) { }
void operator() (streambuf_type* b)
{
diff --git a/bundled/boost-1.62.0/include/boost/numeric/conversion/detail/converter.hpp b/bundled/boost-1.62.0/include/boost/numeric/conversion/detail/converter.hpp
index 10550f8daaed..2884e84e8d8a 100644
--- a/bundled/boost-1.62.0/include/boost/numeric/conversion/detail/converter.hpp
+++ b/bundled/boost-1.62.0/include/boost/numeric/conversion/detail/converter.hpp
@@ -450,10 +450,7 @@ namespace boost { namespace numeric { namespace convdetail
// Trivial Converter : used when (cv-unqualified) T == (cv-unqualified) S
//
template
- struct trivial_converter_impl : public std::unary_function< BOOST_DEDUCED_TYPENAME Traits::argument_type
- ,BOOST_DEDUCED_TYPENAME Traits::result_type
- >
- ,public dummy_range_checker
+ struct trivial_converter_impl : public dummy_range_checker
{
typedef Traits traits ;
@@ -471,10 +468,7 @@ namespace boost { namespace numeric { namespace convdetail
// Rounding Converter : used for float to integral conversions.
//
template
- struct rounding_converter : public std::unary_function< BOOST_DEDUCED_TYPENAME Traits::argument_type
- ,BOOST_DEDUCED_TYPENAME Traits::result_type
- >
- ,public RangeChecker
+ struct rounding_converter : public RangeChecker
,public Float2IntRounder
,public RawConverter
{
@@ -501,10 +495,7 @@ namespace boost { namespace numeric { namespace convdetail
// Non-Rounding Converter : used for all other conversions.
//
template
- struct non_rounding_converter : public std::unary_function< BOOST_DEDUCED_TYPENAME Traits::argument_type
- ,BOOST_DEDUCED_TYPENAME Traits::result_type
- >
- ,public RangeChecker
+ struct non_rounding_converter : public RangeChecker
,public RawConverter
{
typedef RangeChecker RangeCheckerBase ;
diff --git a/bundled/boost-1.62.0/include/boost/signals2/detail/signal_template.hpp b/bundled/boost-1.62.0/include/boost/signals2/detail/signal_template.hpp
index fb7591bbc14c..fa19499d871e 100644
--- a/bundled/boost-1.62.0/include/boost/signals2/detail/signal_template.hpp
+++ b/bundled/boost-1.62.0/include/boost/signals2/detail/signal_template.hpp
@@ -599,7 +599,6 @@ namespace boost
class BOOST_SIGNALS2_SIGNAL_CLASS_NAME(BOOST_SIGNALS2_NUM_ARGS)
BOOST_SIGNALS2_SIGNAL_TEMPLATE_SPECIALIZATION: public signal_base,
public detail::BOOST_SIGNALS2_STD_FUNCTIONAL_BASE
- (typename detail::result_type_wrapper::type)
{
typedef detail::BOOST_SIGNALS2_SIGNAL_IMPL_CLASS_NAME(BOOST_SIGNALS2_NUM_ARGS)
impl_class;
diff --git a/bundled/boost-1.62.0/include/boost/signals2/detail/signals_common_macros.hpp b/bundled/boost-1.62.0/include/boost/signals2/detail/signals_common_macros.hpp
index 4ca440382750..acc09362814d 100644
--- a/bundled/boost-1.62.0/include/boost/signals2/detail/signals_common_macros.hpp
+++ b/bundled/boost-1.62.0/include/boost/signals2/detail/signals_common_macros.hpp
@@ -137,7 +137,7 @@
#define BOOST_SIGNALS2_SIGNAL_TEMPLATE_SPECIALIZATION_DECL(arity) BOOST_SIGNALS2_SIGNAL_TEMPLATE_DECL(arity)
#define BOOST_SIGNALS2_SIGNAL_TEMPLATE_SPECIALIZATION
-#define BOOST_SIGNALS2_STD_FUNCTIONAL_BASE(result_type) std_functional_base
+#define BOOST_SIGNALS2_STD_FUNCTIONAL_BASE std_functional_base
#define BOOST_SIGNALS2_PP_COMMA_IF(arity) BOOST_PP_COMMA_IF(arity)
@@ -205,8 +205,8 @@
ExtendedSlotFunction, \
Mutex>
-#define BOOST_SIGNALS2_STD_FUNCTIONAL_BASE(result_type) \
- std_functional_base
+#define BOOST_SIGNALS2_STD_FUNCTIONAL_BASE \
+ std_functional_base
#define BOOST_SIGNALS2_PP_COMMA_IF(arity) ,
diff --git a/bundled/boost-1.62.0/include/boost/signals2/detail/slot_template.hpp b/bundled/boost-1.62.0/include/boost/signals2/detail/slot_template.hpp
index fc19f5139cc4..1c17c5b76aff 100644
--- a/bundled/boost-1.62.0/include/boost/signals2/detail/slot_template.hpp
+++ b/bundled/boost-1.62.0/include/boost/signals2/detail/slot_template.hpp
@@ -35,7 +35,7 @@ namespace boost
template
class BOOST_SIGNALS2_SLOT_CLASS_NAME(BOOST_SIGNALS2_NUM_ARGS) BOOST_SIGNALS2_SLOT_TEMPLATE_SPECIALIZATION
- : public slot_base, public detail::BOOST_SIGNALS2_STD_FUNCTIONAL_BASE(R)
+ : public slot_base, public detail::BOOST_SIGNALS2_STD_FUNCTIONAL_BASE
{
public:
diff --git a/bundled/boost-1.62.0/include/boost/signals2/detail/variadic_arg_type.hpp b/bundled/boost-1.62.0/include/boost/signals2/detail/variadic_arg_type.hpp
index 14d54b2e3ed2..db9e81c2f3d6 100644
--- a/bundled/boost-1.62.0/include/boost/signals2/detail/variadic_arg_type.hpp
+++ b/bundled/boost-1.62.0/include/boost/signals2/detail/variadic_arg_type.hpp
@@ -32,15 +32,20 @@ namespace boost
typedef typename variadic_arg_type::type type;
};
- template
+ template
struct std_functional_base
{};
- template
- struct std_functional_base: public std::unary_function
- {};
- template
- struct std_functional_base: public std::binary_function
- {};
+ template
+ struct std_functional_base
+ {
+ typedef T1 argument_type;
+ };
+ template
+ struct std_functional_base
+ {
+ typedef T1 first_argument_type;
+ typedef T2 second_argument_type;
+ };
} // namespace detail
} // namespace signals2
} // namespace boost
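
The replacement above keeps Signals2 compiling under C++17 by switching to a variadic class template whose one- and two-argument specializations restore the old typedefs. A self-contained sketch of the same idea, using hypothetical names:

    #include <type_traits>

    // Primary template: no typedefs for zero or more than two arguments.
    template <typename... Args>
    struct functional_base
    {};

    // One argument: provide argument_type, as std::unary_function did.
    template <typename T1>
    struct functional_base<T1>
    {
      typedef T1 argument_type;
    };

    // Two arguments: provide the pair of typedefs std::binary_function did.
    template <typename T1, typename T2>
    struct functional_base<T1, T2>
    {
      typedef T1 first_argument_type;
      typedef T2 second_argument_type;
    };

    static_assert(std::is_same<functional_base<int>::argument_type, int>::value, "");
    static_assert(std::is_same<functional_base<int, double>::second_argument_type, double>::value, "");
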
diff --git a/bundled/boost-1.62.0/include/boost/smart_ptr/owner_less.hpp b/bundled/boost-1.62.0/include/boost/smart_ptr/owner_less.hpp
index 6899325bd61e..88c3c226d0b9 100644
--- a/bundled/boost-1.62.0/include/boost/smart_ptr/owner_less.hpp
+++ b/bundled/boost-1.62.0/include/boost/smart_ptr/owner_less.hpp
@@ -5,53 +5,30 @@
// owner_less.hpp
//
// Copyright (c) 2008 Frank Mori Hess
+// Copyright (c) 2016 Peter Dimov
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
-// See http://www.boost.org/libs/smart_ptr/smart_ptr.htm for documentation.
+// See http://www.boost.org/libs/smart_ptr/ for documentation.
//
-#include
-
namespace boost
{
- template class shared_ptr;
- template class weak_ptr;
-
- namespace detail
- {
- template
- struct generic_owner_less : public std::binary_function
- {
- bool operator()(const T &lhs, const T &rhs) const
- {
- return lhs.owner_before(rhs);
- }
- bool operator()(const T &lhs, const U &rhs) const
- {
- return lhs.owner_before(rhs);
- }
- bool operator()(const U &lhs, const T &rhs) const
- {
- return lhs.owner_before(rhs);
- }
- };
- } // namespace detail
- template struct owner_less;
-
- template
- struct owner_less >:
- public detail::generic_owner_less, weak_ptr >
- {};
+template struct owner_less
+{
+ typedef bool result_type;
+ typedef T first_argument_type;
+ typedef T second_argument_type;
- template
- struct owner_less >:
- public detail::generic_owner_less, shared_ptr >
- {};
+ template bool operator()( U const & u, V const & v ) const
+ {
+ return u.owner_before( v );
+ }
+};
} // namespace boost
-#endif // #ifndef BOOST_SMART_PTR_OWNER_LESS_HPP_INCLUDED
+#endif // #ifndef BOOST_SMART_PTR_OWNER_LESS_HPP_INCLUDED
\ No newline at end of file
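
For reference, the rewritten boost::owner_less still works as an ordering for associative containers of smart pointers; a small usage sketch (not part of the patch), assuming the usual Boost smart-pointer headers:

    #include <set>
    #include <boost/shared_ptr.hpp>
    #include <boost/weak_ptr.hpp>
    #include <boost/smart_ptr/owner_less.hpp>

    int main()
    {
      boost::shared_ptr<int> p(new int(42));

      // Compare by owning control block (owner_before) rather than by the
      // stored pointer value, so weak_ptr keys stay usable even after expiry.
      std::set<boost::weak_ptr<int>, boost::owner_less<boost::weak_ptr<int> > > s;
      s.insert(boost::weak_ptr<int>(p));
      return s.count(boost::weak_ptr<int>(p)) == 1 ? 0 : 1;
    }
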
diff --git a/bundled/boost-1.62.0/include/boost/typeof/std/functional.hpp b/bundled/boost-1.62.0/include/boost/typeof/std/functional.hpp
index f1b157764e4e..043a2ba27a08 100644
--- a/bundled/boost-1.62.0/include/boost/typeof/std/functional.hpp
+++ b/bundled/boost-1.62.0/include/boost/typeof/std/functional.hpp
@@ -10,8 +10,10 @@
#include BOOST_TYPEOF_INCREMENT_REGISTRATION_GROUP()
+#ifndef BOOST_NO_CXX98_FUNCTION_BASE
BOOST_TYPEOF_REGISTER_TEMPLATE(std::unary_function, 2)
BOOST_TYPEOF_REGISTER_TEMPLATE(std::binary_function, 3)
+#endif//BOOST_NO_CXX98_FUNCTION_BASE
BOOST_TYPEOF_REGISTER_TEMPLATE(std::plus, 1)
BOOST_TYPEOF_REGISTER_TEMPLATE(std::minus, 1)
BOOST_TYPEOF_REGISTER_TEMPLATE(std::multiplies, 1)
diff --git a/bundled/boost-1.62.0/include/boost/utility/compare_pointees.hpp b/bundled/boost-1.62.0/include/boost/utility/compare_pointees.hpp
index 7e2515c6eea2..7914370f1fad 100644
--- a/bundled/boost-1.62.0/include/boost/utility/compare_pointees.hpp
+++ b/bundled/boost-1.62.0/include/boost/utility/compare_pointees.hpp
@@ -33,14 +33,17 @@ bool equal_pointees ( OptionalPointee const& x, OptionalPointee const& y )
}
template
-struct equal_pointees_t : std::binary_function
+struct equal_pointees_t
{
+ typedef bool result_type;
+ typedef OptionalPointee first_argument_type;
+ typedef OptionalPointee second_argument_type;
+
bool operator() ( OptionalPointee const& x, OptionalPointee const& y ) const
{ return equal_pointees(x,y) ; }
} ;
// template bool less_pointees(OP const& x, OP const& y);
-// template struct less_pointees_t;
//
// Being OP a model of OptionalPointee (either a pointer or an optional):
//
@@ -56,8 +59,12 @@ bool less_pointees ( OptionalPointee const& x, OptionalPointee const& y )
}
template
-struct less_pointees_t : std::binary_function
+struct less_pointees_t
{
+ typedef bool result_type;
+ typedef OptionalPointee first_argument_type;
+ typedef OptionalPointee second_argument_type;
+
bool operator() ( OptionalPointee const& x, OptionalPointee const& y ) const
{ return less_pointees(x,y) ; }
} ;
diff --git a/cmake/checks/check_01_cpu_features.cmake b/cmake/checks/check_01_cpu_features.cmake
index f69c1ee268f5..c86145d6feff 100644
--- a/cmake/checks/check_01_cpu_features.cmake
+++ b/cmake/checks/check_01_cpu_features.cmake
@@ -28,8 +28,8 @@
# DEAL_II_HAVE_AVX *)
# DEAL_II_HAVE_AVX512 *)
# DEAL_II_HAVE_ALTIVEC *)
-# DEAL_II_COMPILER_VECTORIZATION_LEVEL
# DEAL_II_HAVE_OPENMP_SIMD *)
+# DEAL_II_COMPILER_VECTORIZATION_LEVEL
# DEAL_II_OPENMP_SIMD_PRAGMA
#
# *)
@@ -245,7 +245,36 @@ IF(DEAL_II_ALLOW_PLATFORM_INTROSPECTION)
}
"
DEAL_II_HAVE_ALTIVEC)
-ENDIF()
+
+ #
+ # OpenMP 4.0 can be used for vectorization. Only the vectorization
+ # instructions are allowed, the threading must be done through TBB.
+ #
+
+ #
+ # Choosing the right compiler flag is a bit of a mess:
+ #
+ IF(CMAKE_CXX_COMPILER_ID MATCHES "Intel")
+ IF("${CMAKE_CXX_COMPILER_VERSION}" VERSION_GREATER "15" )
+ SET(_keyword "qopenmp")
+ ELSEIF("${CMAKE_CXX_COMPILER_VERSION}" VERSION_GREATER "14" )
+ SET(_keyword "openmp")
+ ENDIF()
+ ELSEIF(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+ SET(_keyword "openmp")
+ ELSE()
+ SET(_keyword "fopenmp")
+ ENDIF()
+
+ CHECK_CXX_COMPILER_FLAG("-${_keyword}-simd" DEAL_II_HAVE_OPENMP_SIMD)
+
+ENDIF() # IF DEAL_II_ALLOW_PLATFORM_INTROSPECTION
+
+
+#
+# Choose DEAL_II_COMPILER_VECTORIZATION level depending on AVX support
+# (that was autodetected or manually specified).
+#
IF(DEAL_II_HAVE_AVX512)
SET(DEAL_II_COMPILER_VECTORIZATION_LEVEL 3)
@@ -263,26 +292,9 @@ ENDIF()
#
-# OpenMP 4.0 can be used for vectorization. Only the vectorization
-# instructions are allowed, the threading must be done through TBB.
-#
-
+# If we have OpenMP SIMD support (i.e. DEAL_II_HAVE_OPENMP_SIMD is true)
+# populate DEAL_II_OPENMP_SIMD_PRAGMA.
#
-# Choosing the right compiler flag is a bit of a mess:
-#
-IF(CMAKE_CXX_COMPILER_ID MATCHES "Intel")
- IF("${CMAKE_CXX_COMPILER_VERSION}" VERSION_GREATER "15" )
- SET(_keyword "qopenmp")
- ELSEIF("${CMAKE_CXX_COMPILER_VERSION}" VERSION_GREATER "14" )
- SET(_keyword "openmp")
- ENDIF()
-ELSEIF(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
- SET(_keyword "openmp")
-ELSE()
- SET(_keyword "fopenmp")
-ENDIF()
-
-CHECK_CXX_COMPILER_FLAG("-${_keyword}-simd" DEAL_II_HAVE_OPENMP_SIMD)
SET(DEAL_II_OPENMP_SIMD_PRAGMA " ")
IF(DEAL_II_HAVE_OPENMP_SIMD)
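
To illustrate what the relocated OpenMP SIMD check feeds into (a hedged sketch, not part of the patch): when DEAL_II_HAVE_OPENMP_SIMD is detected, DEAL_II_OPENMP_SIMD_PRAGMA is populated with the corresponding pragma, which can then be placed in front of vectorizable loops; only vectorization is requested, threading stays with TBB. Here the macro machinery is emulated locally with made-up names:

    // HAVE_OPENMP_SIMD stands in for the configure-time result above.
    #if defined(HAVE_OPENMP_SIMD)
    #  define SIMD_PRAGMA _Pragma("omp simd")
    #else
    #  define SIMD_PRAGMA
    #endif

    void scale(double *v, const unsigned int n, const double a)
    {
      SIMD_PRAGMA
      for (unsigned int i = 0; i < n; ++i)
        v[i] *= a; // candidate for OpenMP 4.0 SIMD vectorization
    }
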
diff --git a/cmake/checks/check_01_cxx_features.cmake b/cmake/checks/check_01_cxx_features.cmake
index bf1aedec3bee..d1ec609daaf0 100644
--- a/cmake/checks/check_01_cxx_features.cmake
+++ b/cmake/checks/check_01_cxx_features.cmake
@@ -32,6 +32,17 @@
#
+#
+# MSVC needs different compiler flags to turn warnings into errors;
+# additionally, a suitable exception handling model is required.
+#
+IF(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+ SET(_werror_flag "/WX /EHsc")
+ELSE()
+ SET(_werror_flag "-Werror")
+ENDIF()
+
+
########################################################################
# #
# C++ Version Support: #
@@ -99,9 +110,16 @@ ENDIF()
MACRO(_check_cxx_flag _suffix)
IF("${DEAL_II_CXX_VERSION_FLAG}" STREQUAL "")
- CHECK_CXX_COMPILER_FLAG("-std=c++${_suffix}" DEAL_II_HAVE_FLAG_stdcxx${_suffix})
- IF(DEAL_II_HAVE_FLAG_stdcxx${_suffix})
- SET(DEAL_II_CXX_VERSION_FLAG "-std=c++${_suffix}")
+ IF(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+ CHECK_CXX_COMPILER_FLAG("/std:c++${_suffix}" DEAL_II_HAVE_FLAG_stdcxx${_suffix})
+ IF(DEAL_II_HAVE_FLAG_stdcxx${_suffix})
+ SET(DEAL_II_CXX_VERSION_FLAG "/std:c++${_suffix}")
+ ENDIF()
+ ELSE()
+ CHECK_CXX_COMPILER_FLAG("-std=c++${_suffix}" DEAL_II_HAVE_FLAG_stdcxx${_suffix})
+ IF(DEAL_II_HAVE_FLAG_stdcxx${_suffix})
+ SET(DEAL_II_CXX_VERSION_FLAG "-std=c++${_suffix}")
+ ENDIF()
ENDIF()
ENDIF()
ENDMACRO()
@@ -126,7 +144,7 @@ IF(NOT DEFINED DEAL_II_WITH_CXX17 OR DEAL_II_WITH_CXX17)
IF(NOT "${DEAL_II_CXX_VERSION_FLAG}" STREQUAL "")
# Set CMAKE_REQUIRED_FLAGS for the unit tests
MESSAGE(STATUS "Using C++ version flag \"${DEAL_II_CXX_VERSION_FLAG}\"")
- ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${DEAL_II_CXX_VERSION_FLAG} -Werror")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${DEAL_II_CXX_VERSION_FLAG} ${_werror_flag}")
UNSET_IF_CHANGED(CHECK_CXX_FEATURES_FLAGS_CXX17_SAVED
"${CMAKE_REQUIRED_FLAGS}${DEAL_II_CXX_VERSION_FLAG}"
@@ -512,6 +530,9 @@ _bailout("17" "1z")
# try to avoid adding an extra flag by doing one last test:
#
RESET_CMAKE_REQUIRED()
+IF(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "/Zc:__cplusplus")
+ENDIF()
CHECK_CXX_SOURCE_COMPILES(
"
#include
@@ -527,6 +548,7 @@ CHECK_CXX_SOURCE_COMPILES(
}
"
DEAL_II_COMPILER_DEFAULTS_TO_CXX11_OR_NEWER)
+RESET_CMAKE_REQUIRED()
IF(_user_provided_cxx_version_flag OR
NOT DEAL_II_COMPILER_DEFAULTS_TO_CXX11_OR_NEWER OR
@@ -570,7 +592,10 @@ UNSET_IF_CHANGED(CHECK_CXX_FEATURES_FLAGS_SAVED
# possibilities here.
#
ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${DEAL_II_CXX_FLAGS}")
-ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Werror -Wno-unused-command-line-argument")
+ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${_werror_flag}")
+IF(NOT CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Wno-unused-command-line-argument")
+ENDIF()
#
# first try the attribute [[fallthrough]]
#
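
For context on the attribute mentioned in the comment above (a hypothetical example, not taken from the patch): [[fallthrough]] marks deliberate fall-through between switch cases so that compiler warnings such as -Wimplicit-fallthrough stay quiet:

    int classify(const int n)
    {
      int score = 0;
      switch (n)
      {
        case 0:
          score += 1;
          [[fallthrough]]; // intentional: case 0 also receives the default handling
        default:
          score += 2;
      }
      return score;
    }
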
diff --git a/cmake/checks/check_02_compiler_features.cmake b/cmake/checks/check_02_compiler_features.cmake
index 7ac38fe19f3c..32019e2d331c 100644
--- a/cmake/checks/check_02_compiler_features.cmake
+++ b/cmake/checks/check_02_compiler_features.cmake
@@ -48,6 +48,15 @@ UNSET_IF_CHANGED(CHECK_CXX_FEATURES_FLAGS_SAVED
DEAL_II_COMPILER_HAS_ATTRIBUTE_DEPRECATED
)
+#
+# MSVC needs different compiler flags to turn warnings into errors;
+# additionally, a suitable exception handling model is required.
+#
+IF(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+ SET(_werror_flag "/WX /EHsc")
+ELSE()
+ SET(_werror_flag "-Werror")
+ENDIF()
#
# Check whether the compiler allows to use arithmetic operations
@@ -298,7 +307,10 @@ ENDIF()
# "warning #1292: unknown attribute "deprecated"" (icc)
# Hence, we treat warnings as errors:
ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${DEAL_II_CXX_FLAGS}")
-ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Werror -Wno-unused-command-line-argument")
+ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${_werror_flag}")
+IF(NOT CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Wno-unused-command-line-argument")
+ENDIF()
# first see if the compiler accepts the attribute
CHECK_CXX_SOURCE_COMPILES(
@@ -405,7 +417,7 @@ ENDIF()
#
# - Matthias Maier, 2015
#
-ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Werror")
+ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${_werror_flag}")
CHECK_CXX_SOURCE_COMPILES(
"
_Pragma(\"GCC diagnostic push\")
@@ -420,33 +432,52 @@ RESET_CMAKE_REQUIRED()
#
-# Use the 'gold' linker if possible, given that it's substantially faster.
+# Use 'lld' or the 'gold' linker if possible, given that either of them is
+# substantially faster.
#
-# We have to try to link a full executable with -fuse-ld=gold to check
-# whether "ld.gold" is actually available.
+# We have to try to link a full executable with -fuse-ld=lld or -fuse-ld=gold
+# to check whether "ld.lld" or "ld.gold" is actually available.
#
-# Clang always reports "argument unused during compilation"
-# if "-fuse-ld=" is used, but fails at link time for an unsupported linker.
+# Clang always reports "argument unused during compilation", but fails at link
+# time for an unsupported linker.
#
# ICC also emits a warning but passes for unsupported linkers
# unless we turn diagnostic warnings into errors.
#
# Wolfgang Bangerth, Matthias Maier, Daniel Arndt, 2015, 2018
#
-IF(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
- ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Wno-unused-command-line-argument")
-ELSEIF(CMAKE_CXX_COMPILER_ID MATCHES "Intel")
- ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-diag-error warn")
-ENDIF()
-ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Werror -fuse-ld=gold")
-CHECK_CXX_SOURCE_COMPILES(
- "
- int main() { return 0; }
- "
- DEAL_II_COMPILER_HAS_FUSE_LD_GOLD)
-RESET_CMAKE_REQUIRED()
+IF(NOT CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+ IF(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Wno-unused-command-line-argument")
+ ELSEIF(CMAKE_CXX_COMPILER_ID MATCHES "Intel")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-diag-error warn")
+ ENDIF()
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Werror")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-fuse-ld=lld")
+ CHECK_CXX_SOURCE_COMPILES(
+ "
+ int main() { return 0; }
+ "
+ DEAL_II_COMPILER_HAS_FUSE_LD_LLD)
+ RESET_CMAKE_REQUIRED()
-IF(DEAL_II_COMPILER_HAS_FUSE_LD_GOLD)
- ADD_FLAGS(DEAL_II_LINKER_FLAGS "-fuse-ld=gold")
-ENDIF()
+ IF(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Wno-unused-command-line-argument")
+ ELSEIF(CMAKE_CXX_COMPILER_ID MATCHES "Intel")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-diag-error warn")
+ ENDIF()
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Werror")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-fuse-ld=gold")
+ CHECK_CXX_SOURCE_COMPILES(
+ "
+ int main() { return 0; }
+ "
+ DEAL_II_COMPILER_HAS_FUSE_LD_GOLD)
+ RESET_CMAKE_REQUIRED()
+ IF(DEAL_II_COMPILER_HAS_FUSE_LD_LLD)
+ ADD_FLAGS(DEAL_II_LINKER_FLAGS "-fuse-ld=lld")
+ ELSEIF(DEAL_II_COMPILER_HAS_FUSE_LD_GOLD)
+ ADD_FLAGS(DEAL_II_LINKER_FLAGS "-fuse-ld=gold")
+ ENDIF()
+ENDIF()
diff --git a/cmake/config/template-arguments.in b/cmake/config/template-arguments.in
index 551fdd08d3cc..3370a92a6cbd 100644
--- a/cmake/config/template-arguments.in
+++ b/cmake/config/template-arguments.in
@@ -95,6 +95,10 @@ VECTOR_TYPES := { Vector;
@DEAL_II_EXPAND_TRILINOS_MPI_VECTOR@;
@DEAL_II_EXPAND_EPETRA_VECTOR@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_DOUBLE@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_FLOAT@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_COMPLEX_DOUBLE@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_COMPLEX_FLOAT@;
@DEAL_II_EXPAND_PETSC_MPI_VECTOR@;
@DEAL_II_EXPAND_TRILINOS_MPI_BLOCKVECTOR@;
@@ -120,6 +124,8 @@ REAL_VECTOR_TYPES := { Vector;
@DEAL_II_EXPAND_TRILINOS_MPI_VECTOR@;
@DEAL_II_EXPAND_EPETRA_VECTOR@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_DOUBLE@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_FLOAT@;
@DEAL_II_EXPAND_PETSC_MPI_VECTOR_REAL@;
@DEAL_II_EXPAND_TRILINOS_MPI_BLOCKVECTOR@;
@@ -138,6 +144,8 @@ REAL_NONBLOCK_VECTORS := { Vector;
@DEAL_II_EXPAND_TRILINOS_MPI_VECTOR@;
@DEAL_II_EXPAND_EPETRA_VECTOR@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_DOUBLE@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_FLOAT@;
@DEAL_II_EXPAND_PETSC_MPI_VECTOR_REAL@;
}
@@ -145,6 +153,10 @@ REAL_NONBLOCK_VECTORS := { Vector;
EXTERNAL_PARALLEL_VECTORS := { @DEAL_II_EXPAND_TRILINOS_MPI_VECTOR@;
@DEAL_II_EXPAND_TRILINOS_MPI_BLOCKVECTOR@;
@DEAL_II_EXPAND_EPETRA_VECTOR@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_DOUBLE@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_FLOAT@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_COMPLEX_DOUBLE@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_COMPLEX_FLOAT@;
@DEAL_II_EXPAND_PETSC_MPI_VECTOR@;
@DEAL_II_EXPAND_PETSC_MPI_BLOCKVECTOR@
}
@@ -163,6 +175,10 @@ VECTORS_WITH_MATRIX := { Vector;
@DEAL_II_EXPAND_TRILINOS_MPI_VECTOR@;
@DEAL_II_EXPAND_EPETRA_VECTOR@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_DOUBLE@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_FLOAT@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_COMPLEX_DOUBLE@;
+ @DEAL_II_EXPAND_TPETRA_VECTOR_COMPLEX_FLOAT@;
@DEAL_II_EXPAND_PETSC_MPI_VECTOR@;
}
@@ -250,6 +266,9 @@ SPACE_DIMENSIONS := { 1; 2; 3 }
// all ranks used for instantiating tensors
RANKS := { 1; 2; 3; 4 }
+// all ranks used for instantiating symmetric tensors
+SYM_RANKS := { 2; 4 }
+
// Flags that are allowed in DataOutInterface::set_flags
OUTPUT_FLAG_TYPES := { DXFlags; UcdFlags; GnuplotFlags; PovrayFlags; EpsFlags;
GmvFlags; TecplotFlags; VtkFlags; SvgFlags;
diff --git a/cmake/configure/CUDAComputeCapability/cuda_compute_capability.cu b/cmake/configure/CUDAComputeCapability/cuda_compute_capability.cu
new file mode 100644
index 000000000000..23225d463234
--- /dev/null
+++ b/cmake/configure/CUDAComputeCapability/cuda_compute_capability.cu
@@ -0,0 +1,30 @@
+// ---------------------------------------------------------------------
+//
+// Copyright (C) 2019 by the deal.II authors
+//
+// This file is part of the deal.II library.
+//
+// The deal.II library is free software; you can use it, redistribute
+// it, and/or modify it under the terms of the GNU Lesser General
+// Public License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+// The full text of the license can be found in the file LICENSE.md at
+// the top level directory of deal.II.
+//
+// ---------------------------------------------------------------------
+
+#include <iostream>
+
+int main()
+{
+ cudaDeviceProp device_properties;
+ const cudaError_t error = cudaGetDeviceProperties(&device_properties,
+ /*device*/0);
+ if( error != cudaSuccess)
+ {
+ std::cout << "CUDA error: " << cudaGetErrorString(error) << '\n';
+ return error;
+ }
+ std::cout << device_properties.major << device_properties.minor;
+ return 0;
+}
diff --git a/cmake/configure/configure_1_cuda.cmake b/cmake/configure/configure_1_cuda.cmake
index ce01a3b95a34..94c34940db70 100644
--- a/cmake/configure/configure_1_cuda.cmake
+++ b/cmake/configure/configure_1_cuda.cmake
@@ -85,6 +85,41 @@ MACRO(FEATURE_CUDA_FIND_EXTERNAL var)
SET(CUDA_COMPUTE_CAPABILITY "${CMAKE_MATCH_1}")
ELSEIF("${DEAL_II_CUDA_FLAGS_SAVED}" MATCHES "-arch=sm_([0-9]*)")
SET(CUDA_COMPUTE_CAPABILITY "${CMAKE_MATCH_1}")
+ ELSEIF(DEAL_II_ALLOW_PLATFORM_INTROSPECTION)
+ #
+ # Try to autodetect the CUDA Compute Capability by asking the device
+ #
+ SET(_binary_test_dir ${CMAKE_CURRENT_BINARY_DIR}/cmake/configure/CUDAComputeCapabilityWorkdir)
+ FILE(REMOVE_RECURSE ${_binary_test_dir})
+ FILE(MAKE_DIRECTORY ${_binary_test_dir})
+
+ EXECUTE_PROCESS(
+ COMMAND ${CUDA_NVCC_EXECUTABLE}
+ -ccbin=${CMAKE_CXX_COMPILER}
+ ${CMAKE_CURRENT_SOURCE_DIR}/cmake/configure/CUDAComputeCapability/cuda_compute_capability.cu
+ -o cuda_compute_capability
+ WORKING_DIRECTORY ${_binary_test_dir}
+ OUTPUT_QUIET
+ ERROR_QUIET
+ )
+ EXECUTE_PROCESS(COMMAND ${_binary_test_dir}/cuda_compute_capability
+ RESULT_VARIABLE _result
+ OUTPUT_VARIABLE CUDA_COMPUTE_CAPABILITY)
+ IF(${_result} EQUAL 0)
+ ADD_FLAGS(DEAL_II_CUDA_FLAGS "-arch=sm_${CUDA_COMPUTE_CAPABILITY}")
+ MESSAGE(STATUS "Detected CUDA Compute Capability ${CUDA_COMPUTE_CAPABILITY}")
+ ELSE()
+ MESSAGE(STATUS "Couldn't detect CUDA Compute Capability! "
+ "The error message was: ${CUDA_COMPUTE_CAPABILITY}")
+ SET(CUDA_ADDITIONAL_ERROR_STRING
+ ${CUDA_ADDITIONAL_ERROR_STRING}
+ "Couldn't detect CUDA Compute Capability! "
+ "The error message was: ${CUDA_COMPUTE_CAPABILITY}\n"
+ "Please check the return value of ${_binary_test_dir}/cuda_compute_capability.\n"
+ "If you want to disable the autodetection, set the compute capability to be used manually."
+ )
+ SET(${var} FALSE)
+ ENDIF()
ELSE()
#
# Assume a cuda compute capability of 35
diff --git a/cmake/configure/configure_1_lapack.cmake b/cmake/configure/configure_1_lapack.cmake
index 2eb4519a15db..b773ccbb8d1e 100644
--- a/cmake/configure/configure_1_lapack.cmake
+++ b/cmake/configure/configure_1_lapack.cmake
@@ -18,6 +18,7 @@
#
MACRO(FEATURE_LAPACK_FIND_EXTERNAL var)
+ CLEAR_CMAKE_REQUIRED()
FIND_PACKAGE(LAPACK)
#
diff --git a/cmake/configure/configure_1_mpi.cmake b/cmake/configure/configure_1_mpi.cmake
index f5e349b1126e..a5929c590838 100644
--- a/cmake/configure/configure_1_mpi.cmake
+++ b/cmake/configure/configure_1_mpi.cmake
@@ -39,44 +39,6 @@ MACRO(FEATURE_MPI_FIND_EXTERNAL var)
SET(${var} FALSE)
ENDIF()
- CHECK_COMPILER_SETUP(
- "${DEAL_II_CXX_FLAGS} ${DEAL_II_CXX_FLAGS_SAVED} ${MPI_CXX_FLAGS}"
- "${DEAL_II_LINKER_FLAGS} ${DEAL_II_LINKER_FLAGS_SAVED} ${MPI_LINKER_FLAGS}"
- MPI_WORKING_COMPILER
- ${DEAL_II_LIBRARIES} ${MPI_LIBRARIES}
- )
-
- IF(NOT MPI_WORKING_COMPILER)
- #
- # Try a workaround and drop "-fuse-ld=gold" (if present) from the
- # linker invocation
- #
- MESSAGE(STATUS "Unable to compile a simple test program. "
- "Try to drop \"-fuse-ld=gold\" from the linker flags."
- )
- STRING(REPLACE "-fuse-ld=gold" "" _filtered_flags "${DEAL_II_LINKER_FLAGS}")
-
- CHECK_COMPILER_SETUP(
- "${DEAL_II_CXX_FLAGS} ${DEAL_II_CXX_FLAGS_SAVED} ${MPI_CXX_FLAGS}"
- "${_filtered_flags} ${DEAL_II_LINKER_FLAGS_SAVED} ${MPI_LINKER_FLAGS}"
- MPI_WORKING_COMPILER
- ${DEAL_II_LIBRARIES} ${MPI_LIBRARIES}
- )
-
- IF(MPI_WORKING_COMPILER)
- SET(DEAL_II_LINKER_FLAGS "${_filtered_flags}")
- ELSE()
- MESSAGE(STATUS "Could not find a sufficient MPI installation: "
- "Unable to compile a simple test program."
- )
- SET(MPI_ADDITIONAL_ERROR_STRING
- ${MPI_ADDITIONAL_ERROR_STRING}
- "Unable to compile and link a simple test program with your MPI installation. \n"
- )
- SET(${var} FALSE)
- ENDIF()
- ENDIF()
-
ENDIF()
ENDMACRO()
diff --git a/cmake/configure/configure_2_boost.cmake b/cmake/configure/configure_2_boost.cmake
index 5c6f9c35a806..1637b16695c8 100644
--- a/cmake/configure/configure_2_boost.cmake
+++ b/cmake/configure/configure_2_boost.cmake
@@ -58,7 +58,11 @@ MACRO(FEATURE_BOOST_CONFIGURE_COMMON)
# Older boost versions can't know about this but provide a possibility to
# circumvent the issue. Hence, we just check ourselves.
ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${DEAL_II_CXX_VERSION_FLAG}")
- ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Werror")
+ IF(CMAKE_CXX_COMPILER_ID MATCHES "MSVC")
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "/WX /EHsc")
+ ELSE()
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "-Werror")
+ ENDIF()
CHECK_CXX_SOURCE_COMPILES(
"
diff --git a/cmake/configure/configure_2_ginkgo.cmake b/cmake/configure/configure_2_ginkgo.cmake
new file mode 100644
index 000000000000..5b778ae75a11
--- /dev/null
+++ b/cmake/configure/configure_2_ginkgo.cmake
@@ -0,0 +1,39 @@
+## ---------------------------------------------------------------------
+##
+## Copyright (C) 2018 by the deal.II authors
+##
+## This file is part of the deal.II library.
+##
+## The deal.II library is free software; you can use it, redistribute
+## it, and/or modify it under the terms of the GNU Lesser General
+## Public License as published by the Free Software Foundation; either
+## version 2.1 of the License, or (at your option) any later version.
+## The full text of the license can be found in the file LICENSE.md at
+## the top level directory of deal.II.
+##
+## ---------------------------------------------------------------------
+
+#
+# Configuration for the Ginkgo library:
+#
+
+MACRO(FEATURE_GINKGO_ERROR_MESSAGE)
+ MESSAGE(FATAL_ERROR "\n"
+ "Could not find Ginkgo and supporting libraries!\n"
+ "Please ensure that the libraries are installed on your computer.\n"
+ "If the libraries are not at a default location, either provide some hints\n"
+ "for the autodetection:\n"
+ " $ GINKGO_DIR=\"...\" cmake <...>\n"
+ " $ cmake -DGINKGO_DIR=\"...\" <...>\n"
+ "or set the relevant variables by hand in ccmake.\n"
+ "Relevant hints for GINKGO are GINKGO_DIR.\n"
+ )
+ENDMACRO()
+
+MACRO(FEATURE_GINKGO_CONFIGURE_EXTERNAL)
+ SET(DEAL_II_GINKGO_BUILT_REFERENCE ${GINKGO_BUILT_REFERENCE})
+ SET(DEAL_II_GINKGO_BUILT_OPENMP ${GINKGO_BUILT_OMP})
+ SET(DEAL_II_GINKGO_BUILT_CUDA ${GINKGO_BUILT_CUDA})
+ENDMACRO()
+
+CONFIGURE_FEATURE(GINKGO)
diff --git a/cmake/configure/configure_2_trilinos.cmake b/cmake/configure/configure_2_trilinos.cmake
index c53f105c0995..7a1f32cc5cc0 100644
--- a/cmake/configure/configure_2_trilinos.cmake
+++ b/cmake/configure/configure_2_trilinos.cmake
@@ -146,7 +146,7 @@ MACRO(FEATURE_TRILINOS_FIND_EXTERNAL var)
CHECK_MPI_INTERFACE(TRILINOS ${var})
IF (${var})
- FOREACH(_optional_module EpetraExt ROL Sacado Zoltan)
+ FOREACH(_optional_module EpetraExt ROL Sacado Tpetra Zoltan)
ITEM_MATCHES(_module_found ${_optional_module} ${Trilinos_PACKAGE_LIST})
IF(_module_found)
MESSAGE(STATUS "Found ${_optional_module}")
@@ -158,6 +158,49 @@ MACRO(FEATURE_TRILINOS_FIND_EXTERNAL var)
ENDFOREACH()
ENDIF()
+ IF(${DEAL_II_TRILINOS_WITH_TPETRA})
+ #
+ # Check if Tpetra is usable in fact.
+ #
+ LIST(APPEND CMAKE_REQUIRED_INCLUDES ${Trilinos_INCLUDE_DIRS})
+ LIST(APPEND CMAKE_REQUIRED_INCLUDES ${MPI_CXX_INCLUDE_PATH})
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "${DEAL_II_CXX_VERSION_FLAG}")
+ CHECK_SYMBOL_EXISTS(
+ "KOKKOS_ENABLE_CUDA_LAMBDA"
+ "Kokkos_Macros.hpp"
+ DEAL_II_KOKKOS_LAMBDA_EXISTS)
+ IF(${DEAL_II_KOKKOS_LAMBDA_EXISTS})
+ ADD_FLAGS(CMAKE_REQUIRED_FLAGS "--expt-extended-lambda")
+ ENDIF()
+ LIST(APPEND CMAKE_REQUIRED_LIBRARIES "${Trilinos_LIBRARIES}")
+ CHECK_CXX_SOURCE_COMPILES(
+ "
+ #include
+ int
+ main()
+ {
+ using LO = int;
+ using GO = unsigned int;
+ using Node = Kokkos::Compat::KokkosDeviceWrapperNode;
+ using map_type = Tpetra::Map;
+ Teuchos::RCP dummy_map = Teuchos::rcp(new map_type());
+ Tpetra::Vector dummy_vector(dummy_map);
+ (void)dummy_vector;
+ return 0;
+ }
+ "
+ TRILINOS_TPETRA_IS_FUNCTIONAL
+ )
+ RESET_CMAKE_REQUIRED()
+ IF(NOT TRILINOS_TPETRA_IS_FUNCTIONAL)
+ MESSAGE(
+ STATUS
+ "Tpetra was found but is not usable! Disabling Tpetra support."
+ )
+ SET(DEAL_II_TRILINOS_WITH_TPETRA OFF)
+ ENDIF()
+ ENDIF()
+
IF(${DEAL_II_TRILINOS_WITH_SACADO})
#
# Look for Sacado_config.h - we'll query it to determine C++11 support:
@@ -227,6 +270,14 @@ MACRO(FEATURE_TRILINOS_CONFIGURE_EXTERNAL)
SET(DEAL_II_EXPAND_TRILINOS_MPI_VECTOR "TrilinosWrappers::MPI::Vector")
IF (TRILINOS_WITH_MPI)
SET(DEAL_II_EXPAND_EPETRA_VECTOR "LinearAlgebra::EpetraWrappers::Vector")
+ IF (${DEAL_II_TRILINOS_WITH_TPETRA})
+      SET(DEAL_II_EXPAND_TPETRA_VECTOR_DOUBLE "LinearAlgebra::TpetraWrappers::Vector<double>")
+      SET(DEAL_II_EXPAND_TPETRA_VECTOR_FLOAT "LinearAlgebra::TpetraWrappers::Vector<float>")
+ IF (${DEAL_II_WITH_COMPLEX_NUMBERS})
+        SET(DEAL_II_EXPAND_TPETRA_VECTOR_COMPLEX_DOUBLE "LinearAlgebra::TpetraWrappers::Vector<std::complex<double>>")
+        SET(DEAL_II_EXPAND_TPETRA_VECTOR_COMPLEX_FLOAT "LinearAlgebra::TpetraWrappers::Vector<std::complex<float>>")
+ ENDIF()
+ ENDIF()
ENDIF()
IF(${DEAL_II_TRILINOS_WITH_SACADO})
# Note: Only CMake 3.0 and greater support line continuation with the "\" character
diff --git a/cmake/configure/configure_scalapack.cmake b/cmake/configure/configure_scalapack.cmake
index 86ac09a3ab1a..08820ceecbbd 100644
--- a/cmake/configure/configure_scalapack.cmake
+++ b/cmake/configure/configure_scalapack.cmake
@@ -26,6 +26,36 @@ MACRO(FEATURE_SCALAPACK_FIND_EXTERNAL var)
IF(SCALAPACK_FOUND)
SET(${var} TRUE)
CHECK_MPI_INTERFACE(SCALAPACK ${var})
+
+ IF (${var})
+ SET(CMAKE_REQUIRED_LIBRARIES ${SCALAPACK_LIBRARIES} ${LAPACK_LIBRARIES})
+ CHECK_C_SOURCE_COMPILES("
+ void pdsyevr_();
+ void pssyevr_();
+ int main(){
+ pdsyevr_();
+ pssyevr_();
+ return 0;
+ }"
+ DEAL_II_SCALAPACK_HAS_PDSYEVR_PSSYEVR)
+ RESET_CMAKE_REQUIRED()
+
+ IF(NOT DEAL_II_SCALAPACK_HAS_PDSYEVR_PSSYEVR)
+ MESSAGE(STATUS "Could not find a sufficient SCALAPACK installation: "
+ "The required symbols pdsyevr_ and pssyevr_ were not found."
+ )
+ SET(SCALAPACK_ADDITIONAL_ERROR_STRING
+ ${SCALAPACK_ADDITIONAL_ERROR_STRING}
+ "Could not find a sufficient SCALAPACK installation: \n"
+ "SCALAPACK symbol check for pdsyevr_ and pssyevr_ failed! "
+ "This usually means that your SCALAPACK installation is incomplete "
+ "or the link line is broken. Consult\n"
+ " CMakeFiles/CMakeError.log\n"
+ "for further information.\n"
+ )
+ SET(${var} FALSE)
+ ENDIF()
+ ENDIF()
ENDIF()
ENDMACRO()
diff --git a/cmake/configure/configure_sundials.cmake b/cmake/configure/configure_sundials.cmake
index 4a67fdd8bcd7..0c62c14589de 100644
--- a/cmake/configure/configure_sundials.cmake
+++ b/cmake/configure/configure_sundials.cmake
@@ -17,6 +17,34 @@
# Configuration for the SUNDIALS library:
#
+MACRO(FEATURE_SUNDIALS_FIND_EXTERNAL var)
+ FIND_PACKAGE(SUNDIALS)
+
+ IF(SUNDIALS_FOUND)
+ SET(${var} TRUE)
+
+ #
+ # We don't support version 4.0.0 or later yet.
+ #
+ SET(_first_unsupported_sundials_version 4.0.0)
+ IF(NOT SUNDIALS_VERSION VERSION_LESS ${_first_unsupported_sundials_version})
+ MESSAGE(STATUS
+ "Insufficient SUNDIALS installation found: "
+ "version ${_first_unsupported_sundials_version} "
+ "or later is not yet supported, "
+ "but version ${SUNDIALS_VERSION} was found."
+ )
+ SET(SUNDIALS_ADDITIONAL_ERROR_STRING
+ "Insufficient SUNDIALS installation found!\n"
+ "Version ${_first_unsupported_sundials_version} "
+ "or later is not yet supported, "
+ "but version ${SUNDIALS_VERSION} was found.\n"
+ )
+ SET(${var} FALSE)
+ ENDIF()
+ ENDIF()
+ENDMACRO()
+
MACRO(FEATURE_SUNDIALS_CONFIGURE_EXTERNAL)
SET(DEAL_II_SUNDIALS_WITH_IDAS ${SUNDIALS_WITH_IDAS})
ENDMACRO()
diff --git a/cmake/configure/configure_symengine.cmake b/cmake/configure/configure_symengine.cmake
new file mode 100644
index 000000000000..20da3601c285
--- /dev/null
+++ b/cmake/configure/configure_symengine.cmake
@@ -0,0 +1,63 @@
+## ---------------------------------------------------------------------
+##
+## Copyright (C) 2019 by the deal.II authors
+##
+## This file is part of the deal.II library.
+##
+## The deal.II library is free software; you can use it, redistribute
+## it, and/or modify it under the terms of the GNU Lesser General
+## Public License as published by the Free Software Foundation; either
+## version 2.1 of the License, or (at your option) any later version.
+## The full text of the license can be found in the file LICENSE at
+## the top level of the deal.II distribution.
+##
+## ---------------------------------------------------------------------
+
+#
+# Configuration for the SymEngine library:
+#
+
+
+MACRO(FEATURE_SYMENGINE_FIND_EXTERNAL var)
+ FIND_PACKAGE(SYMENGINE)
+
+ IF(SYMENGINE_FOUND)
+ SET(${var} TRUE)
+
+ #
+ # We require at least version 0.4 of the symengine library:
+ #
+ SET(_version_required "0.4")
+
+ IF(SYMENGINE_VERSION VERSION_LESS ${_version_required})
+ MESSAGE(STATUS "Insufficient SymEngine installation found: "
+ "At least version ${_version_required} is required "
+ "but version ${SYMENGINE_VERSION} was found."
+ )
+ SET(SYMENGINE_ADDITIONAL_ERROR_STRING
+ "Insufficient SymEngine installation found!\n"
+ "At least version ${_version_required} is required "
+ "but version ${SYMENGINE_VERSION} was found.\n"
+ )
+ SET(${var} FALSE)
+ ENDIF()
+ ENDIF()
+ENDMACRO()
+
+MACRO(FEATURE_SYMENGINE_CONFIGURE_EXTERNAL)
+ SET(DEAL_II_SYMENGINE_WITH_LLVM ${SYMENGINE_WITH_LLVM})
+
+ IF(DEAL_II_SYMENGINE_WITH_LLVM)
+ MESSAGE(STATUS "Configured with SymEngine LLVM capabilities.")
+ ENDIF()
+
+ #
+ # Overwrite the compiler flags imported from SymEngine
+ #
+ SET(SYMENGINE_CXX_FLAGS)
+ SET(SYMENGINE_CXX_FLAGS_DEBUG)
+ SET(SYMENGINE_CXX_FLAGS_RELEASE)
+ENDMACRO()
+
+
+CONFIGURE_FEATURE(SYMENGINE)
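
As an aside, if the installed SymEngine ships a package version file, the minimum-version requirement could also be expressed directly in the find_package() call. This is only a sketch of that alternative, not what configure_symengine.cmake does (the explicit check above produces a friendlier error message):

```cmake
cmake_minimum_required(VERSION 3.5)
project(symengine_version_demo CXX)

# Ask for at least version 0.4; this relies on SymEngine providing a
# SymEngineConfigVersion.cmake file next to its config file.
find_package(SymEngine 0.4 CONFIG QUIET)

if(SymEngine_FOUND)
  message(STATUS "Found SymEngine ${SymEngine_VERSION}")
else()
  message(STATUS "No SymEngine >= 0.4 found")
endif()
```
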
diff --git a/cmake/macros/macro_deal_ii_invoke_autopilot.cmake b/cmake/macros/macro_deal_ii_invoke_autopilot.cmake
index 94fcbc8a4982..e93e95a0cf75 100644
--- a/cmake/macros/macro_deal_ii_invoke_autopilot.cmake
+++ b/cmake/macros/macro_deal_ii_invoke_autopilot.cmake
@@ -138,21 +138,37 @@ MACRO(DEAL_II_INVOKE_AUTOPILOT)
)
ENDIF()
+ #
# Define custom targets to easily switch the build type:
- ADD_CUSTOM_TARGET(debug
- COMMAND ${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE=Debug ${CMAKE_SOURCE_DIR}
- COMMAND ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target all
- COMMENT "Switch CMAKE_BUILD_TYPE to Debug"
- )
+ #
- ADD_CUSTOM_TARGET(release
- COMMAND ${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE=Release ${CMAKE_SOURCE_DIR}
- COMMAND ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target all
- COMMENT "Switch CMAKE_BUILD_TYPE to Release"
- )
+ IF(${DEAL_II_BUILD_TYPE} MATCHES "Debug")
+ ADD_CUSTOM_TARGET(debug
+ COMMAND ${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE=Debug ${CMAKE_SOURCE_DIR}
+ COMMAND ${CMAKE_COMMAND} -E echo "***"
+ COMMAND ${CMAKE_COMMAND} -E echo "*** Switched to Debug mode. Now recompile with: ${_make_command}"
+ COMMAND ${CMAKE_COMMAND} -E echo "***"
+ COMMENT "Switching CMAKE_BUILD_TYPE to Debug"
+ VERBATIM
+ )
+ ENDIF()
+ IF(${DEAL_II_BUILD_TYPE} MATCHES "Release")
+ ADD_CUSTOM_TARGET(release
+ COMMAND ${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE=Release ${CMAKE_SOURCE_DIR}
+ COMMAND ${CMAKE_COMMAND} -E echo "***"
+ COMMAND ${CMAKE_COMMAND} -E echo "*** Switched to Release mode. Now recompile with: ${_make_command}"
+ COMMAND ${CMAKE_COMMAND} -E echo "***"
+ COMMENT "Switching CMAKE_BUILD_TYPE to Release"
+ VERBATIM
+ )
+ ENDIF()
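
In isolation, the new target definitions boil down to the following sketch for a generic project. Using `cmake -E echo` together with VERBATIM keeps the message portable across shells and generators; the target only re-runs CMake and prints a reminder instead of rebuilding immediately.

```cmake
cmake_minimum_required(VERSION 3.5)
project(switch_build_type_demo CXX)

add_custom_target(debug
  COMMAND ${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE=Debug ${CMAKE_SOURCE_DIR}
  COMMAND ${CMAKE_COMMAND} -E echo "***"
  COMMAND ${CMAKE_COMMAND} -E echo "*** Switched to Debug mode. Now recompile."
  COMMAND ${CMAKE_COMMAND} -E echo "***"
  COMMENT "Switching CMAKE_BUILD_TYPE to Debug"
  VERBATIM
  )

add_custom_target(release
  COMMAND ${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE=Release ${CMAKE_SOURCE_DIR}
  COMMAND ${CMAKE_COMMAND} -E echo "***"
  COMMAND ${CMAKE_COMMAND} -E echo "*** Switched to Release mode. Now recompile."
  COMMAND ${CMAKE_COMMAND} -E echo "***"
  COMMENT "Switching CMAKE_BUILD_TYPE to Release"
  VERBATIM
  )
```
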
+
+ #
# Only mention release and debug targets if it is actually possible to
# switch between them:
+ #
+
IF(${DEAL_II_BUILD_TYPE} MATCHES "DebugRelease")
SET(_switch_targets
"# ${_make_command} debug - to switch the build type to 'Debug'
diff --git a/cmake/modules/FindBOOST.cmake b/cmake/modules/FindBOOST.cmake
index 423dcd047ee8..8274a3613ee7 100644
--- a/cmake/modules/FindBOOST.cmake
+++ b/cmake/modules/FindBOOST.cmake
@@ -50,6 +50,12 @@ ENDIF()
# temporarily disable ${CMAKE_SOURCE_DIR}/cmake/modules for module lookup
LIST(REMOVE_ITEM CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/modules/)
+
+# Work around a CMake compatibility issue with boost-1.70.0
+# compare https://gitlab.kitware.com/cmake/cmake/issues/18865
+# and https://lists.boost.org/Archives/boost/2019/02/245016.php
+SET(Boost_NO_BOOST_CMAKE ON)
+
IF(DEAL_II_WITH_ZLIB)
FIND_PACKAGE(Boost ${BOOST_VERSION_REQUIRED} COMPONENTS
iostreams serialization system thread
@@ -77,7 +83,7 @@ IF(NOT Boost_FOUND AND Boost_USE_STATIC_LIBS)
LIST(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/modules/)
ENDIF()
-
+UNSET(Boost_NO_BOOST_CMAKE)
IF(Boost_FOUND)
#
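
The workaround in this hunk can be reproduced in any project that runs into the boost-1.70.0 issue; a minimal sketch (version and component list are placeholders):

```cmake
# Force CMake's own FindBoost module instead of the BoostConfig.cmake package
# shipped by boost-1.70.0, then restore the default behavior afterwards.
set(Boost_NO_BOOST_CMAKE ON)

find_package(Boost 1.59 COMPONENTS iostreams serialization system thread)

unset(Boost_NO_BOOST_CMAKE)

if(Boost_FOUND)
  message(STATUS "Found Boost headers in ${Boost_INCLUDE_DIRS}")
endif()
```
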
diff --git a/cmake/modules/FindGINKGO.cmake b/cmake/modules/FindGINKGO.cmake
new file mode 100644
index 000000000000..d05c2528bf15
--- /dev/null
+++ b/cmake/modules/FindGINKGO.cmake
@@ -0,0 +1,72 @@
+## ---------------------------------------------------------------------
+##
+## Copyright (C) 2018 by the deal.II authors
+##
+## This file is part of the deal.II library.
+##
+## The deal.II library is free software; you can use it, redistribute
+## it, and/or modify it under the terms of the GNU Lesser General
+## Public License as published by the Free Software Foundation; either
+## version 2.1 of the License, or (at your option) any later version.
+## The full text of the license can be found in the file LICENSE.md at
+## the top level directory of deal.II.
+##
+## ---------------------------------------------------------------------
+
+#
+# Try to find the GINKGO library
+#
+# This module exports
+#
+# GINKGO_INCLUDE_DIRS
+#
+
+SET(GINKGO_DIR "" CACHE PATH "An optional hint to a GINKGO installation")
+SET_IF_EMPTY(GINKGO_DIR "$ENV{GINKGO_DIR}")
+
+DEAL_II_FIND_LIBRARY(GINKGO_LIBRARY
+ NAMES ginkgo
+ HINTS ${GINKGO_DIR}
+ PATH_SUFFIXES
+ lib${LIB_SUFFIX} lib64 lib
+ # This is a hint, isn't it?
+ build/${CMAKE_CXX_PLATFORM_ID}-${CMAKE_SYSTEM_PROCESSOR}/libginkgo
+ )
+DEAL_II_FIND_LIBRARY(GINKGO_REFERENCE_LIBRARY
+ NAMES ginkgo_reference
+ HINTS ${GINKGO_DIR}
+ PATH_SUFFIXES
+ lib${LIB_SUFFIX} lib64 lib
+ # This is a hint, isn't it?
+ build/${CMAKE_CXX_PLATFORM_ID}-${CMAKE_SYSTEM_PROCESSOR}/libginkgo_reference
+ )
+DEAL_II_FIND_LIBRARY(GINKGO_OMP_LIBRARY
+ NAMES ginkgo_omp
+ HINTS ${GINKGO_DIR}
+ PATH_SUFFIXES
+ lib${LIB_SUFFIX} lib64 lib
+ # This is a hint, isn't it?
+ build/${CMAKE_CXX_PLATFORM_ID}-${CMAKE_SYSTEM_PROCESSOR}/libginkgo_omp
+ )
+DEAL_II_FIND_LIBRARY(GINKGO_CUDA_LIBRARY
+ NAMES ginkgo_cuda
+ HINTS ${GINKGO_DIR}
+ PATH_SUFFIXES
+ lib${LIB_SUFFIX} lib64 lib
+ # This is a hint, isn't it?
+ build/${CMAKE_CXX_PLATFORM_ID}-${CMAKE_SYSTEM_PROCESSOR}/libginkgo_cuda
+ )
+
+DEAL_II_FIND_PATH(GINKGO_INCLUDE_DIR ginkgo/ginkgo.hpp
+ HINTS ${GINKGO_DIR}
+ PATH_SUFFIXES include
+ )
+
+DEAL_II_PACKAGE_HANDLE(GINKGO
+ LIBRARIES
+ REQUIRED GINKGO_LIBRARY GINKGO_REFERENCE_LIBRARY GINKGO_OMP_LIBRARY GINKGO_CUDA_LIBRARY
+ INCLUDE_DIRS REQUIRED GINKGO_INCLUDE_DIR
+ USER_INCLUDE_DIRS REQUIRED GINKGO_INCLUDE_DIR
+ CLEAR
+ GINKGO_INCLUDE_DIR
+ )
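
DEAL_II_FIND_LIBRARY, DEAL_II_FIND_PATH, and DEAL_II_PACKAGE_HANDLE are deal.II wrappers; the underlying pattern, written with stock CMake commands, looks roughly like this sketch:

```cmake
cmake_minimum_required(VERSION 3.5)
project(find_ginkgo_demo CXX)

set(GINKGO_DIR "" CACHE PATH "An optional hint to a Ginkgo installation")

find_library(GINKGO_LIBRARY
  NAMES ginkgo
  HINTS ${GINKGO_DIR}
  PATH_SUFFIXES lib lib64
  )

find_path(GINKGO_INCLUDE_DIR ginkgo/ginkgo.hpp
  HINTS ${GINKGO_DIR}
  PATH_SUFFIXES include
  )

# Sets GINKGO_FOUND if (and only if) both variables were found:
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GINKGO
  REQUIRED_VARS GINKGO_LIBRARY GINKGO_INCLUDE_DIR
  )
```
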
diff --git a/cmake/modules/FindLAPACK.cmake b/cmake/modules/FindLAPACK.cmake
index 0d79cc018e20..4227e2c13f09 100644
--- a/cmake/modules/FindLAPACK.cmake
+++ b/cmake/modules/FindLAPACK.cmake
@@ -90,6 +90,7 @@ FOREACH(_lib ${_fortran_libs})
ENDFOREACH()
+SET(_lapack_include_dirs ${LAPACK_INCLUDE_DIRS})
SET(_lapack_libraries ${LAPACK_LIBRARIES})
SET(_lapack_linker_flags ${LAPACK_LINKER_FLAGS})
DEAL_II_PACKAGE_HANDLE(LAPACK
@@ -97,6 +98,10 @@ DEAL_II_PACKAGE_HANDLE(LAPACK
REQUIRED _lapack_libraries
OPTIONAL BLAS_LIBRARIES ${_additional_libraries}
LINKER_FLAGS OPTIONAL _lapack_linker_flags BLAS_LINKER_FLAGS
+ INCLUDE_DIRS
+ OPTIONAL _lapack_include_dirs
+ USER_INCLUDE_DIRS
+ OPTIONAL _lapack_include_dirs
CLEAR
atlas_LIBRARY atlcblas_LIBRARY atllapack_LIBRARY blas_LIBRARY
eigen_blas_LIBRARY f77blas_LIBRARY gslcblas_LIBRARY lapack_LIBRARY
diff --git a/cmake/modules/FindSUNDIALS.cmake b/cmake/modules/FindSUNDIALS.cmake
index a23e113e7fc6..f7bd1a76ecca 100644
--- a/cmake/modules/FindSUNDIALS.cmake
+++ b/cmake/modules/FindSUNDIALS.cmake
@@ -21,6 +21,10 @@
# SUNDIALS_LIBRARIES
# SUNDIALS_INCLUDE_DIR
# SUNDIALS_WITH_IDAS
+# SUNDIALS_VERSION
+# SUNDIALS_VERSION_MAJOR
+# SUNDIALS_VERSION_MINOR
+# SUNDIALS_VERSION_PATCH
#
# Note that sundials headers are typically installed in several directories,
# e.g.,
@@ -86,16 +90,64 @@ ELSE()
SET(SUNDIALS_WITH_IDAS FALSE)
ENDIF()
+#
+# Extract SUNDIALS version.
+#
+DEAL_II_FIND_FILE(SUNDIALS_CONFIG_H
+ NAMES sundials_config.h
+ HINTS ${SUNDIALS_INCLUDE_DIR}/sundials
+ )
+IF(NOT SUNDIALS_CONFIG_H MATCHES "-NOTFOUND")
+ FILE(STRINGS "${SUNDIALS_CONFIG_H}" SUNDIALS_VERSION_MAJOR_STRING
+ REGEX "#define.*SUNDIALS_VERSION_MAJOR"
+ )
+ STRING(REGEX REPLACE "^.*SUNDIALS_VERSION_MAJOR.*([0-9]+).*" "\\1"
+ SUNDIALS_VERSION_MAJOR "${SUNDIALS_VERSION_MAJOR_STRING}"
+ )
+ FILE(STRINGS "${SUNDIALS_CONFIG_H}" SUNDIALS_VERSION_MINOR_STRING
+ REGEX "#define.*SUNDIALS_VERSION_MINOR"
+ )
+ STRING(REGEX REPLACE "^.*SUNDIALS_VERSION_MINOR.*([0-9]+).*" "\\1"
+ SUNDIALS_VERSION_MINOR "${SUNDIALS_VERSION_MINOR_STRING}"
+ )
+ FILE(STRINGS "${SUNDIALS_CONFIG_H}" SUNDIALS_VERSION_PATCH_STRING
+ REGEX "#define.*SUNDIALS_VERSION_PATCH"
+ )
+ STRING(REGEX REPLACE "^.*SUNDIALS_VERSION_PATCH.*([0-9]+).*" "\\1"
+ SUNDIALS_VERSION_PATCH "${SUNDIALS_VERSION_PATCH_STRING}"
+ )
+ENDIF()
+IF(NOT "${SUNDIALS_VERSION_MAJOR}")
+ SET(SUNDIALS_VERSION_MAJOR "0")
+ENDIF()
+IF(NOT "${SUNDIALS_VERSION_MINOR}")
+ SET(SUNDIALS_VERSION_MINOR "0")
+ENDIF()
+IF(NOT "${SUNDIALS_VERSION_PATCH}")
+ SET(SUNDIALS_VERSION_PATCH "0")
+ENDIF()
+SET(SUNDIALS_VERSION
+ "${SUNDIALS_VERSION_MAJOR}.${SUNDIALS_VERSION_MINOR}.${SUNDIALS_VERSION_PATCH}"
+ )
DEAL_II_PACKAGE_HANDLE(SUNDIALS
LIBRARIES REQUIRED
- ${_sundials_lib_ida} SUNDIALS_LIB_ARKODE
- SUNDIALS_LIB_KINSOL SUNDIALS_LIB_SER ${_sundials_lib_par}
+ ${_sundials_lib_ida}
+ SUNDIALS_LIB_ARKODE
+ SUNDIALS_LIB_KINSOL
+ SUNDIALS_LIB_SER
+ ${_sundials_lib_par}
INCLUDE_DIRS REQUIRED
SUNDIALS_INCLUDE_DIR
USER_INCLUDE_DIRS REQUIRED
SUNDIALS_INCLUDE_DIR
CLEAR
- SUNDIALS_LIB_IDA SUNDIALS_LIB_IDAS SUNDIALS_LIB_ARKODE
- SUNDIALS_LIB_KINSOL SUNDIALS_LIB_SER ${_sundials_lib_par}
+ SUNDIALS_LIB_IDA
+ SUNDIALS_LIB_IDAS
+ SUNDIALS_LIB_ARKODE
+ SUNDIALS_LIB_KINSOL
+ SUNDIALS_LIB_SER
+ ${_sundials_lib_par}
+ SUNDIALS_INCLUDE_DIR
+ SUNDIALS_CONFIG_H
)
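
The version-extraction idiom above (FILE(STRINGS ...) followed by STRING(REGEX REPLACE ...)) can be exercised on its own; a sketch with a placeholder header path:

```cmake
# Placeholder path; in the module above this comes from DEAL_II_FIND_FILE:
set(_config_header "/path/to/include/sundials/sundials_config.h")

if(EXISTS "${_config_header}")
  # Grab the "#define SUNDIALS_VERSION_MAJOR <n>" line ...
  file(STRINGS "${_config_header}" _major_line
    REGEX "#define.*SUNDIALS_VERSION_MAJOR"
    )
  # ... and strip everything except the number:
  string(REGEX REPLACE "^.*SUNDIALS_VERSION_MAJOR.*([0-9]+).*" "\\1"
    _version_major "${_major_line}"
    )
  message(STATUS "Detected SUNDIALS major version: ${_version_major}")
endif()
```
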
diff --git a/cmake/modules/FindSYMENGINE.cmake b/cmake/modules/FindSYMENGINE.cmake
new file mode 100644
index 000000000000..20b73fa50b6e
--- /dev/null
+++ b/cmake/modules/FindSYMENGINE.cmake
@@ -0,0 +1,140 @@
+## ---------------------------------------------------------------------
+##
+## Copyright (C) 2019 by the deal.II authors
+##
+## This file is part of the deal.II library.
+##
+## The deal.II library is free software; you can use it, redistribute
+## it, and/or modify it under the terms of the GNU Lesser General
+## Public License as published by the Free Software Foundation; either
+## version 2.1 of the License, or (at your option) any later version.
+## The full text of the license can be found in the file LICENSE at
+## the top level of the deal.II distribution.
+##
+## ---------------------------------------------------------------------
+
+#
+# - Try to find SymEngine
+#
+# This module exports
+#
+# SYMENGINE_INCLUDE_DIR
+# SYMENGINE_LIBRARY
+# SYMENGINE_WITH_LLVM
+#
+
+SET(SYMENGINE_DIR "" CACHE PATH "An optional hint to a SymEngine installation")
+SET_IF_EMPTY(SYMENGINE_DIR "$ENV{SYMENGINE_DIR}")
+
+#
+# SymEngine overwrites the CMake module path, so we save
+# and restore it after this library is found and configured.
+#
+SET (DEAL_II_CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH})
+
+#
+# Include the SymEngine:
+#
+FIND_PACKAGE(SymEngine
+ CONFIG QUIET
+ HINTS ${SYMENGINE_DIR}
+ PATH_SUFFIXES lib/cmake/symengine
+ NO_SYSTEM_ENVIRONMENT_PATH
+ )
+
+#
+# Reset the CMake module path
+#
+SET (CMAKE_MODULE_PATH ${DEAL_II_CMAKE_MODULE_PATH})
+
+
+#
+# Look for symengine_config.h - we'll query it to determine supported features:
+#
+IF(SymEngine_FOUND)
+ DEAL_II_FIND_FILE(SYMENGINE_SETTINGS_H symengine_config.h
+ HINTS ${SYMENGINE_INCLUDE_DIRS}
+ PATH_SUFFIXES symengine
+ NO_DEFAULT_PATH
+ NO_CMAKE_ENVIRONMENT_PATH
+ NO_CMAKE_PATH
+ NO_SYSTEM_ENVIRONMENT_PATH
+ NO_CMAKE_SYSTEM_PATH
+ NO_CMAKE_FIND_ROOT_PATH
+ )
+ENDIF()
+
+#
+# Version check
+#
+IF(EXISTS ${SYMENGINE_SETTINGS_H})
+
+ FILE(STRINGS "${SYMENGINE_SETTINGS_H}" SYMENGINE_VERSION_MAJOR_STRING
+ REGEX "#define.*SYMENGINE_MAJOR_VERSION")
+ STRING(REGEX REPLACE "^.*SYMENGINE_MAJOR_VERSION.*([0-9]+).*" "\\1"
+ SYMENGINE_VERSION_MAJOR "${SYMENGINE_VERSION_MAJOR_STRING}"
+ )
+ FILE(STRINGS "${SYMENGINE_SETTINGS_H}" SYMENGINE_VERSION_MINOR_STRING
+ REGEX "#define.*SYMENGINE_MINOR_VERSION")
+ STRING(REGEX REPLACE "^.*SYMENGINE_MINOR_VERSION.*([0-9]+).*" "\\1"
+ SYMENGINE_VERSION_MINOR "${SYMENGINE_VERSION_MINOR_STRING}"
+ )
+ FILE(STRINGS "${SYMENGINE_SETTINGS_H}" SYMENGINE_VERSION_PATCH_STRING
+ REGEX "#define.*SYMENGINE_PATCH_VERSION")
+ STRING(REGEX REPLACE "^.*SYMENGINE_PATCH_VERSION.*([0-9]+).*" "\\1"
+ SYMENGINE_VERSION_PATCH "${SYMENGINE_VERSION_PATCH_STRING}"
+ )
+
+ SET(SYMENGINE_VERSION ${SymEngine_VERSION})
+ENDIF()
+
+#
+# Feature checks
+#
+
+MACRO(_symengine_feature_check _var _regex)
+ IF(EXISTS ${SYMENGINE_SETTINGS_H})
+ FILE(STRINGS "${SYMENGINE_SETTINGS_H}" SYMENGINE_${_var}_STRING
+ REGEX "${_regex}")
+ IF("${SYMENGINE_${_var}_STRING}" STREQUAL "")
+ SET(SYMENGINE_WITH_${_var} FALSE)
+ ELSE()
+ SET(SYMENGINE_WITH_${_var} TRUE)
+ ENDIF()
+ ENDIF()
+ENDMACRO()
+
+# Other possible features of interest: BOOST, GMP
+_symengine_feature_check(LLVM "#define.*HAVE_SYMENGINE_LLVM")
+
+#
+# Sanitize include dirs:
+#
+
+STRING(REGEX REPLACE
+ "(lib64|lib)\\/cmake\\/symengine\\/\\.\\.\\/\\.\\.\\/\\.\\.\\/" ""
+ _symengine_include_dirs "${SYMENGINE_INCLUDE_DIRS}"
+ )
+REMOVE_DUPLICATES(_symengine_include_dirs)
+
+#
+# Get the full path for the SYMENGINE_LIBRARIES. Some of these libraries are
+# CMake targets, so we can query them directly for this information.
+#
+FOREACH(SYMENGINE_LIBRARY_NAME ${SYMENGINE_LIBRARIES})
+ IF (TARGET ${SYMENGINE_LIBRARY_NAME})
+ GET_PROPERTY(SYMENGINE_LIBRARY TARGET ${SYMENGINE_LIBRARY_NAME} PROPERTY LOCATION)
+ ELSE ()
+ SET(SYMENGINE_LIBRARY ${SYMENGINE_LIBRARY_NAME})
+ ENDIF()
+
+ SET(_symengine_libraries ${_symengine_libraries} ${SYMENGINE_LIBRARY})
+ENDFOREACH()
+
+
+DEAL_II_PACKAGE_HANDLE(SYMENGINE
+ LIBRARIES REQUIRED _symengine_libraries
+ INCLUDE_DIRS REQUIRED _symengine_include_dirs
+ USER_INCLUDE_DIRS REQUIRED _symengine_include_dirs
+ CLEAR SYMENGINE_SETTINGS_H SYMENGINE_SKIP_DEPENDENCIES SymEngine_DIR
+)
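
The loop over SYMENGINE_LIBRARIES handles a mix of imported CMake targets and plain library paths. A reduced sketch of that dispatch, assuming the target entries are imported targets (reading LOCATION on regular targets is restricted by policy CMP0026); _some_libraries is a placeholder input:

```cmake
set(_some_libraries "")   # placeholder, e.g. the content of SYMENGINE_LIBRARIES
set(_resolved_libraries)

foreach(_entry ${_some_libraries})
  if(TARGET ${_entry})
    # Imported targets know the full path of the library they represent:
    get_property(_location TARGET ${_entry} PROPERTY LOCATION)
    list(APPEND _resolved_libraries ${_location})
  else()
    # Already a plain path or library name; keep it as is:
    list(APPEND _resolved_libraries ${_entry})
  endif()
endforeach()

message(STATUS "Resolved libraries: ${_resolved_libraries}")
```
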
diff --git a/cmake/setup_finalize.cmake b/cmake/setup_finalize.cmake
index da1d0b9e77da..716093a7b3b5 100644
--- a/cmake/setup_finalize.cmake
+++ b/cmake/setup_finalize.cmake
@@ -84,12 +84,54 @@ ENDFOREACH()
#
FOREACH(build ${DEAL_II_BUILD_TYPES})
- CHECK_COMPILER_SETUP(
- "${DEAL_II_CXX_FLAGS} ${DEAL_II_CXX_FLAGS_${build}}"
- "${DEAL_II_LINKER_FLAGS} ${DEAL_II_LINKER_FLAGS_${build}}"
- DEAL_II_HAVE_USABLE_FLAGS_${build}
- ${DEAL_II_LIBRARIES} ${DEAL_II_LIBRARIES_${build}}
- )
+
+ MACRO(_check_linker_flags)
+ CHECK_COMPILER_SETUP(
+ "${DEAL_II_CXX_FLAGS} ${DEAL_II_CXX_FLAGS_${build}}"
+ "${DEAL_II_LINKER_FLAGS} ${DEAL_II_LINKER_FLAGS_${build}}"
+ DEAL_II_HAVE_USABLE_FLAGS_${build}
+ ${DEAL_II_LIBRARIES} ${DEAL_II_LIBRARIES_${build}}
+ )
+ ENDMACRO()
+
+ MACRO(_drop_linker_flag _linker_flag _replacement_flag _variable)
+ MESSAGE(STATUS
+ "Unable to compile a simple test program. "
+ "Trying to drop \"${_linker_flag}\" from the linker flags."
+ )
+ FOREACH(_flags
+ DEAL_II_LINKER_FLAGS DEAL_II_LINKER_FLAGS_${build}
+ BASE_LINKER_FLAGS BASE_LINKER_FLAGS_${build}
+ )
+ STRING(REPLACE "${_linker_flag}" "${_replacement_flag}"
+ ${_flags} "${${_flags}}"
+ )
+ ENDFOREACH()
+ SET(${_variable} FALSE CACHE INTERNAL "" FORCE)
+ SET(${_variable} FALSE)
+ ENDMACRO()
+
+ _check_linker_flags()
+
+ IF(NOT DEAL_II_HAVE_USABLE_FLAGS_${build} AND DEAL_II_COMPILER_HAS_FUSE_LD_LLD)
+ SET(_replacement "")
+ IF(DEAL_II_COMPILER_HAS_FUSE_LD_GOLD)
+ SET(_replacement "-fuse-ld=gold")
+ ENDIF()
+ _drop_linker_flag(
+ "-fuse-ld=lld" ${_replacement}
+ DEAL_II_COMPILER_HAS_FUSE_LD_LLD
+ )
+ _check_linker_flags()
+ ENDIF()
+
+ IF(NOT DEAL_II_HAVE_USABLE_FLAGS_${build} AND DEAL_II_COMPILER_HAS_FUSE_LD_GOLD)
+ _drop_linker_flag(
+ "-fuse-ld=gold" ""
+ DEAL_II_COMPILER_HAS_FUSE_LD_GOLD
+ )
+ _check_linker_flags()
+ ENDIF()
IF(NOT DEAL_II_HAVE_USABLE_FLAGS_${build})
MESSAGE(FATAL_ERROR "
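
The fallback logic introduced here boils down to: if the link check fails, strip the offending -fuse-ld flag from the flag variables and try again. An isolated sketch of the string surgery involved (variable names are placeholders):

```cmake
set(_my_linker_flags "-Wl,--as-needed -fuse-ld=lld")

# Pretend the compile/link check failed:
set(_have_usable_flags FALSE)

if(NOT _have_usable_flags)
  message(STATUS "Dropping -fuse-ld=lld from the linker flags and retrying")
  string(REPLACE "-fuse-ld=lld" "" _my_linker_flags "${_my_linker_flags}")
  # ... re-run the compile/link check with the reduced flags here ...
endif()

message(STATUS "Linker flags are now: ${_my_linker_flags}")
```
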
diff --git a/contrib/ci/Jenkinsfile.mark b/contrib/ci/Jenkinsfile.mark
new file mode 100644
index 000000000000..78f8e02c676c
--- /dev/null
+++ b/contrib/ci/Jenkinsfile.mark
@@ -0,0 +1,63 @@
+#!groovy
+
+/*
+
+This Jenkinsfile is used to mark jobs as "pending" as quickly as possible. The
+other longer running jobs only set the status to pending once they start
+running.
+
+See https://jenkins.tjhei.info/job/dealii-mark/ for details.
+
+*/
+
+/*
+Settings to apply inside Jenkins:
+ - discover pull requests (remove branches/master)
+ - Strategy: merged PR
+ - enable "Disable GitHub Multibranch Status Plugin"
+ - trigger build on pull request comment: .* /rebuild.* (without space)
+ - Jenkinsfile: choose contrib/ci/Jenkinsfile.mark
+ - scan: every 4 hours
+ - discard: 5+ items
+ - docker label: small
+*/
+
+pipeline
+{
+ agent
+ {
+ docker
+ {
+ image 'dealii/indent'
+ }
+ }
+
+ post { cleanup { cleanWs() } }
+
+ stages
+ {
+ stage("check")
+ {
+ steps
+ {
+ githubNotify context: 'ready', description: 'please be patient, testers are spinning up...', status: 'PENDING'
+ sh '''
+ wget -q -O - https://api.github.com/repos/dealii/dealii/issues/${CHANGE_ID}/labels | grep 'ready to test' || \
+ { echo "This commit will only be tested when it has the label 'ready to test'. Trigger a rebuild by adding a comment that contains '/rebuild'..."; exit 1; }
+ '''
+ }
+ post
+ {
+ failure
+ {
+ githubNotify context: 'ready', description: 'need ready to test label and /rebuild', status: 'PENDING'
+ script
+ {
+ currentBuild.result='NOT_BUILT'
+ }
+ }
+ }
+ }
+
+ }
+}
diff --git a/contrib/ci/Jenkinsfile.osx b/contrib/ci/Jenkinsfile.osx
new file mode 100644
index 000000000000..5c0d92165ca2
--- /dev/null
+++ b/contrib/ci/Jenkinsfile.osx
@@ -0,0 +1,126 @@
+#!groovy
+
+/*
+
+This Jenkins job runs a build on OSX
+
+See https://jenkins.tjhei.info/job/dealii-osx/ for details.
+
+*/
+
+/*
+Settings to apply inside Jenkins:
+ - discover pull requests (remove branches/master)
+ - Strategy: merged PR
+ - enable "Disable GitHub Multibranch Status Plugin"
+ - trigger build on pull request comment: .* /rebuild.* (without space)
+ - Jenkinsfile: choose contrib/ci/Jenkinsfile.osx
+ - scan: every 4 hours
+ - discard: 5+ items
+*/
+
+// load library https://github.com/tjhei/jenkins-stuff to provide
+// killold.killOldBuilds() function:
+@Library('tjhei') _
+
+pipeline
+{
+ agent none
+
+ stages
+ {
+ stage("abort old")
+ {
+ agent none
+ steps
+ {
+ // kill older builds in this PR:
+ script { killold.killOldBuilds() }
+ }
+ }
+
+ stage("main")
+ {
+ agent {
+ node {
+ label 'osx'
+ }
+ }
+
+ post { cleanup { cleanWs() } }
+
+ stages
+ {
+ stage("check")
+ {
+ when {
+ allOf {
+ not {branch 'master'}
+ }
+ }
+
+ steps
+ {
+ githubNotify context: 'OSX', description: 'pending...', status: 'PENDING'
+ sh '''
+ wget -q -O - https://api.github.com/repos/dealii/dealii/issues/${CHANGE_ID}/labels | grep 'ready to test' || \
+ { echo "This commit will only be tested when it has the label 'ready to test'. Trigger a rebuild by adding a comment that contains '/rebuild'..."; exit 1; }
+ '''
+ }
+ post
+ {
+ failure
+ {
+ githubNotify context: 'OSX', description: 'need ready to test label and /rebuild', status: 'PENDING'
+ script
+ {
+ currentBuild.result='NOT_BUILT'
+ }
+ }
+ }
+ }
+
+ stage('build')
+ {
+ steps
+ {
+ timeout(time: 1, unit: 'HOURS')
+ {
+ sh "echo \"building on node ${env.NODE_NAME}\""
+ sh '''#!/bin/bash
+ set -e
+ mkdir build && cd build
+ cmake \
+ -D DEAL_II_WITH_MPI=OFF \
+ -D DEAL_II_CXX_FLAGS='-Werror' \
+ -D CMAKE_BUILD_TYPE=Debug \
+ $WORKSPACE/ && make -j 4
+ make test # quicktests
+ '''
+ }
+ }
+
+ post
+ {
+ always
+ {
+ archiveArtifacts artifacts: 'build/detailed.log', fingerprint: true
+ }
+ failure
+ {
+ githubNotify context: 'OSX', description: 'build failed', status: 'FAILURE'
+ }
+ }
+ }
+
+ stage("finalize")
+ {
+ steps
+ {
+ githubNotify context: 'OSX', description: 'OK', status: 'SUCCESS'
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/contrib/ci/Jenkinsfile.tidy b/contrib/ci/Jenkinsfile.tidy
new file mode 100644
index 000000000000..6aa1179e43dd
--- /dev/null
+++ b/contrib/ci/Jenkinsfile.tidy
@@ -0,0 +1,113 @@
+#!groovy
+
+/*
+
+This Jenkins job runs clang-tidy on the code base
+
+See https://jenkins.tjhei.info/job/dealii-tidy/ for details.
+
+*/
+
+/*
+Settings to apply inside Jenkins:
+ - discover pull requests (remove branches/master)
+ - Strategy: merged PR
+ - enable "Disable GitHub Multibranch Status Plugin"
+ - trigger build on pull request comment: .* /rebuild.* (without space!)
+ - Jenkinsfile: choose contrib/ci/Jenkinsfile.tidy
+ - scan: every 4 hours
+ - discard: 5+ items
+*/
+
+// load library https://github.com/tjhei/jenkins-stuff to provide
+// killold.killOldBuilds() function:
+@Library('tjhei') _
+
+pipeline
+{
+ agent none
+
+ stages
+ {
+ stage("abort old")
+ {
+ agent none
+ steps
+ {
+ githubNotify context: 'tidy', description: 'initializing...', status: 'PENDING'
+ // kill older builds in this PR:
+ script { killold.killOldBuilds() }
+ }
+ }
+
+ stage("main")
+ {
+ agent
+ {
+ docker
+ {
+ image 'tjhei/candi-base-clang'
+ }
+ }
+
+ post { cleanup { cleanWs() } }
+
+ stages
+ {
+ stage("check")
+ {
+ when {
+ allOf {
+ not {branch 'master'}
+ }
+ }
+
+ steps
+ {
+ githubNotify context: 'tidy', description: 'pending...', status: 'PENDING'
+ sh '''
+ wget -q -O - https://api.github.com/repos/dealii/dealii/issues/${CHANGE_ID}/labels | grep 'ready to test' || \
+ { echo "This commit will only be tested when it has the label 'ready to test'. Trigger a rebuild by adding a comment that contains '/rebuild'..."; exit 1; }
+ '''
+ }
+ post
+ {
+ failure
+ {
+ githubNotify context: 'tidy', description: 'need ready to test label and /rebuild', status: 'PENDING'
+ script
+ {
+ currentBuild.result='NOT_BUILT'
+ }
+ }
+ }
+ }
+
+ stage('build')
+ {
+ steps
+ {
+ timeout(time: 2, unit: 'HOURS')
+ {
+ sh "echo \"building on node ${env.NODE_NAME}\""
+ sh '''#!/bin/bash
+ mkdir build && cd build
+ $WORKSPACE/contrib/utilities/run_clang_tidy.sh $WORKSPACE
+ '''
+ githubNotify context: 'tidy', description: 'OK', status: 'SUCCESS'
+ }
+ }
+
+ post
+ {
+ failure
+ {
+ githubNotify context: 'tidy', description: 'build failed', status: 'FAILURE'
+ }
+ }
+ }
+
+ }
+ }
+ }
+}
diff --git a/contrib/utilities/dotgdbinit.py b/contrib/utilities/dotgdbinit.py
index a4fb32598441..57d6ae956921 100644
--- a/contrib/utilities/dotgdbinit.py
+++ b/contrib/utilities/dotgdbinit.py
@@ -1,6 +1,6 @@
# ---------------------------------------------------------------------
#
-# Copyright (C) 2015 - 2017 by the deal.II authors
+# Copyright (C) 2015 - 2018 by the deal.II authors
#
# This file is part of the deal.II library.
#
@@ -25,7 +25,7 @@
# slightly older versions of GDB (the Python interface was added in 7.0,
# released in 2009).
#
-# Authors: Wolfgang Bangerth, 2015, David Wells, 2015 - 2017
+# Authors: Wolfgang Bangerth, 2015, David Wells, 2015 - 2018
#
set print pretty 1
@@ -52,6 +52,31 @@ def build_output_string(keys, accessor):
"\n}")
+class AlignedVectorPrinter(object):
+ """Print a deal.II AlignedVector instance."""
+ def __init__(self, typename, val):
+ self.typename = typename
+ self.val = val
+ self.length = int(self.val['data_end'] - self.val['data_begin'])
+
+ def children(self):
+ # The first entry (see the "Pretty Printing API" documentation of GDB)
+ # in the tuple should be a name for the child, which should be nothing
+ # (the array elements don't have individual names) here.
+ return (("", (self.val['data_begin'] + count).dereference())
+ for count in range(self.length))
+
+ def to_string(self):
+ return "AlignedVector<{}>({})".format(self.val.type.template_argument(0),
+ self.length)
+
+ @staticmethod
+ def display_hint():
+ """Provide a hint to GDB that this is an array-like container
+ (so print values in sequence)."""
+ return "array"
+
+
class PointPrinter(object):
"""Print a deal.II Point instance."""
def __init__(self, typename, val):
@@ -94,23 +119,17 @@ class VectorPrinter(object):
def __init__(self, typename, val):
self.typename = typename
self.val = val
+ a_vec = self.val['values']
+ self.length = int(a_vec['data_end'] - a_vec['data_begin'])
def children(self):
- # The first entry (see the "Pretty Printing API" documentation of GDB)
- # in the tuple should be a name for the child, which should be nothing
- # (an empty string) here.
- return (("", (self.val['values'] + count).dereference())
- for count in range(int(self.val['vec_size'])))
+ return (("values", self.val['values']),
+ ("thread_loop_partitioner",
+ self.val['thread_loop_partitioner']))
def to_string(self):
- return "{}[{}]".format(self.val.type.template_argument(0),
- self.val['vec_size'])
-
- @staticmethod
- def display_hint():
- """Provide a hint to GDB that this is an array-like container
- (so print values in sequence)."""
- return "array"
+ return "Vector<{}>({})".format(self.val.type.template_argument(0),
+ self.length)
class QuadraturePrinter(object):
@@ -184,6 +203,7 @@ def __call__(self, val):
def register_dealii_printers():
"""Register deal.II pretty-printers with gdb."""
printers = {
+ AlignedVectorPrinter: ['AlignedVector'],
PointPrinter: ['Point'],
TensorPrinter: ['Tensor'],
VectorPrinter: ['Vector'],
diff --git a/contrib/utilities/generate_lapack_templates b/contrib/utilities/generate_lapack_templates
index af3e009e7e99..d86e466800a3 100755
--- a/contrib/utilities/generate_lapack_templates
+++ b/contrib/utilities/generate_lapack_templates
@@ -557,7 +557,7 @@ def main():
routines.append(FortranRoutine(file_name, guard_fp))
except NotImplementedError:
# not a lot we can do. If its in the whitelist we will fail
- # again later anway
+ # again later anyway
continue
routines.sort(key=lambda u: u.general_name)
diff --git a/contrib/utilities/indent b/contrib/utilities/indent
index ca889f7fcd36..9cb6892274d1 100755
--- a/contrib/utilities/indent
+++ b/contrib/utilities/indent
@@ -17,7 +17,7 @@
#
# This script does the same thing as contrib/utilities/indent-all but only
# reformats files which have changed (or have been added but neither
-# staged/commited) since the last merge commit to the master branch.
+# staged/committed) since the last merge commit to the master branch.
#
# The script needs to be executed as
# ./contrib/utilities/indent
diff --git a/contrib/utilities/indent.py b/contrib/utilities/indent.py
index 0ca683d70891..82ba521487e7 100644
--- a/contrib/utilities/indent.py
+++ b/contrib/utilities/indent.py
@@ -112,7 +112,7 @@ def parse_arguments():
"By default only \"examples\", \"include\", "
"\"source\" and \"tests\" "
"directories are chosen to work on."
- "Path to directories can be both abosulte or relative.")
+ "Path to directories can be both absolute or relative.")
parser.add_argument("-j", metavar="THREAD_COUNT", type=int, default=0,
help="Number of clang-format instances to be run "
diff --git a/contrib/utilities/indent_common.sh b/contrib/utilities/indent_common.sh
index 6bbeb92115ab..c8e4a26f9607 100644
--- a/contrib/utilities/indent_common.sh
+++ b/contrib/utilities/indent_common.sh
@@ -205,7 +205,7 @@ export -f fix_permissions
# serves as a good candidate to separate individual file names.
# - For 'xargs', -0 does the opposite: it separates filenames that are
# delimited by \0
-# - the options "-n 1 -P 10" make sure that the following script with be
+# - the options "-n 1 -P 10" make sure that the following script will be
# called exactly with one file name as argument at a time, but we allow
# execution for up to 10 times in parallel
#
diff --git a/contrib/utilities/run_clang_tidy.sh b/contrib/utilities/run_clang_tidy.sh
index d292ef84a084..70dc612c6621 100755
--- a/contrib/utilities/run_clang_tidy.sh
+++ b/contrib/utilities/run_clang_tidy.sh
@@ -21,7 +21,7 @@
# Usage:
# /contrib/utilities/run_clang_tidy.sh SRC_DIR OPTIONAL_CMAKE_ARGS
# with:
-# SRC_DIR is an absolute path to a deal.II source directory
+# SRC_DIR points to a deal.II source directory
# OPTIONAL_CMAKE_ARGS are optional arguments to pass to CMake
# make sure to run this script in an empty build directory
#
@@ -29,8 +29,9 @@
# Clang 5.0.1+ and have clang, clang++, and run-clang-tidy.py in
# your path.
-# grab first argument:
+# grab first argument and make relative path an absolute one:
SRC=$1
+SRC=$(cd "$SRC";pwd)
shift
if test ! -d "$SRC/source" -o ! -d "$SRC/include" -o ! -d "$SRC/examples" -o ! -f "$SRC/CMakeLists.txt" ; then
@@ -40,36 +41,32 @@ if test ! -d "$SRC/source" -o ! -d "$SRC/include" -o ! -d "$SRC/examples" -o ! -
fi
echo "SRC-DIR=$SRC"
-# do not allow bundled packages, otherwise we get too many warnings from TBB/UMFPACK/etc.
# enable MPI (to get MPI warnings)
# export compile commands (so that run-clang-tidy.py works)
-ARGS="-D DEAL_II_ALLOW_BUNDLED=OFF -D DEAL_II_WITH_MPI=ON -D CMAKE_EXPORT_COMPILE_COMMANDS=ON $@"
+ARGS=("-D" "DEAL_II_WITH_MPI=ON" "-D" "CMAKE_EXPORT_COMPILE_COMMANDS=ON" "-D" "CMAKE_BUILD_TYPE=Debug" "$@")
-# disable performance-inefficient-string-concatenation because we don't care about "a"+to_string(5)+...
-CHECKS="-*,
- cppcoreguidelines-pro-type-static-cast-downcast,
- google-readability-casting,
- modernize-*,
- -modernize-pass-by-value,
- -modernize-raw-string-literal,
- -modernize-use-auto,
- -modernize-use-override,
- -modernize-use-default-member-init,
- -modernize-use-transparent-functors,
- mpi-*,
- performance-*,
- -performance-inefficient-string-concatenation"
-
-CHECKS="$(echo "${CHECKS}" | tr -d '[:space:]')"
-echo "$CHECKS"
+# for a list of checks, see /.clang-tidy
+cat "$SRC/.clang-tidy"
if ! [ -x "$(command -v run-clang-tidy.py)" ] || ! [ -x "$(command -v clang++)" ]; then
echo "make sure clang, clang++, and run-clang-tidy.py (part of clang) are in the path"
exit 2
fi
-CC=clang CXX=clang++ cmake $ARGS "$SRC" || (echo "cmake failed!"; false) || exit 2
+CC=clang CXX=clang++ cmake "${ARGS[@]}" "$SRC" || (echo "cmake failed!"; false) || exit 2
cmake --build . --target expand_all_instantiations || (echo "make expand_all_instantiations failed!"; false) || exit 3
-run-clang-tidy.py -p . -checks="$CHECKS" -quiet -header-filter="$SRC/include/*" -fix
+# finally run it:
+# send stderr to error.txt (it just contains nonsensical "x warnings generated")
+# pipe output to output.txt
+run-clang-tidy.py -p . -quiet -header-filter="$SRC/include/*" 2>error.txt >output.txt
+
+if grep -E -q '(warning|error): ' output.txt; then
+ grep -E '(warning|error): ' output.txt
+ exit 4
+fi
+
+echo "OK"
+exit 0
+
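
On the CMake side, the only prerequisite for this workflow is an exported compilation database. A minimal project sketch (the project name and the generated main.cc are placeholders):

```cmake
cmake_minimum_required(VERSION 3.5)
project(tidy_demo CXX)

# Equivalent to passing -D CMAKE_EXPORT_COMPILE_COMMANDS=ON on the command
# line, as the script above does; compile_commands.json is written into the
# build directory and picked up by run-clang-tidy.py via its -p option.
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

# A throw-away translation unit so that the demo actually configures:
file(WRITE ${CMAKE_BINARY_DIR}/main.cc "int main() { return 0; }\n")
add_executable(demo ${CMAKE_BINARY_DIR}/main.cc)
```
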
diff --git a/doc/CMakeLists.txt b/doc/CMakeLists.txt
index ddfdd2977e34..97edbbb32c8d 100644
--- a/doc/CMakeLists.txt
+++ b/doc/CMakeLists.txt
@@ -32,20 +32,9 @@ IF(DEAL_II_COMPONENT_DOCUMENTATION)
COMPONENT documentation
PATTERN "CMakeLists.txt" EXCLUDE
PATTERN "doxygen" EXCLUDE
- PATTERN "*.in" EXCLUDE
PATTERN "news" EXCLUDE
)
- CONFIGURE_FILE(
- ${CMAKE_CURRENT_SOURCE_DIR}/title.html.in
- ${CMAKE_CURRENT_BINARY_DIR}/title.html
- )
- INSTALL(FILES
- ${CMAKE_CURRENT_BINARY_DIR}/title.html
- DESTINATION ${DEAL_II_DOCHTML_RELDIR}
- COMPONENT documentation
- )
-
MESSAGE(STATUS "Setting up documentation - Done")
MESSAGE(STATUS "")
diff --git a/doc/documentation.html b/doc/documentation.html
deleted file mode 100644
index ee8bee879069..000000000000
--- a/doc/documentation.html
+++ /dev/null
@@ -1,75 +0,0 @@
-
- The deal.II Online Documentation
-
- deal.II comes with extensive online
- documentation that can be grouped into the following categories:
-
- Information for users
-
- Information for developers
-
diff --git a/doc/doxygen/headers/automatic_and_symbolic_differentiation.h b/doc/doxygen/headers/automatic_and_symbolic_differentiation.h
index ae5178e2c8bc..77fc8f6234e3 100644
--- a/doc/doxygen/headers/automatic_and_symbolic_differentiation.h
+++ b/doc/doxygen/headers/automatic_and_symbolic_differentiation.h
@@ -14,13 +14,13 @@
// ---------------------------------------------------------------------
/**
- * @defgroup auto_symb_diff Automatic differentiation
+ * @defgroup auto_symb_diff Automatic and symbolic differentiation
*
* @brief A module dedicated to the implementation of functions and classes that relate
- * to automatic differentiation.
+ * to automatic and symbolic differentiation.
*
- * Below we provide a very brief introduction as to what automatic differentiation is,
- * what variations of this computational / numerical scheme exist, and how it is integrated
+ * Below we provide a very brief introduction as to what automatic and symbolic differentiation are,
+ * what variations of these computational / numerical schemes exist, and how they are integrated
* within deal.II's framework.
*
* @section auto_diff_1 Automatic differentiation
@@ -96,7 +96,7 @@
* that, with the appropriate implementation, both first and second derivatives can be computed exactly.
* -# With taped approaches, a specified subregion of code is selected as one for which all
* operations executed with active (marked) input variables are tracked and recorded in a data structure
- * referred to as a tape. At the end of the taped region, the recorded function(s) may be revaluated
+ * referred to as a tape. At the end of the taped region, the recorded function(s) may be reevaluated
* by "replaying" the tape with a different set of input variables instead of recomputing the function
* directly. Assuming that the taped region represents a smooth function, arbitrarily high-order
* derivatives of the function then can be computed by referring to the code path tracked and stored on
@@ -232,7 +232,7 @@
* graph is required.
*
* In reverse-mode, the chain-rule is computed somewhat unnaturally from the "outside in". The
- * values of the dependent variables first get computed and fixed, and then the preceeding
+ * values of the dependent variables first get computed and fixed, and then the preceding
* differential operations are evaluated and multiplied in succession with the previous results
* from left to right. Again, if we encapsulate and fix the order of operations using parentheses,
* this implies that the reverse calculation is performed by
@@ -450,9 +450,23 @@
*
* @subsubsection auto_diff_1_3 User interface to the automatic differentiation libraries
*
- * As of the current release, there is no formal, unified interface to the automatic
- * differentation libraries that we support. It is therefore necessary for users to
- * manage the initialization and derivative computations themselves.
+ * The deal.II library offers a unified interface to the automatic differentiation libraries that
+ * we support. To date, the helper classes have been developed for the following contexts:
+ *
+ * - Classes designed to operate at the quadrature point level (or any general continuum point):
+ * - ScalarFunction: Differentiation of a scalar-valued function. One typical use would be the
+ * - ScalarFunction: Differentiation of a scalar-valued function. One typical use would be
+ * - VectorFunction: Differentiation of a vector-valued function. This could be used to
+ * linearize the kinematic variables of a constitutive law, or assist in solving
+ * the evolution equations of local internal variables.
+ * - Classes designed to operate at the cell level:
+ * - EnergyFunctional: Differentiation of a scalar-valued energy functional, such as might arise
+ * from variational formulations.
+ * - ResidualLinearization: Differentiation of a vector-valued finite element residual, leading to
+ * its consistent linearization.
+ *
+ * Naturally, it is also possible for users to manage the initialization and derivative
+ * computations themselves.
*
* The most up-to-date examples of how this is done using ADOL-C can be found in
* - their user manual ,
@@ -464,4 +478,10 @@
* - a code-gallery example , and
* - our test-suite .
*
+ *
+ * @section symb_diff_1 Symbolic differentiation
+ *
+ * TODO. As a temporary placeholder, here is a link to the Wikipedia article on
+ * symbolic differentiation and the
+ * SymPy library.
*/
diff --git a/doc/doxygen/headers/hp.h b/doc/doxygen/headers/hp.h
index 5fceed9e248c..b9c69531681f 100644
--- a/doc/doxygen/headers/hp.h
+++ b/doc/doxygen/headers/hp.h
@@ -89,6 +89,21 @@
* collection of mappings is used, the same holds for hp::MappingCollection
* objects as well.
*
+ * Whenever p adaptivity is considered in an hp finite element program,
+ * a hierarchy of finite elements needs to be established to determine
+ * succeeding finite elements for refinement and preceding ones for coarsening.
+ * Typically, this hierarchy considers how finite element spaces are nested:
+ * for example, a $Q_1$ element describes a sub-space of a $Q_2$ element,
+ * and so doing $p$ refinement usually means using a larger (more accurate)
+ * finite element space. In other words, the hierarchy of finite elements is built
+ * by considering whether some elements of the collection are sub- or
+ * super-spaces of others.
+ *
+ * By default, we assume that finite elements are stored in an ascending order
+ * based on their polynomial degree. If the order of elements differs,
+ * a corresponding hierarchy needs to be supplied to the collection via the
+ * hp::FECollection::set_hierarchy() member function.
+ *
* @ingroup hp
*/
diff --git a/doc/doxygen/headers/laoperators.h b/doc/doxygen/headers/laoperators.h
index 64d8bfe40fad..49d2a122bda0 100644
--- a/doc/doxygen/headers/laoperators.h
+++ b/doc/doxygen/headers/laoperators.h
@@ -89,6 +89,9 @@
* encapsulation of individual linear operators into blocked linear
* operator variants.
*
+ * The step-20 tutorial program has a detailed usage example of the
+ * LinearOperator class.
+ *
* @note As explained below, when using LinearOperator as res = op_a*x
* a PackagedOperation class instance is generated behind-the-curtains.
* Consequently, the user program has to include header files for both classes
@@ -175,6 +178,9 @@
* Vector residual = b - op_a * x; // computes the residual at this point
* @endcode
*
+ * The step-20 tutorial program has a detailed usage example of the
+ * PackagedOperation class.
+ *
*
* @ingroup LAC
* @ingroup MATRICES
diff --git a/doc/doxygen/headers/manifold.h b/doc/doxygen/headers/manifold.h
index 0eb3b05b48c5..84e04ef3795b 100644
--- a/doc/doxygen/headers/manifold.h
+++ b/doc/doxygen/headers/manifold.h
@@ -60,7 +60,7 @@
* in the Mapping class (see the @ref mapping module), which however obtains
* its information about the boundary of the domain from the classes
* described here. The same is, of course, true when integrating boundary
- * terms (e.g., inhomogenous Neumann boundary conditions).
+ * terms (e.g., inhomogeneous Neumann boundary conditions).
*
* Domains with nonzero codimension: In cases where a Triangulation is
* embedded into a higher dimensional space, i.e., whenever the second
diff --git a/doc/doxygen/images/dofinfo_get_dof_indices.png b/doc/doxygen/images/dofinfo_get_dof_indices.png
new file mode 100644
index 000000000000..8366cacc6330
Binary files /dev/null and b/doc/doxygen/images/dofinfo_get_dof_indices.png differ
diff --git a/doc/doxygen/options.dox.in b/doc/doxygen/options.dox.in
index 14fd142f2385..133ed43761fe 100644
--- a/doc/doxygen/options.dox.in
+++ b/doc/doxygen/options.dox.in
@@ -199,6 +199,8 @@ PREDEFINED = DOXYGEN=1 \
DEAL_II_WITH_SCALAPACK=1 \
DEAL_II_WITH_SLEPC=1 \
DEAL_II_WITH_SUNDIALS=1 \
+ DEAL_II_WITH_SYMENGINE=1 \
+ DEAL_II_SYMENGINE_WITH_LLVM=1 \
DEAL_II_WITH_THREADS=1 \
DEAL_II_WITH_TRILINOS=1 \
DEAL_II_TRILINOS_WITH_ROL=1 \
@@ -213,6 +215,8 @@ PREDEFINED = DOXYGEN=1 \
DEAL_II_ALWAYS_INLINE= \
__device__= \
DEAL_II_P4EST_VERSION_GTE=1 \
+ DEAL_II_SUNDIALS_VERSION_GTE=1 \
+ DEAL_II_SUNDIALS_VERSION_LT=0 \
DEAL_II_TRILINOS_VERSION_GTE=1
# do not expand exception declarations
diff --git a/doc/external-libs/cuda.html b/doc/external-libs/cuda.html
index 8ba33ae33a88..345865b6a903 100644
--- a/doc/external-libs/cuda.html
+++ b/doc/external-libs/cuda.html
@@ -39,8 +39,8 @@ Installing deal.II with CUDA
If you are using CUDA 9 or CUDA 10, you will need to turn off support for
C++17 similarly.
- By default, we assume that your GPU has compute capability 3.5 but you
- can easily set your own CUDA flags:
+ By default, we try to detect the compute capability of your device
+ but you can easily set your own CUDA flags:
-DDEAL_II_CUDA_FLAGS="-arch=sm_60"
diff --git a/doc/external-libs/ginkgo.html b/doc/external-libs/ginkgo.html
new file mode 100644
index 000000000000..d918e17f7357
--- /dev/null
+++ b/doc/external-libs/ginkgo.html
@@ -0,0 +1,96 @@
+
+ The deal.II Readme on interfacing to Ginkgo
+
+ Interfacing deal.II to Ginkgo
+
+
+ Ginkgo is
+ A numerical linear algebra software package that provides its users with
+ highly optimized fine grid level linear algebra operations. It currently has
+ the capability to solve on the CPU with the support of OpenMP and on the GPU
+ with NVIDIA's CUDA libraries. For some operations it uses NVIDIA's own libraries, CuSparse
+ and CuBLAS, but some routines also have self-implemented CUDA functions. See the Ginkgo documentation
+ for more details.
+
+
+
+ deal.II has wrapper classes to the linear algebra
+ parts of Ginkgo that provide almost the
+ same interfaces as the built-in deal.II linear algebra operations.
+
+
+ Installing deal.II with Ginkgo
+
+
+ During the CMake configuration, the following flags should be specified:
+
+ -DDEAL_II_WITH_GINKGO=ON, to enable the Ginkgo library.
+ -DGINKGO_DIR=, to specify the path where Ginkgo has been installed.
+
+
+
+ Installing Ginkgo
+
+
+ Installing Ginkgo is straightforward. Since Ginkgo uses CMake, it can be built and
+ installed with the commands below. For its dependencies and compatible
+ libraries, please refer to the Ginkgo documentation.
+ The different flags that can be added are:
+
+ -DBUILD_REFERENCE={ON,OFF}: Builds the reference single thread implementation
+ used to check the more sophisticated implementations of the GPU/CPU.
+ -DBUILD_CUDA={ON,OFF}: Builds the GPU implementation, specifically the NVIDIA
+ CUDA implementations. This needs CUDA to be installed on the machine.
+ -DBUILD_OMP={ON,OFF}: Builds the multithreaded OpenMP implementations.
+
+ To install Ginkgo, you would need to:
+
+ git clone https://github.com/ginkgo-project/ginkgo.git
+ mkdir build; cd build
+ cmake -DBUILD_REFERENCE=on/off -DBUILD_CUDA=on/off -DBUILD_OMP=on/off -DCMAKE_INSTALL_PREFIX=/path/to/install/ -DCMAKE_BUILD_TYPE=Debug
+ make install
+
+
+
+ Running the Ginkgo tests
+
+
+ Ginkgo uses gtests to run unit tests. The reference implementations are
+ non-optimized versions against which the OpenMP and CUDA versions are compared
+ for correctness.
+
+ The following command, executed in Ginkgo's build directory, launches its test suite.
+
+ make test
+
+ The output should contain several lines of the form:
+
+ Start 1: path/to/test
+ 1/13 Test #1: path/to/test ............................. Passed 0.01 sec
+
+ To run a single test and see its detailed output, run from the build directory
+
+ ./path/to/test
+
+
diff --git a/doc/external-libs/petsc.html b/doc/external-libs/petsc.html
index 68a2ac02dbd4..c665079d1764 100644
--- a/doc/external-libs/petsc.html
+++ b/doc/external-libs/petsc.html
@@ -37,7 +37,7 @@ Installing deal.II with PETSc
Note: The most recent version of PETSc
that has been reported to be compatible with
- deal.II is version 3.9.0. If you use a later
+ deal.II is version 3.11.0. If you use a later
version than this and encounter problems, let us
know. deal.II does not support versions of PETSc prior
to 3.3.0.
diff --git a/doc/external-libs/trilinos.html b/doc/external-libs/trilinos.html
index 9579c9a62a0a..09e13e35a04e 100644
--- a/doc/external-libs/trilinos.html
+++ b/doc/external-libs/trilinos.html
@@ -67,6 +67,7 @@
Installing Trilinos
ROL (optional),
Sacado (optional),
Teuchos,
+ Tpetra (optional),
Zoltan (optional).
@@ -92,6 +93,9 @@ Installing Trilinos
-DTrilinos_ENABLE_MueLu=ON \
-DTrilinos_ENABLE_ML=ON \
-DTrilinos_ENABLE_ROL=ON \
+ -DTrilinos_ENABLE_Tpetra=ON \
+ -DTpetra_INST_COMPLEX_DOUBLE=ON \
+ -DTpetra_INST_COMPLEX_FLOAT=ON \
-DTrilinos_ENABLE_Zoltan=ON \
-DTrilinos_VERBOSE_CONFIGURE=OFF \
-DTPL_ENABLE_MPI=ON \
diff --git a/doc/index.html b/doc/index.html
index 98b547352979..b720f2cbd532 100644
--- a/doc/index.html
+++ b/doc/index.html
@@ -18,19 +18,83 @@
-
- The deal.II Documentation Page
- Your browser does not seem to understand frames. A version of this
- page that does not use frames can be found at
- documentation.html.
-
+
+ Information for starters
+
+ Information for advanced users
+
+ Information for developers
+
+ Further resources
+