Commit 1d61f88b authored by oliver.sander_at_tu-dresden.de

Use Functional::VectorType only if USE_OLD_TNNMG is set

Otherwise, use Functional::Vector
parent 9ad49eff
Pipeline #28051 passed in 8 minutes and 42 seconds
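For context, here is a minimal, self-contained sketch of the pattern the diff below applies: the test helpers name the functional's nested vector type as Functional::VectorType when USE_OLD_TNNMG is defined (old interface) and as Functional::Vector otherwise (new interface), switching with the preprocessor. ToyFunctional, evaluateAt, and their members are hypothetical stand-ins for illustration only; they are not part of the actual test file.

#include <iostream>
#include <vector>

// Hypothetical functional: exposes 'VectorType' under the old interface,
// 'Vector' under the new one, mirroring the switch used in the commit.
struct ToyFunctional
{
#ifdef USE_OLD_TNNMG
  using VectorType = std::vector<double>;
  double operator()(const VectorType& x) const { return x[0] * x[0]; }
#else
  using Vector = std::vector<double>;
  double operator()(const Vector& x) const { return x[0] * x[0]; }
#endif
};

// Test helper following the same #ifdef switch as the diff below.
template <class Functional>
#ifdef USE_OLD_TNNMG
void evaluateAt(const Functional& f,
                const std::vector<typename Functional::VectorType>& testPoints)
#else
void evaluateAt(const Functional& f,
                const std::vector<typename Functional::Vector>& testPoints)
#endif
{
  for (auto&& p : testPoints)
    std::cout << f(p) << '\n';
}

int main()
{
#ifdef USE_OLD_TNNMG
  std::vector<ToyFunctional::VectorType> testPoints = {{1.0}, {2.0}};
#else
  std::vector<ToyFunctional::Vector> testPoints = {{1.0}, {2.0}};
#endif
  evaluateAt(ToyFunctional{}, testPoints);
}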
@@ -76,7 +76,11 @@ void testConvexity(const Functional& functional,
  */
 template <class Functional>
 void testHomogeneity(const Functional& functional,
+#ifdef USE_OLD_TNNMG
                      const std::vector<typename Functional::VectorType>& testDirections)
+#else
+                     const std::vector<typename Functional::Vector>& testDirections)
+#endif
 {
   for (auto&& testDirection : testDirections)
   {
@@ -105,12 +109,20 @@ void testHomogeneity(const Functional& functional,
  */
 template <class Functional>
 void testGradient(Functional& functional,
+#ifdef USE_OLD_TNNMG
                   const std::vector<typename Functional::VectorType>& testPoints)
+#else
+                  const std::vector<typename Functional::Vector>& testPoints)
+#endif
 {
   for (auto&& testPoint : testPoints)
   {
     // Get the gradient at the current test point as computed by 'functional'
+#ifdef USE_OLD_TNNMG
     typename Functional::VectorType gradient(testPoint.size());
+#else
+    typename Functional::Vector gradient(testPoint.size());
+#endif
     gradient = 0;
     functional.addGradient(testPoint, gradient);
@@ -158,7 +170,11 @@ void testGradient(Functional& functional,
  */
 template <class Functional>
 void testHessian(Functional& functional,
+#ifdef USE_OLD_TNNMG
                  const std::vector<typename Functional::VectorType>& testPoints)
+#else
+                 const std::vector<typename Functional::Vector>& testPoints)
+#endif
 {
   for (auto&& testPoint : testPoints)
   {
@@ -251,7 +267,11 @@ void testHessian(Functional& functional,
  */
 template <class Functional>
 void testDirectionalSubdifferential(const Functional& functional,
+#ifdef USE_OLD_TNNMG
                                     const std::vector<typename Functional::VectorType>& testPoints)
+#else
+                                    const std::vector<typename Functional::Vector>& testPoints)
+#endif
 {
   // Step size. Best value: square root of the machine precision
   const double eps = std::sqrt(std::numeric_limits<double>::epsilon());
@@ -300,7 +320,11 @@ void testDirectionalSubdifferential(const Functional& functional,
  */
 template <class Functional>
 void testSubDiff(Functional& functional,
+#ifdef USE_OLD_TNNMG
                  const std::vector<typename Functional::VectorType>& testPoints)
+#else
+                 const std::vector<typename Functional::Vector>& testPoints)
+#endif
 {
   for (auto&& testPoint : testPoints)
   {