From 4d0f0d4ad1a5a7193c73bff0e910f3833fcd450a Mon Sep 17 00:00:00 2001
From: Elias Pipping <elias.pipping@fu-berlin.de>
Date: Mon, 30 Jul 2012 10:13:11 +0200
Subject: [PATCH] New test: parabola

---
 src/Makefile.am                          |  1 +
 src/test-gradient-method-nicefunction.hh | 14 +++++
 src/test-gradient-parabola.cc            | 66 ++++++++++++++++++++++++
 3 files changed, 81 insertions(+)
 create mode 100644 src/test-gradient-parabola.cc

diff --git a/src/Makefile.am b/src/Makefile.am
index 299b4e70..49700b93 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -2,6 +2,7 @@ check_PROGRAMS = \
 	test-gradient-horrible \
 	test-gradient-horrible-logarithmic \
 	test-gradient-identity \
+	test-gradient-parabola \
 	test-gradient-sample \
 	test-gradient-sample-3d \
 	test-gradient-sample-nonsmooth \
diff --git a/src/test-gradient-method-nicefunction.hh b/src/test-gradient-method-nicefunction.hh
index eb62eff3..7ef9e642 100644
--- a/src/test-gradient-method-nicefunction.hh
+++ b/src/test-gradient-method-nicefunction.hh
@@ -18,6 +18,20 @@ class MyFunction : public NiceFunction {
   }
 };
 
+class Parabola : public MyFunction {
+  void virtual evaluate(double const &x, double &y) const { y = x * x; }
+
+  double virtual leftDifferential(double s) const { return 2 * s; }
+
+  double virtual rightDifferential(double s) const { return 2 * s; }
+
+  double virtual second_deriv(double s) const { return 2; }
+
+  double virtual regularity(double s) const { return 2; }
+
+  bool virtual smoothesNorm() const { return true; }
+};
+
 class LinearFunction : public MyFunction {
  public:
   LinearFunction(double a) : coefficient(a) {}
diff --git a/src/test-gradient-parabola.cc b/src/test-gradient-parabola.cc
new file mode 100644
index 00000000..03b396fd
--- /dev/null
+++ b/src/test-gradient-parabola.cc
@@ -0,0 +1,66 @@
+/* Checks whether the descent direction is computed correctly using the
+   analytic solution; also checks whether the algorithm converges
+   to the right solution regardless of where it starts */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <cassert>
+
+#include <dune/common/shared_ptr.hh>
+
+#include <dune/tectonic/samplefunctional.hh>
+
+#include "test-gradient-method-helper.hh"
+#include "test-gradient-method-nicefunction.hh"
+
+int main() {
+  int const dim = 2;
+  typedef Dune::SampleFunctional<dim> Functional;
+
+  Functional::SmallMatrix A;
+  A[0][0] = 3;
+  A[0][1] = A[1][0] = 1.5;
+  A[1][1] = 4;
+  Functional::SmallVector b;
+  b[0] = 1;
+  b[1] = 2;
+
+  // Using |x|^2 as the nonlinearity is the same as having no nonlinearity
+  // but adding twice the identity matrix to A. In other words, we are
+  // solving (A + 2*Id) x = b
+  auto f = Dune::make_shared<Dune::Parabola const>();
+  auto phi = Dune::make_shared<Functional::NonlinearityType const>(f);
+  Functional J(A, b, phi);
+
+  Functional::SmallVector solution; // Analytic solution
+  solution[0] = 4.0 / 37.0;
+  solution[1] = 34.0 / 111.0;
+
+  Functional::SmallMatrix M = A;
+  M[0][0] += 2;
+  M[1][1] += 2;
+
+  Functional::SmallVector start = b;
+  start *= 17;
+
+  Functional::SmallVector analytic_descent = b;
+  M.mmv(start, analytic_descent);
+
+  Functional::SmallVector numerical_descent;
+  J.descentDirection(start, numerical_descent);
+  assert(two_distance<dim>(numerical_descent, analytic_descent) < 1e-10);
+
+  double const ret1 = functionTester(J, start, 6);
+  assert(two_distance<dim>(start, solution) < 1e-6);
+
+  // Some arbitrary starting point far from the solution
+  start[0] = 279;
+  start[1] = -96;
+
+  double const ret2 = functionTester(J, start, 15);
+  assert(two_distance<dim>(start, solution) < 1e-6);
+
+  assert(std::abs(ret1 - ret2) < 1e-11);
+}
--
GitLab
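For reference (not part of the patch): a minimal standalone sketch that double-checks the hard-coded analytic solution, assuming only the reduction to (A + 2*Id) x = b stated in the comment above and the values of A and b from the test. It solves the resulting 2x2 system directly with Cramer's rule; the helper names here are illustrative and unrelated to the Dune API.

// Standalone sanity check of the analytic solution 4/37, 34/111
#include <cassert>
#include <cmath>

int main() {
  // M = A + 2*Id with A = [[3, 1.5], [1.5, 4]] and b = (1, 2)
  double const M[2][2] = { { 5.0, 1.5 }, { 1.5, 6.0 } };
  double const b[2] = { 1.0, 2.0 };

  // Solve M x = b via Cramer's rule for the 2x2 case
  double const det = M[0][0] * M[1][1] - M[0][1] * M[1][0]; // 27.75
  double const x0 = (b[0] * M[1][1] - M[0][1] * b[1]) / det; // 3 / 27.75
  double const x1 = (M[0][0] * b[1] - b[0] * M[1][0]) / det; // 8.5 / 27.75

  // Agrees with the values hard-coded in test-gradient-parabola.cc
  assert(std::abs(x0 - 4.0 / 37.0) < 1e-14);
  assert(std::abs(x1 - 34.0 / 111.0) < 1e-14);
}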