samplefunctional.hh
/* -*- mode:c++; mode:semantic -*- */

#ifndef SAMPLE_FUNCTIONAL_HH
#define SAMPLE_FUNCTIONAL_HH

#include <dune/common/stdstreams.hh>

#include <dune/fufem/interval.hh>

#include <dune/tnnmg/problem-classes/bisection.hh>
#include <dune/tnnmg/problem-classes/directionalconvexfunction.hh>

#include "mynonlinearity.hh"
#include "nicefunction.hh"

namespace Dune {
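// Sample functional of the form
//   J(v) = <1/2 Av - b, v> + H(|v|),
// where A is a symmetric matrix, b a vector, and the nonsmooth part
// H(|.|) is provided by MyNonlinearity<dimension, Function>.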
template <int dimension, class Function = TrivialFunction>
class SampleFunctional {
public:
  typedef Dune::FieldVector<double, dimension> SmallVector;
  typedef Dune::FieldMatrix<double, dimension, dimension> SmallMatrix;

  typedef MyNonlinearity<dimension, Function> NonlinearityType;

  SampleFunctional(SmallMatrix _A, SmallVector _b) : A(_A), b(_b) {}

  double operator()(const SmallVector v) const {
    SmallVector y;
    A.mv(v, y);            // y = Av
    y /= 2;                // y = 1/2 Av
    y -= b;                // y = 1/2 Av - b
    return y * v + phi(v); // <1/2 Av - b,v> + H(|v|)
  }

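  // Compute a descent direction of J at x and return it in ret.
  // If no descent direction exists (x is taken to be stationary),
  // ret is set to the zero vector.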
  void descentDirection(const SmallVector x, SmallVector &ret) const {
    if (x == SmallVector(0.0)) {
      // If a descent direction exists at the origin, it is the negative smooth gradient
      SmallVector const d = smoothGradient(x);

      Interval<double> D;
      phi.directionalSubDiff(x, d, D);
      double const nonlinearDecline = D[1];
      double const smoothDecline = -(d * d);
      double const combinedDecline = smoothDecline + nonlinearDecline;

      if (combinedDecline < 0) {
        ret = d;
        ret *= -1;
      } else {
        ret = SmallVector(0.0);
      }
      return;
    }

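    // x != 0: compare the upper and lower semigradients with the radial
    // direction x. If the upper gradient points along +x and the lower
    // gradient along -x, x is stationary and the zero vector is returned.
    // Otherwise, descend along the negated upper gradient, the negated lower
    // gradient, or the negated projection of the smooth gradient onto the
    // plane orthogonal to x.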
    SmallVector const pg = upperGradient(x);
    SmallVector const mg = lowerGradient(x);
    double const pgx = pg * x;
    double const mgx = mg * x;
    // TODO: collinearity checks suck
    if (pgx == pg.two_norm() * x.two_norm() &&
        -mgx == mg.two_norm() * x.two_norm()) {
      ret = SmallVector(0.0);
      return;
    } else if (pgx >= 0 && mgx >= 0) {
      ret = pg;
      Dune::dverb << "## Directional derivative (as per scalar product w/ "
                     "semigradient): " << -(ret * mg)
                  << " (coordinates of the restriction)" << std::endl;
    } else if (pgx <= 0 && mgx <= 0) {
      ret = mg;
      Dune::dverb << "## Directional derivative (as per scalar product w/ "
                     "semigradient): " << -(ret * pg)
                  << " (coordinates of the restriction)" << std::endl;
    } else {
      ret = project(smoothGradient(x), x);
      Dune::dverb << "## Directional derivative (as per scalar product w/ "
                     "semigradient): " << -(ret * ret)
                  << " (coordinates of the restriction)" << std::endl;
    }
    ret *= -1;
  }

  SmallMatrix A;
  SmallVector b;
  NonlinearityType phi;

private:
  // Gradient of the smooth part
  SmallVector smoothGradient(const SmallVector x) const {
    SmallVector y;
    A.mv(x, y); // y = Av
    y -= b;     // y = Av - b
    return y;
  }

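  // Upper and lower semigradients of J: the smooth gradient plus the
  // corresponding one-sided gradient of the nonsmooth part phi.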
  SmallVector upperGradient(const SmallVector x) const {
    SmallVector y;
    phi.upperGradient(x, y);
    y += smoothGradient(x);
    return y;
  }

  SmallVector lowerGradient(const SmallVector x) const {
    SmallVector y;
    phi.lowerGradient(x, y);
    y += smoothGradient(x);
    return y;
  }

  // Orthogonal projection of z onto the complement of span{x}.
  // Note: no normalising is done!
  SmallVector project(const SmallVector z, const SmallVector x) const {
    SmallVector y = z;
    y.axpy(-(z * x) / x.two_norm2(), x);
    return y;
  }
};

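// Perform one step of minimisation for J starting from x: compute a descent
// direction, restrict J to the line through x along that direction, and
// minimise the restriction with a bisection method. The resulting correction
// along the descent direction is returned in corr.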
template <class Functional>
void minimise(const Functional J, const typename Functional::SmallVector x,
              typename Functional::SmallVector &corr) {
  typedef typename Functional::SmallVector SmallVector;
  SmallVector descDir;
  J.descentDirection(x, descDir);

  if (descDir == SmallVector(0.0)) {
    corr = SmallVector(0.0);
    return;
  }

  // {{{ Construct a restriction of J to the line x + t * descDir

  /* We have

     1/2 <A(u+xv),u+xv>-<b,u+xv> = 1/2 <Av,v> x^2 - <b-Au,v> x + <1/2 Au-b,u>

     since A is symmetric.
  */
  SmallVector tmp;

  J.A.mv(descDir, tmp);                // Av
  double const JRestA = tmp * descDir; // <Av,v>

  J.A.mv(x, tmp);                              // Au
  double const JRestb = (J.b - tmp) * descDir; // <b-Au,v>

  typedef typename Functional::NonlinearityType MyNonlinearityType;
  MyNonlinearityType phi = J.phi;
  typedef DirectionalConvexFunction<MyNonlinearityType>
      MyDirectionalConvexFunctionType;
  // FIXME: We cannot pass J.phi directly because the constructor
  // does not allow for constant arguments
  MyDirectionalConvexFunctionType JRest(JRestA, JRestb, phi, x, descDir);
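  // Up to an additive constant, the restriction now reads
  //   JRest(t) = 1/2 * JRestA * t^2 - JRestb * t + phi(x + t * descDir)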
  // }}}

  { // Debug
    Interval<double> D;
    JRest.subDiff(0, D);
    Dune::dverb
        << "## Directional derivative (as per subdifferential of restriction): "
        << D[1] << " (coordinates of the restriction)" << std::endl;
    // Otherwise we should not be minimising in this direction
    assert(D[1] <= 0);
  }

  // FIXME: The default parameters of the bisection method are used
  Bisection bisection;
  int count;
  // FIXME: The value of x_old should not matter if the factor is 1.0, correct?
  double const stepsize = bisection.minimize(JRest, 0.0, 1.0, count);
  Dune::dverb << "Number of iterations in the bisection method: " << count
              << std::endl;

  corr = descDir;
  corr *= stepsize;
}
} // namespace Dune
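
/*
  Usage sketch (illustrative only, not part of the interface): minimise a
  two-dimensional sample functional with the default TrivialFunction
  nonlinearity. The concrete numbers are made up for the example.

    Dune::FieldMatrix<double, 2, 2> A(0.0);
    A[0][0] = A[1][1] = 2.0;              // symmetric positive definite
    Dune::FieldVector<double, 2> b;
    b[0] = 1.0;
    b[1] = -2.0;

    Dune::SampleFunctional<2> J(A, b);    // J(v) = <1/2 Av - b, v> + H(|v|)

    Dune::FieldVector<double, 2> x(0.0);  // current iterate
    Dune::FieldVector<double, 2> corr;    // correction computed by minimise()
    Dune::minimise(J, x, corr);
    x += corr;                            // step along the descent direction
*/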
#endif