Skip to content
Snippets Groups Projects
Commit f983033e authored by Elias Pipping's avatar Elias Pipping Committed by Elias Pipping
Browse files

Move SampleFunction to separate header file

parent cb619c10
No related branches found
No related tags found
No related merge requests found
......@@ -7,7 +7,8 @@ noinst_PROGRAMS = \
bisection_example_new_SOURCES = \
bisection-example-new.cc \
mynonlinearity.cc \
properscalarincreasingconvexfunction.hh
properscalarincreasingconvexfunction.hh \
samplefunctional.hh
check-am:
./bisection-example-new
......
......@@ -5,142 +5,8 @@
#endif
#include <dune/common/exceptions.hh>
#include <dune/common/stdstreams.hh>
#include <dune/tnnmg/problem-classes/bisection.hh>
#include <dune/tnnmg/problem-classes/directionalconvexfunction.hh>
#include "mynonlinearity.cc"
#include "properscalarincreasingconvexfunction.hh"
template <int dimension, class Function = TrivialFunction>
class SampleFunctional {
public:
typedef Dune::FieldVector<double, dimension> SmallVector;
typedef Dune::FieldMatrix<double, dimension, dimension> SmallMatrix;
typedef Function FunctionType;
SampleFunctional(SmallMatrix _A, SmallVector _b) : A(_A), b(_b), func_() {}
double operator()(const SmallVector v) const {
SmallVector y;
A.mv(v, y); // y = Av
y /= 2; // y = 1/2 Av
y -= b; // y = 1/2 Av - b
return y * v + func_(v.two_norm()); // <1/2 Av - b,v> + H(|v|)
}
SmallVector descentDirection(const SmallVector x) const {
if (x == SmallVector(0.0)) {
SmallVector d = SmoothGrad(x);
// Decline of the smooth part in the negative gradient direction
double smoothDecline = -(d * d);
double nonlinearDecline =
func_.rightDifferential(0.0) * d.two_norm(); // TODO: is this correct?
double combinedDecline = smoothDecline + nonlinearDecline;
return (combinedDecline < 0) ? d : SmallVector(0.0);
}
SmallVector const pg = PlusGrad(x);
SmallVector const mg = MinusGrad(x);
SmallVector ret;
// TODO: collinearity checks suck
if (pg * x == pg.two_norm() * x.two_norm() &&
-(mg * x) == mg.two_norm() * x.two_norm()) {
return SmallVector(0);
} else if (pg * x >= 0 && mg * x >= 0) {
ret = pg;
} else if (pg * x <= 0 && mg * x <= 0) {
ret = mg;
} else {
ret = project(SmoothGrad(x), x);
}
ret *= -1;
return ret;
}
SmallMatrix A;
SmallVector b;
private:
Function func_;
// Gradient of the smooth part
SmallVector SmoothGrad(const SmallVector x) const {
SmallVector y;
A.mv(x, y); // y = Av
y -= b; // y = Av - b
return y;
}
SmallVector PlusGrad(const SmallVector x) const {
SmallVector y = SmoothGrad(x);
y.axpy(func_.rightDifferential(x.two_norm()) / x.two_norm(), x);
return y;
}
SmallVector MinusGrad(const SmallVector x) const {
SmallVector y = SmoothGrad(x);
y.axpy(func_.leftDifferential(x.two_norm()) / x.two_norm(), x);
return y;
}
// No normalising is done!
SmallVector project(const SmallVector z, const SmallVector x) const {
SmallVector y = z;
y.axpy(-(z * x) / x.two_norm2(), x);
return y;
}
};
// One minimisation step for J starting at x: compute a descent direction,
// restrict J to the ray x + t * direction, and minimise the restriction by
// bisection. The correction t * direction is returned through corr (zero if
// no descent direction exists).
//
// NOTE(review): J and x were previously passed by value, copying the whole
// functional (matrix included) per call; they are now const references.
template <class Functional>
void minimise(Functional const &J, typename Functional::SmallVector const &x,
              typename Functional::SmallVector &corr) {
  typedef typename Functional::SmallVector SmallVector;
  SmallVector descDir = J.descentDirection(x);
  if (descDir == SmallVector(0.0)) {
    corr = SmallVector(0.0);
    return;
  }

  // {{{ Construct a restriction of J to the line x + t * descDir
  /* We have
     1/2 <A(u+xv),u+xv>-<b,u+xv> = 1/2 <Av,v> x^2 - <b-Au,v> x + <1/2 Au-b,u>
     since A is symmetric.
  */
  SmallVector tmp;
  J.A.mv(descDir, tmp);                        // Av
  double const JRestA = tmp * descDir;         // <Av,v>
  J.A.mv(x, tmp);                              // Au
  double const JRestb = (J.b - tmp) * descDir; // <b-Au,v>

  typedef MyNonlinearity<SmallVector::dimension,
                         typename Functional::FunctionType> MyNonlinearityType;
  MyNonlinearityType phi;
  typedef DirectionalConvexFunction<MyNonlinearityType>
  MyDirectionalConvexFunctionType;
  MyDirectionalConvexFunctionType JRest(JRestA, JRestb, phi, x, descDir);
  // }}}

  // FIXME: default values are used
  Bisection bisection;
  int count;
  // FIXME: does x_old = 1 make any sense?!
  double const stepsize = bisection.minimize(JRest, 0.0, 1.0, count);
  Dune::dverb << "Number of iterations in the bisection method: " << count
              << std::endl;

  corr = descDir;
  corr *= stepsize;
}
#include "samplefunctional.hh"
template <int dim, class Function>
void functionTester(
......
/* -*- mode:c++; mode:semantic -*- */
#ifndef SAMPLE_FUNCTIONAL_HH
#define SAMPLE_FUNCTIONAL_HH
#include <dune/common/stdstreams.hh>
#include <dune/tnnmg/problem-classes/bisection.hh>
#include <dune/tnnmg/problem-classes/directionalconvexfunction.hh>
#include "mynonlinearity.cc"
#include "properscalarincreasingconvexfunction.hh"
template <int dimension, class Function = TrivialFunction>
class SampleFunctional {
public:
typedef Dune::FieldVector<double, dimension> SmallVector;
typedef Dune::FieldMatrix<double, dimension, dimension> SmallMatrix;
typedef Function FunctionType;
SampleFunctional(SmallMatrix _A, SmallVector _b) : A(_A), b(_b), func_() {}
double operator()(const SmallVector v) const {
SmallVector y;
A.mv(v, y); // y = Av
y /= 2; // y = 1/2 Av
y -= b; // y = 1/2 Av - b
return y * v + func_(v.two_norm()); // <1/2 Av - b,v> + H(|v|)
}
SmallVector descentDirection(const SmallVector x) const {
if (x == SmallVector(0.0)) {
SmallVector d = SmoothGrad(x);
// Decline of the smooth part in the negative gradient direction
double smoothDecline = -(d * d);
double nonlinearDecline =
func_.rightDifferential(0.0) * d.two_norm(); // TODO: is this correct?
double combinedDecline = smoothDecline + nonlinearDecline;
return (combinedDecline < 0) ? d : SmallVector(0.0);
}
SmallVector const pg = PlusGrad(x);
SmallVector const mg = MinusGrad(x);
SmallVector ret;
// TODO: collinearity checks suck
if (pg * x == pg.two_norm() * x.two_norm() &&
-(mg * x) == mg.two_norm() * x.two_norm()) {
return SmallVector(0);
} else if (pg * x >= 0 && mg * x >= 0) {
ret = pg;
} else if (pg * x <= 0 && mg * x <= 0) {
ret = mg;
} else {
ret = project(SmoothGrad(x), x);
}
ret *= -1;
return ret;
}
SmallMatrix A;
SmallVector b;
private:
Function func_;
// Gradient of the smooth part
SmallVector SmoothGrad(const SmallVector x) const {
SmallVector y;
A.mv(x, y); // y = Av
y -= b; // y = Av - b
return y;
}
SmallVector PlusGrad(const SmallVector x) const {
SmallVector y = SmoothGrad(x);
y.axpy(func_.rightDifferential(x.two_norm()) / x.two_norm(), x);
return y;
}
SmallVector MinusGrad(const SmallVector x) const {
SmallVector y = SmoothGrad(x);
y.axpy(func_.leftDifferential(x.two_norm()) / x.two_norm(), x);
return y;
}
// No normalising is done!
SmallVector project(const SmallVector z, const SmallVector x) const {
SmallVector y = z;
y.axpy(-(z * x) / x.two_norm2(), x);
return y;
}
};
// One minimisation step for J starting at x: compute a descent direction,
// restrict J to the ray x + t * direction, and minimise the restriction by
// bisection. The correction t * direction is returned through corr (zero if
// no descent direction exists).
//
// NOTE(review): J and x were previously passed by value, copying the whole
// functional (matrix included) per call; they are now const references.
template <class Functional>
void minimise(Functional const &J, typename Functional::SmallVector const &x,
              typename Functional::SmallVector &corr) {
  typedef typename Functional::SmallVector SmallVector;
  SmallVector descDir = J.descentDirection(x);
  if (descDir == SmallVector(0.0)) {
    corr = SmallVector(0.0);
    return;
  }

  // {{{ Construct a restriction of J to the line x + t * descDir
  /* We have
     1/2 <A(u+xv),u+xv>-<b,u+xv> = 1/2 <Av,v> x^2 - <b-Au,v> x + <1/2 Au-b,u>
     since A is symmetric.
  */
  SmallVector tmp;
  J.A.mv(descDir, tmp);                        // Av
  double const JRestA = tmp * descDir;         // <Av,v>
  J.A.mv(x, tmp);                              // Au
  double const JRestb = (J.b - tmp) * descDir; // <b-Au,v>

  typedef MyNonlinearity<SmallVector::dimension,
                         typename Functional::FunctionType> MyNonlinearityType;
  MyNonlinearityType phi;
  typedef DirectionalConvexFunction<MyNonlinearityType>
  MyDirectionalConvexFunctionType;
  MyDirectionalConvexFunctionType JRest(JRestA, JRestb, phi, x, descDir);
  // }}}

  // FIXME: default values are used
  Bisection bisection;
  int count;
  // FIXME: does x_old = 1 make any sense?!
  double const stepsize = bisection.minimize(JRest, 0.0, 1.0, count);
  Dune::dverb << "Number of iterations in the bisection method: " << count
              << std::endl;

  corr = descDir;
  corr *= stepsize;
}
#endif
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment