Commit 2e1d937b authored by Elias Pipping, committed by Elias Pipping

Handle case x = 0 in the modified gradient method

parent 53cb7ff4
@@ -49,6 +49,8 @@ class SampleFunctional {
   SmallVector minimise(const SmallVector x, unsigned int iterations) const {
     SmallVector descDir = ModifiedGradient(x);
+    if (descDir == SmallVector(0.0))
+      return SmallVector(0.0);
 
     Dune::dverb << "Starting at x with J(x) = " << operator()(x) << std::endl;
     Dune::dverb << "Minimizing in direction w with dJ(x,w) = "
@@ -129,9 +131,16 @@ class SampleFunctional {
   }
 
   SmallVector ModifiedGradient(const SmallVector x) const {
-    if (x == SmallVector(0.0))
-      // TODO
-      DUNE_THROW(Dune::Exception, "The case x = 0 is not yet handled.");
+    if (x == SmallVector(0.0)) {
+      SmallVector d = SmoothGrad(x);
+      // Decline of the smooth part in the negative gradient direction
+      double smoothDecline = -(d * d);
+      double nonlinearDecline =
+          func_.rightDifferential(0.0) * d.two_norm(); // TODO: is this correct?
+      double combinedDecline = smoothDecline + nonlinearDecline;
+
+      return (combinedDecline < 0) ? d : SmallVector(0.0);
+    }
 
     SmallVector const pg = PlusGrad(x);
     SmallVector const mg = MinusGrad(x);
...
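The check added to ModifiedGradient can be read as a one-sided descent test at the origin: with d = SmoothGrad(0) the gradient of the smooth part and func_.rightDifferential(0.0) the right derivative of the nonsmooth part at 0, the quantity -(d . d) + phi'_+(0) * |d| is the one-sided directional derivative of J at 0 along -d, so a step away from 0 can only decrease J if it is negative (the in-code TODO notes this reading is still to be confirmed). The following standalone sketch illustrates that test under these assumptions; the names (descentPossibleAtOrigin, phiRightDerivativeAtZero) are hypothetical and the snippet does not depend on Dune.

// Standalone sketch of the descent test at x = 0.
// Assumes J(x) = f(x) + phi(|x|) with smooth f and convex phi.
#include <array>
#include <cmath>
#include <iostream>

using Vector2 = std::array<double, 2>;

double dot(Vector2 const &a, Vector2 const &b) {
  return a[0] * b[0] + a[1] * b[1];
}

// Returns true if the one-sided directional derivative of J at the origin,
// taken along the negative smooth gradient, is negative, i.e. if an initial
// decrease of J away from 0 is possible in that direction.
bool descentPossibleAtOrigin(Vector2 const &smoothGradientAtZero,
                             double phiRightDerivativeAtZero) {
  double const gradNormSquared =
      dot(smoothGradientAtZero, smoothGradientAtZero);
  double const smoothDecline = -gradNormSquared;            // grad f(0) . (-d)
  double const nonlinearDecline =
      phiRightDerivativeAtZero * std::sqrt(gradNormSquared); // phi'_+(0) * |d|
  return smoothDecline + nonlinearDecline < 0;
}

int main() {
  Vector2 const d = {3.0, 4.0}; // |d| = 5
  std::cout << descentPossibleAtOrigin(d, 2.0) << '\n'; // -25 + 10 < 0  -> 1
  std::cout << descentPossibleAtOrigin(d, 6.0) << '\n'; // -25 + 30 >= 0 -> 0
  return 0;
}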