From dd6d89623492d350391744a877b5d4484888acff Mon Sep 17 00:00:00 2001
From: Elias Pipping <elias.pipping@fu-berlin.de>
Date: Fri, 9 Sep 2011 17:20:18 +0200
Subject: [PATCH] Use DirectionalConvexFunction<MyNonlinearityType> where
 MyNonlinearityType is MyNonlinearity<dimension, Function>

In doing this,

 * Make directionalDerivative() assume |dir| = 1 and
 * Kill pseudoDirectionalDerivative
---
 src/bisection-example-new.cc | 110 +++++++++++++++--------------------
 1 file changed, 47 insertions(+), 63 deletions(-)

diff --git a/src/bisection-example-new.cc b/src/bisection-example-new.cc
index ee6e3cb6..2215aea3 100644
--- a/src/bisection-example-new.cc
+++ b/src/bisection-example-new.cc
@@ -38,52 +38,44 @@ class SampleFunctional {
     return y * v + func_(v.two_norm()); // <1/2 Av - b,v> + H(|v|)
   }
 
+  // Assumes |dir| = 1.
   double directionalDerivative(const SmallVector x,
                                const SmallVector dir) const {
-    double norm = dir.two_norm();
+    assert(abs(dir.two_norm() - 1) < 1e-8);
 
-    if (norm == 0)
-      DUNE_THROW(Dune::Exception, "Directional derivatives cannot be computed "
-                                  "w.r.t. the zero-direction.");
-
-    SmallVector tmp = dir;
-    tmp /= norm;
+    if (x == SmallVector(0.0))
+      return func_.rightDifferential(0);
 
-    return pseudoDirectionalDerivative(x, tmp);
+    return (x * dir > 0) ? PlusGrad(x) * dir : MinusGrad(x) * dir;
   }
 
   SmallVector minimise(const SmallVector x, unsigned int iterations) const {
     SmallVector descDir = ModifiedGradient(x);
 
+    if (descDir == SmallVector(0.0))
+      return SmallVector(0.0);
+
     /* We have
 
        1/2 <A(u+xv),u+xv>-<b,u+xv>
      = 1/2 <Av,v> x^2 - <b-Au,v> x + <1/2 Au-b,u>
 
       since A is symmetric. */
+    SmallVector tmp2;
+    A_.mv(descDir, tmp2);
+    double const rest_A = tmp2 * descDir;
 
-    {
-      SmallVector tmp2;
-      A_.mv(descDir, tmp2);
-      double const rest_A = tmp2 * descDir;
+    SmallVector tmp3;
+    A_.mv(x, tmp3);
+    double const rest_b = (b_ - tmp3) * descDir;
 
-      SmallVector tmp3;
-      A_.mv(x, tmp3);
-      double const rest_b = (b_ - tmp3) * descDir;
+    typedef MyNonlinearity<dimension, Function> MyNonlinearityType;
+    MyNonlinearityType phi;
+    typedef DirectionalConvexFunction<MyNonlinearityType>
+        MyDirectionalConvexFunctionType;
+    MyDirectionalConvexFunctionType rest(rest_A, rest_b, phi, x, descDir);
 
-      typedef MyNonlinearity<dimension, SampleFunction> MyNonlinearityType;
-      MyNonlinearityType phi;
-      typedef DirectionalConvexFunction<MyNonlinearityType>
-          MyDirectionalConvexFunctionType;
-      MyDirectionalConvexFunctionType rest(rest_A, rest_b, phi, x, descDir);
-
-      // Experiment a bit
-      Interval<double> D;
-      rest.subDiff(0, D);
-    }
-
-    if (descDir == SmallVector(0.0))
-      return SmallVector(0.0);
+    Interval<double> D;
 
     // { Debugging
     Dune::dverb << "Starting at x with J(x) = " << operator()(x) << std::endl;
@@ -95,11 +87,9 @@ class SampleFunctional {
     double l = 0;
     double r = 1;
 
-    SmallVector tmp;
     while (true) {
-      tmp = x;
-      tmp.axpy(r, descDir);
-      if (pseudoDirectionalDerivative(tmp, descDir) >= 0)
+      rest.subDiff(r, D);
+      if (D[1] >= 0)
         break;
 
       l = r;
@@ -118,26 +108,26 @@ class SampleFunctional {
     }
 
     double m = l / 2 + r / 2;
-    SmallVector middle = SmallVector(0.0);
     for (unsigned int count = 0; count < iterations; ++count) {
-      Dune::dverb << "now at m = " << m << std::endl;
-      Dune::dverb << "Value of J here: " << operator()(x + middle) << std::endl;
-
-      middle = descDir;
-      middle *= m;
-
-      double pseudoDerivative =
-          pseudoDirectionalDerivative(x + middle, descDir);
-
-      if (pseudoDerivative < 0) {
+      { // Debugging
+        Dune::dverb << "now at m = " << m << std::endl;
+        SmallVector tmp = x;
+        tmp.axpy(m, descDir);
+        Dune::dverb << "Value of J here: " << operator()(tmp) << std::endl;
+      }
+
+      rest.subDiff(m, D);
+      if (D[1] < 0) {
         l = m;
         m = (m + r) / 2;
-      } else if (pseudoDerivative > 0) {
+      } else if (D[1] > 0) {
         r = m;
         m = (l + m) / 2;
       } else
         break;
     }
 
+    SmallVector middle = descDir;
+    middle *= m;
     return middle;
   }
@@ -167,20 +157,6 @@ class SampleFunctional {
     return y;
   }
 
-  // |dir|-times the directional derivative wrt dir/|dir|. If only
-  // the sign of the directionalDerivative matters, this saves the
-  // cost of normalising.
-  double pseudoDirectionalDerivative(const SmallVector x,
-                                     const SmallVector dir) const {
-    if (x == SmallVector(0.0))
-      return func_.rightDifferential(0) * dir.two_norm();
-
-    if (x * dir > 0)
-      return PlusGrad(x) * dir;
-    else
-      return MinusGrad(x) * dir;
-  }
-
   SmallVector ModifiedGradient(const SmallVector x) const {
     if (x == SmallVector(0.0)) {
       SmallVector d = SmoothGrad(x);
@@ -234,14 +210,20 @@ void testSampleFunction() {
 
   SampleFunctional J(A, b);
 
-  std::cout << J.directionalDerivative(b, b) << std::endl;
-  assert(J.directionalDerivative(b, b) == 2 * sqrt(5) + 2);
+  SampleFunctional::SmallVector c = b;
+  c /= c.two_norm();
+
+  assert(J.directionalDerivative(b, c) == 2 * sqrt(5) + 2);
 
   SampleFunctional::SmallVector start = b;
   start *= 17;
   SampleFunctional::SmallVector correction = J.minimise(start, 20);
   assert(J(start + correction) <= J(start));
-  assert(std::abs(J(start + correction) + 0.254644) < 1e-8);
+
+  start += correction;
+  correction = J.minimise(start, 20);
+
+  assert(std::abs(J(start + correction) + 0.254631) < 1e-6);
   std::cout << "Arrived at J(...) = " << J(start + correction) << std::endl;
 }
 
@@ -260,8 +242,10 @@ void testTrivialFunction() {
 
   SampleFunctional J(A, b);
 
-  std::cout << J.directionalDerivative(b, b) << std::endl;
-  assert(J.directionalDerivative(b, b) == 2 * sqrt(5));
+  SampleFunctional::SmallVector c = b;
+  c /= c.two_norm();
+
+  assert(J.directionalDerivative(b, c) == 2 * sqrt(5));
 
   SampleFunctional::SmallVector start = b;
   start *= 17;
-- 
GitLab