Commit dd6d8962 authored by Elias Pipping, committed by Elias Pipping

Use DirectionalConvexFunction<MyNonlinearityType>

where MyNonlinearityType is MyNonlinearity<dimension, Function>.

In doing this:
 * make directionalDerivative() assume |dir| = 1, and
 * kill pseudoDirectionalDerivative().
parent 4f2d3647
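For orientation: the class this commit adopts appears in the diff below only through its constructor and subDiff(). The sketch here shows the shape it is used with, namely the restriction psi(t) = 1/2 A t^2 - b t + phi(x + t dir) of the functional to a ray. This is a sketch under assumptions, not the actual dune-tectonic code: the Interval stand-in, the SmallVector alias, and the directionalSubDiff() method name on the nonlinearity are all hypothetical.

    // Sketch only; stand-ins for the real Interval and DirectionalConvexFunction.
    template <typename T> struct Interval {
      T data[2];
      T &operator[](int i) { return data[i]; }
      T operator[](int i) const { return data[i]; }
    };

    template <class NonlinearityType> class DirectionalConvexFunction {
    public:
      // Assumed alias: the nonlinearity exports its vector type.
      typedef typename NonlinearityType::SmallVector SmallVector;

      // psi(t) = 1/2 A t^2 - b t + phi(x + t dir), with scalar A and b.
      DirectionalConvexFunction(double A, double b, NonlinearityType const &phi,
                                SmallVector const &x, SmallVector const &dir)
          : A_(A), b_(b), phi_(phi), x_(x), dir_(dir) {}

      // Subdifferential of psi at t: the smooth quadratic part contributes
      // the single value A t - b; phi contributes an interval.
      void subDiff(double t, Interval<double> &D) const {
        SmallVector y = x_;
        y.axpy(t, dir_);                     // y = x + t dir
        phi_.directionalSubDiff(y, dir_, D); // hypothetical method name
        double const smooth = A_ * t - b_;
        D[0] += smooth;
        D[1] += smooth;
      }

    private:
      double A_, b_;
      NonlinearityType const &phi_;
      SmallVector x_, dir_;
    };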
@@ -38,52 +38,44 @@ class SampleFunctional {
     return y * v + func_(v.two_norm()); // <1/2 Av - b,v> + H(|v|)
   }
 
+  // Assumes |dir| = 1.
   double directionalDerivative(const SmallVector x,
                                const SmallVector dir) const {
-    double norm = dir.two_norm();
-
-    if (norm == 0)
-      DUNE_THROW(Dune::Exception, "Directional derivatives cannot be computed "
-                                  "w.r.t. the zero-direction.");
-
-    SmallVector tmp = dir;
-    tmp /= norm;
-
-    return pseudoDirectionalDerivative(x, tmp);
+    assert(abs(dir.two_norm() - 1) < 1e-8);
+
+    if (x == SmallVector(0.0))
+      return func_.rightDifferential(0);
+
+    return (x * dir > 0) ? PlusGrad(x) * dir : MinusGrad(x) * dir;
   }
 
   SmallVector minimise(const SmallVector x, unsigned int iterations) const {
     SmallVector descDir = ModifiedGradient(x);
+    if (descDir == SmallVector(0.0))
+      return SmallVector(0.0);
 
     /* We have
        1/2 <A(u+xv),u+xv>-<b,u+xv> = 1/2 <Av,v> x^2 - <b-Au,v> x + <1/2 Au-b,u>
       since A is symmetric.
     */
-    {
-      SmallVector tmp2;
-      A_.mv(descDir, tmp2);
-      double const rest_A = tmp2 * descDir;
-
-      SmallVector tmp3;
-      A_.mv(x, tmp3);
-      double const rest_b = (b_ - tmp3) * descDir;
-
-      typedef MyNonlinearity<dimension, SampleFunction> MyNonlinearityType;
-      MyNonlinearityType phi;
-      typedef DirectionalConvexFunction<MyNonlinearityType>
-          MyDirectionalConvexFunctionType;
-      MyDirectionalConvexFunctionType rest(rest_A, rest_b, phi, x, descDir);
-
-      // Experiment a bit
-      Interval<double> D;
-      rest.subDiff(0, D);
-    }
-
-    if (descDir == SmallVector(0.0))
-      return SmallVector(0.0);
+    SmallVector tmp2;
+    A_.mv(descDir, tmp2);
+    double const rest_A = tmp2 * descDir;
+
+    SmallVector tmp3;
+    A_.mv(x, tmp3);
+    double const rest_b = (b_ - tmp3) * descDir;
+
+    typedef MyNonlinearity<dimension, Function> MyNonlinearityType;
+    MyNonlinearityType phi;
+    typedef DirectionalConvexFunction<MyNonlinearityType>
+        MyDirectionalConvexFunctionType;
+    MyDirectionalConvexFunctionType rest(rest_A, rest_b, phi, x, descDir);
+
+    Interval<double> D;
 
     // { Debugging
     Dune::dverb << "Starting at x with J(x) = " << operator()(x) << std::endl;
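Written out with u = x, v = descDir, and step length t (the comment uses x for the step, which clashes with the vector x), the identity behind the two scalars reads, using the symmetry of A:

    1/2 <A(u+tv),u+tv> - <b,u+tv>
      = 1/2 <Av,v> t^2 - <b-Au,v> t + 1/2 <Au,u> - <b,u>

So rest_A = <A descDir, descDir> and rest_b = (b - Ax) * descDir are exactly the coefficients computed above, and the t-independent constant can be ignored by the line search.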
@@ -95,11 +87,9 @@ class SampleFunctional {
     double l = 0;
     double r = 1;
-    SmallVector tmp;
     while (true) {
-      tmp = x;
-      tmp.axpy(r, descDir);
-      if (pseudoDirectionalDerivative(tmp, descDir) >= 0)
+      rest.subDiff(r, D);
+      if (D[1] >= 0)
         break;
 
       l = r;
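This is the bracketing phase (the lines that grow r between iterations are elided here): it terminates with D[1] >= 0 at r, while D[1] < 0 held at every previously tested point l (and at l = 0, provided descDir is a genuine descent direction), so by convexity of rest a minimiser of rest lies in [l, r].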
@@ -118,26 +108,26 @@ class SampleFunctional {
     }
 
     double m = l / 2 + r / 2;
-    SmallVector middle = SmallVector(0.0);
     for (unsigned int count = 0; count < iterations; ++count) {
-      Dune::dverb << "now at m = " << m << std::endl;
-      Dune::dverb << "Value of J here: " << operator()(x + middle) << std::endl;
-
-      middle = descDir;
-      middle *= m;
-
-      double pseudoDerivative =
-          pseudoDirectionalDerivative(x + middle, descDir);
-
-      if (pseudoDerivative < 0) {
+      { // Debugging
+        Dune::dverb << "now at m = " << m << std::endl;
+        SmallVector tmp = x;
+        tmp.axpy(m, descDir);
+        Dune::dverb << "Value of J here: " << operator()(tmp) << std::endl;
+      }
+
+      rest.subDiff(m, D);
+      if (D[1] < 0) {
         l = m;
         m = (m + r) / 2;
-      } else if (pseudoDerivative > 0) {
+      } else if (D[1] > 0) {
         r = m;
         m = (l + m) / 2;
       } else
         break;
     }
+
+    SmallVector middle = descDir;
+    middle *= m;
     return middle;
   }
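Why testing only the upper endpoint D[1] suffices: for a convex function of one variable,

    ∂rest(m) = [rest'(m-), rest'(m+)] = [D[0], D[1]]

so D[1] is the one-sided derivative to the right of m. D[1] < 0 means rest still decreases past m, so the minimiser lies to the right (l = m); D[1] > 0 means rest increases past m, and by convexity the minimiser cannot lie to the right (r = m); D[1] = 0 means 0 ∈ ∂rest(m), i.e. m itself is a minimiser, so the loop breaks.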
@@ -167,20 +157,6 @@ class SampleFunctional {
     return y;
   }
 
-  // |dir|-times the directional derivative wrt dir/|dir|. If only
-  // the sign of the directionalDerivative matters, this saves the
-  // cost of normalising.
-  double pseudoDirectionalDerivative(const SmallVector x,
-                                     const SmallVector dir) const {
-    if (x == SmallVector(0.0))
-      return func_.rightDifferential(0) * dir.two_norm();
-
-    if (x * dir > 0)
-      return PlusGrad(x) * dir;
-    else
-      return MinusGrad(x) * dir;
-  }
-
   SmallVector ModifiedGradient(const SmallVector x) const {
     if (x == SmallVector(0.0)) {
       SmallVector d = SmoothGrad(x);
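With pseudoDirectionalDerivative() gone, normalisation is the caller's job; a minimal usage sketch, mirroring the test changes in the hunks below (J and x stand for a functional and a point already in scope):

    SampleFunctional::SmallVector dir = b; // any nonzero direction
    dir /= dir.two_norm();                 // establish |dir| = 1
    double const d = J.directionalDerivative(x, dir);

A caveat on the assert added above: unqualified abs can resolve to the integer overload from <cstdlib>; std::abs from <cmath> (or std::fabs) is the safer spelling for a double argument.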
@@ -234,14 +210,20 @@ void testSampleFunction() {
   SampleFunctional J(A, b);
 
-  std::cout << J.directionalDerivative(b, b) << std::endl;
-  assert(J.directionalDerivative(b, b) == 2 * sqrt(5) + 2);
+  SampleFunctional::SmallVector c = b;
+  c /= c.two_norm();
+  assert(J.directionalDerivative(b, c) == 2 * sqrt(5) + 2);
 
   SampleFunctional::SmallVector start = b;
   start *= 17;
   SampleFunctional::SmallVector correction = J.minimise(start, 20);
   assert(J(start + correction) <= J(start));
+  assert(std::abs(J(start + correction) + 0.254644) < 1e-8);
+
+  start += correction;
+  correction = J.minimise(start, 20);
+  assert(std::abs(J(start + correction) + 0.254631) < 1e-6);
 
   std::cout << "Arrived at J(...) = " << J(start + correction) << std::endl;
 }
@@ -260,8 +242,10 @@ void testTrivialFunction() {
   SampleFunctional J(A, b);
 
-  std::cout << J.directionalDerivative(b, b) << std::endl;
-  assert(J.directionalDerivative(b, b) == 2 * sqrt(5));
+  SampleFunctional::SmallVector c = b;
+  c /= c.two_norm();
+  assert(J.directionalDerivative(b, c) == 2 * sqrt(5));
 
   SampleFunctional::SmallVector start = b;
   start *= 17;
...