Commit 6f54b856 authored and committed by Elias Pipping

Hide some functions

parent 495ff77a
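The diff below moves ModifiedGradient() and project() out of the public part of SampleFunctional: the first hunk deletes them near the top of the class (old lines 45ff.), the second re-adds them verbatim further down (new lines 129ff.), next to the other gradient helpers. Given the commit message, the destination is presumably a private section; a rough sketch of the resulting class layout (the access specifiers are an assumption, since the hunk context does not show them):

template <int dimension> class SampleFunctional {
public:
  // directionalDerivative(), minimise(), ... remain the public interface

private: // assumed; this line is outside the visible hunk context
  // SmoothGrad(), PlusGrad(), MinusGrad(), ...
  // ModifiedGradient() and project() now live down here
};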
@@ -45,35 +45,6 @@ template <int dimension> class SampleFunctional {
     return MinusGrad(x) * dir;
   }
-
-  SmallVector ModifiedGradient(const SmallVector x) const {
-    if (x == SmallVector(0.0))
-      // TODO
-      DUNE_THROW(Dune::Exception, "The case x = 0 is not yet handled.");
-
-    SmallVector const pg = PlusGrad(x);
-    SmallVector const mg = MinusGrad(x);
-    SmallVector ret;
-    // TODO: collinearity checks suck
-    if (pg * x == pg.two_norm() * x.two_norm() &&
-        -(mg * x) == mg.two_norm() * x.two_norm()) {
-      return SmallVector(0);
-    } else if (pg * x >= 0 && mg * x >= 0) {
-      ret = pg;
-    } else if (pg * x <= 0 && mg * x <= 0) {
-      ret = mg;
-    } else {
-      ret = project(SmoothGrad(x), x);
-    }
-    ret *= -1;
-    return ret;
-  }
-
-  SmallVector project(const SmallVector z, const SmallVector x) const {
-    SmallVector y = z;
-    y.axpy(-(z * x) / x.two_norm2(), x);
-    return y;
-  }
 
   SmallVector minimise(const SmallVector x, unsigned int iterations) const {
     SmallVector descDir = ModifiedGradient(x);
@@ -158,6 +129,35 @@ template <int dimension> class SampleFunctional {
     y.axpy(HPrimeMinus(x.two_norm()) / x.two_norm(), x);
     return y;
   }
+
+  SmallVector ModifiedGradient(const SmallVector x) const {
+    if (x == SmallVector(0.0))
+      // TODO
+      DUNE_THROW(Dune::Exception, "The case x = 0 is not yet handled.");
+
+    SmallVector const pg = PlusGrad(x);
+    SmallVector const mg = MinusGrad(x);
+    SmallVector ret;
+    // TODO: collinearity checks suck
+    if (pg * x == pg.two_norm() * x.two_norm() &&
+        -(mg * x) == mg.two_norm() * x.two_norm()) {
+      return SmallVector(0);
+    } else if (pg * x >= 0 && mg * x >= 0) {
+      ret = pg;
+    } else if (pg * x <= 0 && mg * x <= 0) {
+      ret = mg;
+    } else {
+      ret = project(SmoothGrad(x), x);
+    }
+    ret *= -1;
+    return ret;
+  }
+
+  SmallVector project(const SmallVector z, const SmallVector x) const {
+    SmallVector y = z;
+    y.axpy(-(z * x) / x.two_norm2(), x);
+    return y;
+  }
 };
 
 int main() {
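As for the relocated code itself: ModifiedGradient() returns the negated PlusGrad(x) when both one-sided gradients have a nonnegative inner product with x, the negated MinusGrad(x) when both are nonpositive, and otherwise the negated projection of SmoothGrad(x) onto the plane orthogonal to x; when PlusGrad(x) points exactly along x and MinusGrad(x) exactly against it, it returns zero (hence the TODO about the collinearity checks). The helper project() supplies that tangential part via y = z - ((z * x) / |x|^2) x. A minimal standalone check of the orthogonality this produces, with plain doubles in place of the Dune SmallVector type (all names below are illustrative, not from the commit):

#include <array>
#include <cassert>
#include <cmath>
#include <iostream>

using Vec2 = std::array<double, 2>;

double dot(Vec2 const &a, Vec2 const &b) { return a[0] * b[0] + a[1] * b[1]; }

// Same formula as project() above: subtract from z its component along x,
// keeping only the part of z orthogonal to x.
Vec2 project(Vec2 const &z, Vec2 const &x) {
  double const factor = dot(z, x) / dot(x, x);
  return {z[0] - factor * x[0], z[1] - factor * x[1]};
}

int main() {
  Vec2 const z = {3.0, 4.0};
  Vec2 const x = {1.0, 0.0};
  Vec2 const y = project(z, x);
  assert(std::abs(dot(y, x)) < 1e-12); // no component along x remains
  std::cout << y[0] << " " << y[1] << "\n"; // prints: 0 4
}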
@@ -179,8 +179,6 @@ int main() {
   std::cout << J.directionalDerivative(b, b) << std::endl;
   assert(J.directionalDerivative(b, b) == 10 + 2 * sqrt(5));
 
-  SampleFunctional::SmallVector descDir = J.ModifiedGradient(b);
-
   SampleFunctional::SmallVector start = b;
   start *= 17;
   SampleFunctional::SmallVector correction = J.minimise(start, 20);
...
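The last hunk drops the direct ModifiedGradient() call from main(), presumably because the now-hidden function is no longer meant to be part of the interface the driver uses; main() goes only through the public minimise() instead. The body of minimise() is cut off above, but its visible first line takes ModifiedGradient(x) as a descent direction, so it plausibly runs a fixed-iteration descent loop of roughly the following shape (the toy objective and the constant step length are assumptions, not taken from the commit):

#include <array>
#include <iostream>

using Vec2 = std::array<double, 2>;

// Toy smooth objective standing in for the functional: f(x) = |x - (1, 2)|^2 / 2,
// whose steepest-descent direction is (1, 2) - x.
Vec2 descentDirection(Vec2 const &x) { return {1.0 - x[0], 2.0 - x[1]}; }

// Fixed-iteration descent of the shape suggested by the first visible line
// of minimise(); the constant step is an assumption, the real code may
// choose the step by a line search instead.
Vec2 minimise(Vec2 x, unsigned int iterations) {
  double const alpha = 0.5; // assumed fixed step length
  for (unsigned int i = 0; i < iterations; ++i) {
    Vec2 const dir = descentDirection(x);
    x[0] += alpha * dir[0];
    x[1] += alpha * dir[1];
  }
  return x;
}

int main() {
  // Mirrors the driver above: a start vector scaled by 17, 20 iterations.
  Vec2 const result = minimise({17.0, 17.0}, 20);
  std::cout << result[0] << " " << result[1] << "\n"; // converges to ~(1, 2)
}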