Skip to content
Snippets Groups Projects
Commit 6f54b856 authored by Elias Pipping's avatar Elias Pipping Committed by Elias Pipping
Browse files

Hide some functions

parent 495ff77a
No related branches found
No related tags found
No related merge requests found
......@@ -45,35 +45,6 @@ template <int dimension> class SampleFunctional {
return MinusGrad(x) * dir;
}
// Returns the steepest-descent direction of the (possibly nonsmooth)
// functional at x, i.e. minus a suitably chosen subgradient.
// Relies on PlusGrad/MinusGrad, which appear to be the one-sided
// (right/left) gradients of the functional at x — TODO confirm.
// Throws Dune::Exception for x = 0, where no direction is defined yet.
SmallVector ModifiedGradient(const SmallVector x) const {
if (x == SmallVector(0.0))
// TODO
DUNE_THROW(Dune::Exception, "The case x = 0 is not yet handled.");
SmallVector const pg = PlusGrad(x);
SmallVector const mg = MinusGrad(x);
SmallVector ret;
// TODO: collinearity checks suck
// Degenerate case: pg exactly parallel to x and mg exactly antiparallel
// to x (Cauchy-Schwarz equality, checked with exact floating-point
// comparison) — x is stationary, return the zero vector.
if (pg * x == pg.two_norm() * x.two_norm() &&
-(mg * x) == mg.two_norm() * x.two_norm()) {
return SmallVector(0);
} else if (pg * x >= 0 && mg * x >= 0) {
// Both one-sided gradients point "outward" along x: use the plus side.
ret = pg;
} else if (pg * x <= 0 && mg * x <= 0) {
// Both point "inward": use the minus side.
ret = mg;
} else {
// Mixed signs: restrict the smooth gradient to the plane orthogonal to x.
ret = project(SmoothGrad(x), x);
}
// Negate to turn the chosen (sub)gradient into a descent direction.
ret *= -1;
return ret;
}
// Orthogonally projects z onto the plane with normal x, i.e. returns
// z minus its component along x: z - ((z.x)/|x|^2) x.
// Precondition: x != 0 (otherwise two_norm2() is zero and we divide by 0).
// Changed to pass by const reference: the previous by-value parameters
// copied both vectors on every call for no benefit.
SmallVector project(const SmallVector& z, const SmallVector& x) const {
  SmallVector y = z;
  // axpy(a, x) performs y += a * x.
  y.axpy(-(z * x) / x.two_norm2(), x);
  return y;
}
SmallVector minimise(const SmallVector x, unsigned int iterations) const {
SmallVector descDir = ModifiedGradient(x);
......@@ -158,6 +129,35 @@ template <int dimension> class SampleFunctional {
y.axpy(HPrimeMinus(x.two_norm()) / x.two_norm(), x);
return y;
}
// Returns the steepest-descent direction of the (possibly nonsmooth)
// functional at x, i.e. minus a suitably chosen subgradient.
// Relies on PlusGrad/MinusGrad, which appear to be the one-sided
// (right/left) gradients of the functional at x — TODO confirm.
// Throws Dune::Exception for x = 0, where no direction is defined yet.
SmallVector ModifiedGradient(const SmallVector x) const {
if (x == SmallVector(0.0))
// TODO
DUNE_THROW(Dune::Exception, "The case x = 0 is not yet handled.");
SmallVector const pg = PlusGrad(x);
SmallVector const mg = MinusGrad(x);
SmallVector ret;
// TODO: collinearity checks suck
// Degenerate case: pg exactly parallel to x and mg exactly antiparallel
// to x (Cauchy-Schwarz equality, checked with exact floating-point
// comparison) — x is stationary, return the zero vector.
if (pg * x == pg.two_norm() * x.two_norm() &&
-(mg * x) == mg.two_norm() * x.two_norm()) {
return SmallVector(0);
} else if (pg * x >= 0 && mg * x >= 0) {
// Both one-sided gradients point "outward" along x: use the plus side.
ret = pg;
} else if (pg * x <= 0 && mg * x <= 0) {
// Both point "inward": use the minus side.
ret = mg;
} else {
// Mixed signs: restrict the smooth gradient to the plane orthogonal to x.
ret = project(SmoothGrad(x), x);
}
// Negate to turn the chosen (sub)gradient into a descent direction.
ret *= -1;
return ret;
}
// Orthogonally projects z onto the plane with normal x, i.e. returns
// z minus its component along x: z - ((z.x)/|x|^2) x.
// Precondition: x != 0 (otherwise two_norm2() is zero and we divide by 0).
// Changed to pass by const reference: the previous by-value parameters
// copied both vectors on every call for no benefit.
SmallVector project(const SmallVector& z, const SmallVector& x) const {
  SmallVector y = z;
  // axpy(a, x) performs y += a * x.
  y.axpy(-(z * x) / x.two_norm2(), x);
  return y;
}
};
int main() {
......@@ -179,8 +179,6 @@ int main() {
std::cout << J.directionalDerivative(b, b) << std::endl;
assert(J.directionalDerivative(b, b) == 10 + 2 * sqrt(5));
SampleFunctional::SmallVector descDir = J.ModifiedGradient(b);
SampleFunctional::SmallVector start = b;
start *= 17;
SampleFunctional::SmallVector correction = J.minimise(start, 20);
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment