diff --git a/src/samplefunctional.hh b/src/samplefunctional.hh
index 5fa5793feb8a64081aff60c208608b2a9c6a7a3d..6f2d2de496efa95f5670cb0b865739e6807061e6 100644
--- a/src/samplefunctional.hh
+++ b/src/samplefunctional.hh
@@ -31,7 +31,7 @@ class SampleFunctional {
 
   SmallVector descentDirection(const SmallVector x) const {
     if (x == SmallVector(0.0)) {
-      SmallVector d = SmoothGrad(x);
+      SmallVector d = smoothGradient(x);
       // Decline of the smooth part in the negative gradient direction
       double smoothDecline = -(d * d);
       double nonlinearDecline =
@@ -41,8 +41,8 @@ class SampleFunctional {
       return (combinedDecline < 0) ? d : SmallVector(0.0);
     }
 
-    SmallVector const pg = PlusGrad(x);
-    SmallVector const mg = MinusGrad(x);
+    SmallVector const pg = upperGradient(x);
+    SmallVector const mg = lowerGradient(x);
     SmallVector ret;
     // TODO: collinearity checks suck
     if (pg * x == pg.two_norm() * x.two_norm() &&
@@ -53,8 +53,8 @@ class SampleFunctional {
     } else if (pg * x <= 0 && mg * x <= 0) {
       ret = mg;
     } else {
-      ret = project(SmoothGrad(x), x);
+      ret = project(smoothGradient(x), x);
     }
     ret *= -1;
     return ret;
   }
@@ -66,21 +66,21 @@ class SampleFunctional {
   Function func_;
 
   // Gradient of the smooth part
-  SmallVector SmoothGrad(const SmallVector x) const {
+  SmallVector smoothGradient(const SmallVector x) const {
     SmallVector y;
     A.mv(x, y); // y = Av
     y -= b;     // y = Av - b
     return y;
   }
 
-  SmallVector PlusGrad(const SmallVector x) const {
-    SmallVector y = SmoothGrad(x);
+  SmallVector upperGradient(const SmallVector x) const {
+    SmallVector y = smoothGradient(x);
     y.axpy(func_.rightDifferential(x.two_norm()) / x.two_norm(), x);
     return y;
   }
 
-  SmallVector MinusGrad(const SmallVector x) const {
-    SmallVector y = SmoothGrad(x);
+  SmallVector lowerGradient(const SmallVector x) const {
+    SmallVector y = smoothGradient(x);
     y.axpy(func_.leftDifferential(x.two_norm()) / x.two_norm(), x);
     return y;
   }