diff --git a/src/bisection-example.cc b/src/bisection-example.cc
index e268cf1da196bfa452d0140ab77bb0a4862edf06..1ae3c95eef1e254be5d4fbfd1ea1539f11970e9a 100644
--- a/src/bisection-example.cc
+++ b/src/bisection-example.cc
@@ -45,35 +45,6 @@ template <int dimension> class SampleFunctional {
       return MinusGrad(x) * dir;
   }
 
-  SmallVector ModifiedGradient(const SmallVector x) const {
-    if (x == SmallVector(0.0))
-      // TODO
-      DUNE_THROW(Dune::Exception, "The case x = 0 is not yet handled.");
-
-    SmallVector const pg = PlusGrad(x);
-    SmallVector const mg = MinusGrad(x);
-    SmallVector ret;
-    // TODO: collinearity checks suck
-    if (pg * x == pg.two_norm() * x.two_norm() &&
-        -(mg * x) == mg.two_norm() * x.two_norm()) {
-      return SmallVector(0);
-    } else if (pg * x >= 0 && mg * x >= 0) {
-      ret = pg;
-    } else if (pg * x <= 0 && mg * x <= 0) {
-      ret = mg;
-    } else {
-      ret = project(SmoothGrad(x), x);
-    }
-    ret *= -1;
-    return ret;
-  }
-
-  SmallVector project(const SmallVector z, const SmallVector x) const {
-    SmallVector y = z;
-    y.axpy(-(z * x) / x.two_norm2(), x);
-    return y;
-  }
-
   SmallVector minimise(const SmallVector x, unsigned int iterations) const {
     SmallVector descDir = ModifiedGradient(x);
 
@@ -158,6 +129,40 @@ template <int dimension> class SampleFunctional {
     y.axpy(HPrimeMinus(x.two_norm()) / x.two_norm(), x);
     return y;
   }
+
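+  // Chooses a descent direction from the one-sided gradients: zero if
+  // PlusGrad points along x and MinusGrad against it (x is stationary),
+  // the negated one-sided gradient if both act with the same sign along
+  // x, and otherwise -SmoothGrad(x) projected orthogonally to x.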
+  SmallVector ModifiedGradient(const SmallVector x) const {
+    if (x == SmallVector(0.0))
+      // TODO
+      DUNE_THROW(Dune::Exception, "The case x = 0 is not yet handled.");
+
+    SmallVector const pg = PlusGrad(x);
+    SmallVector const mg = MinusGrad(x);
+    SmallVector ret;
+    // TODO: these exact collinearity checks are brittle in floating point.
+    if (pg * x == pg.two_norm() * x.two_norm() &&
+        -(mg * x) == mg.two_norm() * x.two_norm()) {
+      return SmallVector(0);
+    } else if (pg * x >= 0 && mg * x >= 0) {
+      ret = pg;
+    } else if (pg * x <= 0 && mg * x <= 0) {
+      ret = mg;
+    } else {
+      ret = project(SmoothGrad(x), x);
+    }
+    ret *= -1;
+    return ret;
+  }
+
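+  // Orthogonal projection of z onto the complement of x: z - ((z*x)/|x|^2) x.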
+  SmallVector project(const SmallVector z, const SmallVector x) const {
+    SmallVector y = z;
+    y.axpy(-(z * x) / x.two_norm2(), x);
+    return y;
+  }
 };
 
 int main() {
@@ -179,8 +184,6 @@ int main() {
     std::cout << J.directionalDerivative(b, b) << std::endl;
     assert(J.directionalDerivative(b, b) == 10 + 2 * sqrt(5));
 
-    SampleFunctional::SmallVector descDir = J.ModifiedGradient(b);
-
     SampleFunctional::SmallVector start = b;
     start *= 17;
     SampleFunctional::SmallVector correction = J.minimise(start, 20);