diff --git a/Src/BioTrackerPlugin.cpp b/Src/BioTrackerPlugin.cpp
index d683f9467c2435c5e9db23ac87db51cb4a6db636..12a7cc20f1815eef701739e3c4a78fd8d693682c 100644
--- a/Src/BioTrackerPlugin.cpp
+++ b/Src/BioTrackerPlugin.cpp
@@ -163,9 +163,8 @@ void BioTrackerPlugin::receiveAreaDescriptor(IModelAreaDescriptor* areaDescr)
     Q_EMIT emitAreaDescriptorUpdate(areaDescr);
 }
 
-void BioTrackerPlugin::receiveCurrentFrameFromMainApp(
-    std::shared_ptr<cv::Mat> mat,
-    uint                     frameNumber)
+void BioTrackerPlugin::receiveCurrentFrameFromMainApp(cv::Mat mat,
+                                                      uint    frameNumber)
 {
     qobject_cast<ControllerTrackingAlgorithm*>(m_TrackerController)
         ->doTracking(mat, frameNumber);
diff --git a/Src/BioTrackerPlugin.h b/Src/BioTrackerPlugin.h
index e194e8d679d86d20c84172c490542cd6d868d499..847b0470e17e39c332dfdfaf2c4c03474e5496ff 100644
--- a/Src/BioTrackerPlugin.h
+++ b/Src/BioTrackerPlugin.h
@@ -34,7 +34,7 @@ public:
 private:
     void connectInterfaces();
 signals:
-    void emitCvMat(std::shared_ptr<cv::Mat> mat, QString name);
+    void emitCvMat(cv::Mat mat, QString name);
     void emitTrackingDone(uint framenumber);
     void emitChangeDisplayImage(QString str);
     void emitAreaDescriptorUpdate(IModelAreaDescriptor* areaDescr);
@@ -69,8 +69,7 @@ public slots:
     void receiveCurrentFrameNumberFromMainApp(uint frameNumber);
 
 public slots:
-    void receiveCurrentFrameFromMainApp(std::shared_ptr<cv::Mat> mat,
-                                        uint                     frameNumber);
+    void receiveCurrentFrameFromMainApp(cv::Mat mat, uint frameNumber);
     void receiveAreaDescriptor(IModelAreaDescriptor* areaDescr);
 
 private:
diff --git a/Src/Controller/ControllerTrackingAlgorithm.cpp b/Src/Controller/ControllerTrackingAlgorithm.cpp
index 555a7f64b65b82bca023294b034bc2461d8be3e8..a3681018db366bcffd5295742efac93bdc64a7d6 100644
--- a/Src/Controller/ControllerTrackingAlgorithm.cpp
+++ b/Src/Controller/ControllerTrackingAlgorithm.cpp
@@ -24,8 +24,7 @@ void ControllerTrackingAlgorithm::connectControllerToController()
     m_TrackedTrajectoryMajor = ctrComponent->getModel();
 }
 
-void ControllerTrackingAlgorithm::doTracking(std::shared_ptr<cv::Mat> mat,
-                                             uint                     number)
+void ControllerTrackingAlgorithm::doTracking(cv::Mat mat, uint number)
 {
     qobject_cast<BioTrackerTrackingAlgorithm*>(m_Model)->doTracking(mat,
                                                                     number);
diff --git a/Src/Controller/ControllerTrackingAlgorithm.h b/Src/Controller/ControllerTrackingAlgorithm.h
index 9f8af37d6595ed20035629b59183e3ff207fd49d..eeb75a4e543ec827561fbe3a4aa176144a126428 100644
--- a/Src/Controller/ControllerTrackingAlgorithm.h
+++ b/Src/Controller/ControllerTrackingAlgorithm.h
@@ -28,7 +28,7 @@ public:
 public:
     void connectControllerToController() override;
 
-    void doTracking(std::shared_ptr<cv::Mat> mat, uint number);
+    void doTracking(cv::Mat mat, uint number);
 
     IView* getTrackingParameterWidget();
 
@@ -41,7 +41,7 @@ protected:
     void connectModelToController() override;
 
 Q_SIGNALS:
-    void emitCvMat(std::shared_ptr<cv::Mat> mat, QString name);
+    void emitCvMat(cv::Mat mat, QString name);
     void emitTrackingDone(uint framenumber);
     void emitChangeDisplayImage(QString str);
     void emitAreaDescriptorUpdate(IModelAreaDescriptor* areaDescr);
diff --git a/Src/Model/BioTrackerTrackingAlgorithm.cpp b/Src/Model/BioTrackerTrackingAlgorithm.cpp
index 094b2c20ab1b508c9caa3b23092ec7507ca21615..8b771a87aa290f9a2b04d6d0009d6b0cd4f75e85 100644
--- a/Src/Model/BioTrackerTrackingAlgorithm.cpp
+++ b/Src/Model/BioTrackerTrackingAlgorithm.cpp
@@ -1,274 +1,269 @@
-#include "BioTrackerTrackingAlgorithm.h"
-#include <future>
-#include "TrackedComponents/TrackedComponentFactory.h"
-#include <chrono>
-
-BioTrackerTrackingAlgorithm::BioTrackerTrackingAlgorithm(IController* parent,
-                                                         IModel* parameter,
-                                                         IModel* trajectory)
-: IModelTrackingAlgorithm(parent)
-, _ipp((TrackerParameter*) parameter)
-{
-    _cfg = static_cast<ControllerTrackingAlgorithm*>(parent)->getConfig();
-    _TrackingParameter      = (TrackerParameter*) parameter;
-    _TrackedTrajectoryMajor = (BST::TrackedTrajectory*) trajectory;
-    _nn2d = std::make_shared<NN2dMapper>(_TrackedTrajectoryMajor);
-
-    _bd = BlobsDetector();
-
-    _noFish = -1;
-
-    if (_cfg->DoNetwork) {
-        _listener = new TcpListener(this);
-        _listener->listen(QHostAddress::Any, _cfg->NetworkPort);
-        QObject::connect(_listener,
-                         SIGNAL(newConnection()),
-                         _listener,
-                         SLOT(acceptConnection()));
-    }
-
-    _lastImage       = nullptr;
-    _lastFramenumber = -1;
-}
-
-void BioTrackerTrackingAlgorithm::receiveAreaDescriptorUpdate(
-    IModelAreaDescriptor* areaDescr)
-{
-    _AreaInfo = areaDescr;
-    _bd.setAreaInfo(_AreaInfo);
-}
-
-BioTrackerTrackingAlgorithm::~BioTrackerTrackingAlgorithm()
-{
-}
-
-std::vector<FishPose> BioTrackerTrackingAlgorithm::getLastPositionsAsPose()
-{
-    // TODO: This seems kinda fragile: I just assume that the tree has this
-    // very certain structure:
-    // Trajectory -> M Trajectories -> N TrackedElements
-    // For every of M Trajectories grab the last (highest index) of
-    // TrackedElements.
-    // TODO: If we are tracking somewhere in the middle, this is bad. Do it by
-    // id!
-    std::vector<FishPose> last;
-    for (int i = 0; i < _TrackedTrajectoryMajor->size(); i++) {
-        BST::TrackedTrajectory* t = dynamic_cast<BST::TrackedTrajectory*>(
-            _TrackedTrajectoryMajor->getChild(i));
-        if (t && t->getValid() && !t->getFixed()) {
-            BST::TrackedElement* e = (BST::TrackedElement*) t->getLastChild();
-            last.push_back(e->getFishPose());
-        }
-    }
-    return last;
-}
-
-void BioTrackerTrackingAlgorithm::refreshPolygon()
-{
-}
-
-void BioTrackerTrackingAlgorithm::receiveParametersChanged()
-{
-    if (_lastFramenumber >= 0 && _lastImage && !_lastImage->empty()) {
-        doTracking(_lastImage, _lastFramenumber);
-    }
-}
-
-void BioTrackerTrackingAlgorithm::sendSelectedImage(
-    std::map<std::string, std::shared_ptr<cv::Mat>>* images)
-{
-
-    std::shared_ptr<cv::Mat> sendImage;
-    // Send forth whatever the user selected
-    switch (_TrackingParameter->getSendImage()) {
-    case 0: // Send none
-        Q_EMIT emitChangeDisplayImage("Original");
-        break;
-    case 1:
-        sendImage = images->find(std::string("Background"))->second;
-        Q_EMIT emitCvMatA(sendImage, QString("Background"));
-        Q_EMIT emitChangeDisplayImage(QString("Background"));
-        break;
-    case 2:
-        sendImage = images->find(std::string("Foreground Mask"))->second;
-        Q_EMIT emitCvMatA(sendImage, QString("Foreground Mask"));
-        Q_EMIT emitChangeDisplayImage(QString("Foreground Mask"));
-        break;
-    case 3:
-        sendImage = images->find(std::string("Opened Mask"))->second;
-        Q_EMIT emitCvMatA(sendImage, QString("Opened Mask"));
-        Q_EMIT emitChangeDisplayImage(QString("Opened Mask"));
-        break;
-    case 4:
-        sendImage = images->find(std::string("Closed Mask"))->second;
-        Q_EMIT emitCvMatA(sendImage, QString("Closed Mask"));
-        Q_EMIT emitChangeDisplayImage(QString("Closed Mask"));
-        break;
-    case 5:
-        sendImage = images->find(std::string("Masked Greyscale"))->second;
-        Q_EMIT emitCvMatA(sendImage, QString("Masked Greyscale"));
-        Q_EMIT emitChangeDisplayImage(QString("Masked Greyscale"));
-        break;
-    }
-}
-
-std::vector<BlobPose> BioTrackerTrackingAlgorithm::getContourCentroids(
-    cv::Mat image)
-{
-
-    std::vector<std::vector<cv::Point>> contours;
-    std::vector<cv::Vec4i>              hierarchy;
-    std::vector<BlobPose>               centroids;
-
-    cv::findContours(image,
-                     contours,
-                     hierarchy,
-                     cv::RETR_TREE,
-                     cv::CHAIN_APPROX_SIMPLE,
-                     cv::Point(0, 0));
-
-    for (auto x : contours) {
-        cv::Point2f c(0, 0);
-        float       i = 0;
-        for (auto y : x) {
-            c += cv::Point2f(y);
-            i++;
-        }
-        c.x = c.x / i;
-        c.y = c.y / i;
-
-        // cv::RotatedRect minEllipse;
-        cv::RotatedRect bb = minAreaRect(x);
-
-        // check if blob is in tracking area --> this can be optimized by
-        // checking earlier (only search blobs in tracking area)
-        if (!_AreaInfo->inTrackingArea(c)) {
-            continue;
-        }
-
-        BlobPose bc(_AreaInfo->pxToCm(c),
-                    c,
-                    bb.angle,
-                    bb.size.width,
-                    bb.size.height);
-
-        centroids.push_back(bc);
-    }
-
-    return centroids;
-}
-
-void BioTrackerTrackingAlgorithm::doTracking(std::shared_ptr<cv::Mat> p_image,
-                                             uint framenumber)
-{
-    _ipp.m_TrackingParameter = _TrackingParameter;
-    _lastImage               = p_image;
-    _lastFramenumber         = framenumber;
-
-    // dont do nothing if we ain't got an image
-    if (p_image->empty()) {
-        return;
-    }
-
-    if (_imageX != p_image->size().width ||
-        _imageY != p_image->size().height) {
-        _imageX = p_image->size().width;
-        _imageY = p_image->size().height;
-        Q_EMIT emitDimensionUpdate(_imageX, _imageY);
-    }
-
-    std::chrono::system_clock::time_point start =
-        std::chrono::system_clock::now();
-
-    // Refuse to run tracking if we have no area info...
-    if (_AreaInfo == nullptr) {
-        Q_EMIT emitTrackingDone(framenumber);
-        return;
-    }
-
-    // The user changed the # of fish. Reset the history and start over!
-    if (_noFish != _TrackedTrajectoryMajor->validCount()) {
-        _noFish = _TrackedTrajectoryMajor->validCount();
-        _nn2d   = std::make_shared<NN2dMapper>(_TrackedTrajectoryMajor);
-    }
-
-    if (_TrackingParameter->getResetBackground()) {
-        _TrackingParameter->setResetBackground(false);
-        _ipp.resetBackgroundImage();
-    }
-
-    // Do the preprocessing
-    std::map<std::string, std::shared_ptr<cv::Mat>> images = _ipp.preProcess(
-        p_image);
-    std::shared_ptr<cv::Mat> mask =
-        images.find(std::string("Closed Mask"))->second;
-    std::shared_ptr<cv::Mat> greyMat =
-        images.find(std::string("Greyscale"))->second;
-
-    // Find blobs via ellipsefitting
-    _bd.setMaxBlobSize(_TrackingParameter->getMaxBlobSize());
-    _bd.setMinBlobSize(_TrackingParameter->getMinBlobSize());
-
-    auto foo = *images.find(std::string("Masked Greyscale"))->second;
-
-    std::vector<std::vector<cv::Point>> contours;
-    std::vector<cv::Vec4i>              hierarchy;
-    cv::findContours(foo,
-                     contours,
-                     hierarchy,
-                     cv::RETR_TREE,
-                     cv::CHAIN_APPROX_SIMPLE,
-                     cv::Point(0, 0));
-
-    for (size_t i = 0; i < contours.size(); i++) {
-        drawContours(foo, contours, (int) i, cv::Scalar(255));
-    }
-
-    std::vector<BlobPose> blobs = getContourCentroids(*mask);
-
-    // Never switch the position of the trajectories. The NN2d mapper relies on
-    // this! If you mess up the order, add or remove some t, then create a new
-    // mapper.
-    std::vector<FishPose> fish = getLastPositionsAsPose();
-
-    // Find new positions using 2D nearest neighbor
-    std::tuple<std::vector<FishPose>, std::vector<float>> poses =
-        _nn2d->getNewPoses(_TrackedTrajectoryMajor, framenumber, blobs);
-
-    // Insert new poses into data structure
-    int trajNumber = 0;
-    for (int i = 0; i < _TrackedTrajectoryMajor->size(); i++) {
-        BST::TrackedTrajectory* t = dynamic_cast<BST::TrackedTrajectory*>(
-            _TrackedTrajectoryMajor->getChild(i));
-        if (t && t->getValid() && !t->getFixed()) {
-            BST::TrackedElement* e = new BST::TrackedElement(t,
-                                                             "n.a.",
-                                                             t->getId());
-
-            e->setFishPose(std::get<0>(poses)[trajNumber]);
-            e->setTime(start);
-            t->add(e, framenumber);
-            trajNumber++;
-        }
-    }
-
-    // Send forth new positions to the robotracker, if networking is enabled
-    if (_TrackingParameter->getDoNetwork()) {
-        std::vector<FishPose> ps = std::get<0>(poses);
-        _listener->sendPositions(framenumber,
-                                 ps,
-                                 std::vector<cv::Point2f>(),
-                                 start);
-    }
-
-    sendSelectedImage(&images);
-
-    // First the user still wants to see the original image, right?
-    if (framenumber == 1) {
-        Q_EMIT emitChangeDisplayImage("Original");
-    }
-
-    std::string newSel = _TrackingParameter->getNewSelection();
-
-    Q_EMIT emitTrackingDone(framenumber);
-}
+#include "BioTrackerTrackingAlgorithm.h"
+#include <future>
+#include "TrackedComponents/TrackedComponentFactory.h"
+#include <chrono>
+
+#include <tuple>
+#include <optional>
+
+BioTrackerTrackingAlgorithm::BioTrackerTrackingAlgorithm(IController* parent,
+                                                         IModel* parameter,
+                                                         IModel* trajectory)
+: IModelTrackingAlgorithm(parent)
+, _ipp((TrackerParameter*) parameter)
+, _lastImage(std::nullopt)
+{
+    _cfg = static_cast<ControllerTrackingAlgorithm*>(parent)->getConfig();
+    _TrackingParameter      = (TrackerParameter*) parameter;
+    _TrackedTrajectoryMajor = (BST::TrackedTrajectory*) trajectory;
+    _nn2d = std::make_shared<NN2dMapper>(_TrackedTrajectoryMajor);
+
+    _bd = BlobsDetector();
+
+    _noFish = -1;
+
+    if (_cfg->DoNetwork) {
+        _listener = new TcpListener(this);
+        _listener->listen(QHostAddress::Any, _cfg->NetworkPort);
+        QObject::connect(_listener,
+                         SIGNAL(newConnection()),
+                         _listener,
+                         SLOT(acceptConnection()));
+    }
+
+    _lastFramenumber = -1;
+}
+
+void BioTrackerTrackingAlgorithm::receiveAreaDescriptorUpdate(
+    IModelAreaDescriptor* areaDescr)
+{
+    _AreaInfo = areaDescr;
+    _bd.setAreaInfo(_AreaInfo);
+}
+
+BioTrackerTrackingAlgorithm::~BioTrackerTrackingAlgorithm()
+{
+}
+
+std::vector<FishPose> BioTrackerTrackingAlgorithm::getLastPositionsAsPose()
+{
+    // TODO: This seems kinda fragile: I just assume that the tree has this
+    // very certain structure:
+    // Trajectory -> M Trajectories -> N TrackedElements
+    // For every of M Trajectories grab the last (highest index) of
+    // TrackedElements.
+    // TODO: If we are tracking somewhere in the middle, this is bad. Do it by
+    // id!
+    std::vector<FishPose> last;
+    for (int i = 0; i < _TrackedTrajectoryMajor->size(); i++) {
+        BST::TrackedTrajectory* t = dynamic_cast<BST::TrackedTrajectory*>(
+            _TrackedTrajectoryMajor->getChild(i));
+        if (t && t->getValid() && !t->getFixed()) {
+            BST::TrackedElement* e = (BST::TrackedElement*) t->getLastChild();
+            last.push_back(e->getFishPose());
+        }
+    }
+    return last;
+}
+
+void BioTrackerTrackingAlgorithm::refreshPolygon()
+{
+}
+
+void BioTrackerTrackingAlgorithm::receiveParametersChanged()
+{
+    if (_lastFramenumber >= 0 && _lastImage && !_lastImage->empty()) {
+        doTracking(*_lastImage, _lastFramenumber);
+    }
+}
+
+void BioTrackerTrackingAlgorithm::sendSelectedImage(
+    QMap<QString, cv::Mat> images)
+{
+    auto index = _TrackingParameter->getSendImage();
+    auto name = [&]() -> QString {
+        switch (index) {
+        case 0:
+            return "Original";
+        case 1:
+            return "Background";
+        case 2:
+            return "Foreground Mask";
+        case 3:
+            return "Opened Mask";
+        case 4:
+            return "Closed Mask";
+        case 5:
+            return "Masked Greyscale";
+        default:
+            return "";
+        }
+    }();
+
+    if (name.isEmpty()) {
+        qCritical() << "Invalid tracking image";
+        return;
+    }
+
+    if (index != 0) {
+        Q_EMIT emitCvMatA(*images.find(name), name);
+    }
+
+    Q_EMIT emitChangeDisplayImage(name);
+}
+
+std::vector<BlobPose> BioTrackerTrackingAlgorithm::getContourCentroids(
+    cv::Mat image)
+{
+
+    std::vector<std::vector<cv::Point>> contours;
+    std::vector<cv::Vec4i>              hierarchy;
+    std::vector<BlobPose>               centroids;
+
+    cv::findContours(image,
+                     contours,
+                     hierarchy,
+                     cv::RETR_TREE,
+                     cv::CHAIN_APPROX_SIMPLE,
+                     cv::Point(0, 0));
+
+    for (auto x : contours) {
+        cv::Point2f c(0, 0);
+        float       i = 0;
+        for (auto y : x) {
+            c += cv::Point2f(y);
+            i++;
+        }
+        c.x = c.x / i;
+        c.y = c.y / i;
+
+        // cv::RotatedRect minEllipse;
+        cv::RotatedRect bb = minAreaRect(x);
+
+        // check if blob is in tracking area --> this can be optimized by
+        // checking earlier (only search blobs in tracking area)
+        if (!_AreaInfo->inTrackingArea(c)) {
+            continue;
+        }
+
+        BlobPose bc(_AreaInfo->pxToCm(c),
+                    c,
+                    bb.angle,
+                    bb.size.width,
+                    bb.size.height);
+
+        centroids.push_back(bc);
+    }
+
+    return centroids;
+}
+
+void BioTrackerTrackingAlgorithm::doTracking(cv::Mat image, uint framenumber)
+{
+    _ipp.m_TrackingParameter = _TrackingParameter;
+    _lastImage               = image;
+    _lastFramenumber         = framenumber;
+
+    // Nothing to do if we did not receive an image
+    if (image.empty()) {
+        return;
+    }
+
+    if (_imageX != image.size().width || _imageY != image.size().height) {
+        _imageX = image.size().width;
+        _imageY = image.size().height;
+        Q_EMIT emitDimensionUpdate(_imageX, _imageY);
+    }
+
+    std::chrono::system_clock::time_point start =
+        std::chrono::system_clock::now();
+
+    // Refuse to run tracking if we have no area info...
+    if (_AreaInfo == nullptr) {
+        Q_EMIT emitTrackingDone(framenumber);
+        return;
+    }
+
+    // The user changed the # of fish. Reset the history and start over!
+    if (_noFish != _TrackedTrajectoryMajor->validCount()) {
+        _noFish = _TrackedTrajectoryMajor->validCount();
+        _nn2d   = std::make_shared<NN2dMapper>(_TrackedTrajectoryMajor);
+    }
+
+    if (_TrackingParameter->getResetBackground()) {
+        _TrackingParameter->setResetBackground(false);
+        _ipp.resetBackgroundImage();
+    }
+
+    // Do the preprocessing
+    auto    images  = _ipp.preProcess(image);
+    cv::Mat mask    = *images.find("Closed Mask");
+    cv::Mat greyMat = *images.find("Greyscale");
+
+    // Find blobs via ellipsefitting
+    _bd.setMaxBlobSize(_TrackingParameter->getMaxBlobSize());
+    _bd.setMinBlobSize(_TrackingParameter->getMinBlobSize());
+
+    auto foo = *images.find("Masked Greyscale");
+
+    std::vector<std::vector<cv::Point>> contours;
+    std::vector<cv::Vec4i>              hierarchy;
+    cv::findContours(foo,
+                     contours,
+                     hierarchy,
+                     cv::RETR_TREE,
+                     cv::CHAIN_APPROX_SIMPLE,
+                     cv::Point(0, 0));
+
+    for (size_t i = 0; i < contours.size(); i++) {
+        drawContours(foo, contours, (int) i, cv::Scalar(255));
+    }
+
+    std::vector<BlobPose> blobs = getContourCentroids(mask);
+
+    // Never switch the position of the trajectories. The NN2d mapper relies on
+    // this! If you mess up the order, add or remove some t, then create a new
+    // mapper.
+    std::vector<FishPose> fish = getLastPositionsAsPose();
+
+    // Find new positions using 2D nearest neighbor
+    std::tuple<std::vector<FishPose>, std::vector<float>> poses =
+        _nn2d->getNewPoses(_TrackedTrajectoryMajor, framenumber, blobs);
+
+    // Insert new poses into data structure
+    int trajNumber = 0;
+    for (int i = 0; i < _TrackedTrajectoryMajor->size(); i++) {
+        BST::TrackedTrajectory* t = dynamic_cast<BST::TrackedTrajectory*>(
+            _TrackedTrajectoryMajor->getChild(i));
+        if (t && t->getValid() && !t->getFixed()) {
+            BST::TrackedElement* e = new BST::TrackedElement(t,
+                                                             "n.a.",
+                                                             t->getId());
+
+            e->setFishPose(std::get<0>(poses)[trajNumber]);
+            e->setTime(start);
+            t->add(e, framenumber);
+            trajNumber++;
+        }
+    }
+
+    // Send forth new positions to the robotracker, if networking is enabled
+    if (_TrackingParameter->getDoNetwork()) {
+        std::vector<FishPose> ps = std::get<0>(poses);
+        _listener->sendPositions(framenumber,
+                                 ps,
+                                 std::vector<cv::Point2f>(),
+                                 start);
+    }
+
+    sendSelectedImage(images);
+
+    // First the user still wants to see the original image, right?
+    if (framenumber == 1) {
+        Q_EMIT emitChangeDisplayImage("Original");
+    }
+
+    std::string newSel = _TrackingParameter->getNewSelection();
+
+    Q_EMIT emitTrackingDone(framenumber);
+}
diff --git a/Src/Model/BioTrackerTrackingAlgorithm.h b/Src/Model/BioTrackerTrackingAlgorithm.h
index 4fb104220d7215ceb718af6a592b6cc4e9a19286..4b916ca077674f1fd62cd89f23f6b6c066adbd18 100644
--- a/Src/Model/BioTrackerTrackingAlgorithm.h
+++ b/Src/Model/BioTrackerTrackingAlgorithm.h
@@ -1,73 +1,76 @@
-#ifndef BIOTRACKERTRACKINGALGORITHM_H
-#define BIOTRACKERTRACKINGALGORITHM_H
-
-#include "Interfaces/IModel/IModel.h"
-
-#include "TrackerParameter.h"
-
-#include <opencv2/opencv.hpp>
-#include "Interfaces/IModel/IModelTrackingAlgorithm.h"
-#include "Interfaces/IModel/IModelDataExporter.h"
-#include "TrackedComponents/TrackedElement.h"
-#include "TrackedComponents/TrackedTrajectory.h"
-#include "TrackingAlgorithm/imageProcessor/detector/blob/cvBlob/BlobsDetector.h"
-#include "TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.h"
-#include "../Controller/ControllerTrackingAlgorithm.h"
-#include "TrackingAlgorithm/NN2dMapper.h"
-#include "Interfaces/IModel/IModelAreaDescriptor.h"
-#include <iostream>
-
-#include "Network/TcpListener.h"
-#include "../Config.h"
-
-class BioTrackerTrackingAlgorithm : public IModelTrackingAlgorithm
-{
-    Q_OBJECT
-public:
-    BioTrackerTrackingAlgorithm(IController* parent,
-                                IModel*      parameter,
-                                IModel*      trajectory);
-    ~BioTrackerTrackingAlgorithm();
-
-Q_SIGNALS:
-    void emitCvMatA(std::shared_ptr<cv::Mat> image, QString name);
-    void emitDimensionUpdate(int x, int y);
-    void emitTrackingDone(uint framenumber);
-
-    // ITrackingAlgorithm interface
-public Q_SLOTS:
-    void doTracking(std::shared_ptr<cv::Mat> image, uint framenumber) override;
-    void receiveAreaDescriptorUpdate(IModelAreaDescriptor* areaDescr);
-    void receiveParametersChanged();
-
-private:
-    std::vector<BlobPose> getContourCentroids(cv::Mat image);
-    void                  refreshPolygon();
-    void                  sendSelectedImage(
-                         std::map<std::string, std::shared_ptr<cv::Mat>>* images);
-
-    std::vector<FishPose> getLastPositionsAsPose();
-
-    BST::TrackedTrajectory* _TrackedTrajectoryMajor;
-    TrackerParameter*       _TrackingParameter;
-    IModelAreaDescriptor*   _AreaInfo;
-
-    TcpListener* _listener;
-
-    ImagePreProcessor           _ipp;
-    BlobsDetector               _bd;
-    std::shared_ptr<NN2dMapper> _nn2d;
-
-    int _noFish;
-
-    // std::ofstream _ofs;
-
-    int _imageX;
-    int _imageY;
-
-    std::shared_ptr<cv::Mat> _lastImage;
-    uint                     _lastFramenumber;
-    Config*                  _cfg;
-};
-
-#endif // BIOTRACKERTRACKINGALGORITHM_H
+#ifndef BIOTRACKERTRACKINGALGORITHM_H
+#define BIOTRACKERTRACKINGALGORITHM_H
+
+#include "Interfaces/IModel/IModel.h"
+
+#include "TrackerParameter.h"
+
+#include <optional>
+
+#include <QMap>
+
+#include <opencv2/opencv.hpp>
+#include "Interfaces/IModel/IModelTrackingAlgorithm.h"
+#include "Interfaces/IModel/IModelDataExporter.h"
+#include "TrackedComponents/TrackedElement.h"
+#include "TrackedComponents/TrackedTrajectory.h"
+#include "TrackingAlgorithm/imageProcessor/detector/blob/cvBlob/BlobsDetector.h"
+#include "TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.h"
+#include "../Controller/ControllerTrackingAlgorithm.h"
+#include "TrackingAlgorithm/NN2dMapper.h"
+#include "Interfaces/IModel/IModelAreaDescriptor.h"
+#include <iostream>
+
+#include "Network/TcpListener.h"
+#include "../Config.h"
+
+class BioTrackerTrackingAlgorithm : public IModelTrackingAlgorithm
+{
+    Q_OBJECT
+public:
+    BioTrackerTrackingAlgorithm(IController* parent,
+                                IModel*      parameter,
+                                IModel*      trajectory);
+    ~BioTrackerTrackingAlgorithm();
+
+Q_SIGNALS:
+    void emitCvMatA(cv::Mat image, QString name);
+    void emitDimensionUpdate(int x, int y);
+    void emitTrackingDone(uint framenumber);
+
+    // ITrackingAlgorithm interface
+public Q_SLOTS:
+    void doTracking(cv::Mat image, uint framenumber) override;
+    void receiveAreaDescriptorUpdate(IModelAreaDescriptor* areaDescr);
+    void receiveParametersChanged();
+
+private:
+    std::vector<BlobPose> getContourCentroids(cv::Mat image);
+    void                  refreshPolygon();
+    void sendSelectedImage(QMap<QString, cv::Mat> images);
+
+    std::vector<FishPose> getLastPositionsAsPose();
+
+    BST::TrackedTrajectory* _TrackedTrajectoryMajor;
+    TrackerParameter*       _TrackingParameter;
+    IModelAreaDescriptor*   _AreaInfo;
+
+    TcpListener* _listener;
+
+    ImagePreProcessor           _ipp;
+    BlobsDetector               _bd;
+    std::shared_ptr<NN2dMapper> _nn2d;
+
+    int _noFish;
+
+    // std::ofstream _ofs;
+
+    int _imageX;
+    int _imageY;
+
+    std::optional<cv::Mat> _lastImage;
+    uint                   _lastFramenumber;
+    Config*                _cfg;
+};
+
+#endif // BIOTRACKERTRACKINGALGORITHM_H
diff --git a/Src/Model/TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.cpp b/Src/Model/TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.cpp
index 2a61ac8266a9e20352cbb906b9a2cdaf113c4f66..598f40420959ffa15c17fcb34e28c3219c404462 100644
--- a/Src/Model/TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.cpp
+++ b/Src/Model/TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.cpp
@@ -32,8 +32,8 @@ void ImagePreProcessor::init()
         qFatal("Unsupported background subtraction algorithm");
     }
 
-    m_backgroundImage = std::make_shared<cv::Mat>();
-    m_foregroundMask  = std::make_shared<cv::Mat>();
+    m_backgroundImage = cv::Mat();
+    m_foregroundMask  = cv::Mat();
 
     _backgroundSubtractionEnabled = true;
     _backgroundEnabled            = true;
@@ -96,56 +96,43 @@ cv::Mat ImagePreProcessor::backgroundSubtraction(cv::Mat& image)
 
     cv::Mat fgmask;
     m_subtractor->apply(image, fgmask, m_TrackingParameter->getLearningRate());
-    m_subtractor->getBackgroundImage(*m_backgroundImage);
+    m_subtractor->getBackgroundImage(m_backgroundImage);
     return fgmask;
 }
 
-std::map<std::string, std::shared_ptr<cv::Mat>> ImagePreProcessor::preProcess(
-    std::shared_ptr<cv::Mat> p_image)
+QMap<QString, cv::Mat> ImagePreProcessor::preProcess(cv::Mat image)
 {
-    std::shared_ptr<cv::Mat> greyMat    = std::make_shared<cv::Mat>();
-    std::shared_ptr<cv::Mat> openedMask = std::make_shared<cv::Mat>();
-    std::shared_ptr<cv::Mat> closedMask = std::make_shared<cv::Mat>();
+    cv::Mat greyMat;
+    cv::Mat openedMask;
+    cv::Mat closedMask;
 
-    std::shared_ptr<cv::Mat> maskedGrey = std::make_shared<cv::Mat>();
+    cv::Mat maskedGrey;
 
-    cv::cvtColor(*p_image, *greyMat, cv::COLOR_BGR2GRAY);
+    cv::cvtColor(image, greyMat, cv::COLOR_BGR2GRAY);
 
     // 1. step: do the background subtraction
-    *m_foregroundMask = backgroundSubtraction(*greyMat);
+    m_foregroundMask = backgroundSubtraction(greyMat);
 
     // 2. step: open the mask
-    *openedMask = dilate(
-        erode(*m_foregroundMask, m_TrackingParameter->getOpeningErosionSize()),
+    openedMask = dilate(
+        erode(m_foregroundMask, m_TrackingParameter->getOpeningErosionSize()),
         m_TrackingParameter->getOpeningDilationSize());
 
     // 3. step: close the image
-    *closedMask = erode(
-        dilate(*openedMask, m_TrackingParameter->getClosingDilationSize()),
+    closedMask = erode(
+        dilate(openedMask, m_TrackingParameter->getClosingDilationSize()),
         m_TrackingParameter->getClosingErosionSize());
 
     // 4. step: masked greyscale image
-    greyMat->copyTo(*maskedGrey, *closedMask);
-
-    std::map<std::string, std::shared_ptr<cv::Mat>> all;
-    all.insert(std::pair<std::string, std::shared_ptr<cv::Mat>>(
-        std::string("Greyscale"),
-        greyMat));
-    all.insert(std::pair<std::string, std::shared_ptr<cv::Mat>>(
-        std::string("Background"),
-        m_backgroundImage));
-    all.insert(std::pair<std::string, std::shared_ptr<cv::Mat>>(
-        std::string("Foreground Mask"),
-        m_foregroundMask));
-    all.insert(std::pair<std::string, std::shared_ptr<cv::Mat>>(
-        std::string("Opened Mask"),
-        openedMask));
-    all.insert(std::pair<std::string, std::shared_ptr<cv::Mat>>(
-        std::string("Closed Mask"),
-        closedMask));
-    all.insert(std::pair<std::string, std::shared_ptr<cv::Mat>>(
-        std::string("Masked Greyscale"),
-        maskedGrey));
+    greyMat.copyTo(maskedGrey, closedMask);
+
+    QMap<QString, cv::Mat> all;
+    all.insert("Greyscale", greyMat);
+    all.insert("Background", m_backgroundImage);
+    all.insert("Foreground Mask", m_foregroundMask);
+    all.insert("Opened Mask", openedMask);
+    all.insert("Closed Mask", closedMask);
+    all.insert("Masked Greyscale", maskedGrey);
 
     return all;
 }
diff --git a/Src/Model/TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.h b/Src/Model/TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.h
index 0acbcc4c4733142741cd514e826084541814ecbc..7773edd4622424576c10d39af5886c2e58f0e9d7 100644
--- a/Src/Model/TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.h
+++ b/Src/Model/TrackingAlgorithm/imageProcessor/preprocessor/ImagePreProcessor.h
@@ -1,6 +1,7 @@
 #pragma once
 
 #include <QSettings>
+#include <QMap>
 
 #include <opencv2/opencv.hpp>
 
@@ -59,8 +60,7 @@ public:
      * @param: image, image to process,
      * @return: a pre-process image.
      */
-    std::map<std::string, std::shared_ptr<cv::Mat>> preProcess(
-        std::shared_ptr<cv::Mat> p_image);
+    QMap<QString, cv::Mat> preProcess(cv::Mat p_image);
 
     /**
      * The method updates the image background.
@@ -72,8 +72,8 @@ public:
 private:
     cv::Mat _outputImage;
 
-    std::shared_ptr<cv::Mat> m_backgroundImage;
-    std::shared_ptr<cv::Mat> m_foregroundMask;
+    cv::Mat m_backgroundImage;
+    cv::Mat m_foregroundMask;
 
     // parameters for image pre-processing
     bool _backgroundSubtractionEnabled;