Skip to content
Snippets Groups Projects
Commit 708d3c3a authored by calrama's avatar calrama
Browse files

Replace std::shared_ptr<cv::Mat> with cv::Mat

cv::Mat is already reference counted.
parent d9cfb21d
No related branches found
No related tags found
No related merge requests found
Pipeline #51239 failed
......@@ -163,9 +163,8 @@ void BioTrackerPlugin::receiveAreaDescriptor(IModelAreaDescriptor* areaDescr)
Q_EMIT emitAreaDescriptorUpdate(areaDescr);
}
void BioTrackerPlugin::receiveCurrentFrameFromMainApp(
std::shared_ptr<cv::Mat> mat,
uint frameNumber)
void BioTrackerPlugin::receiveCurrentFrameFromMainApp(cv::Mat mat,
uint frameNumber)
{
qobject_cast<ControllerTrackingAlgorithm*>(m_TrackerController)
->doTracking(mat, frameNumber);
......
......@@ -34,7 +34,7 @@ public:
private:
void connectInterfaces();
signals:
void emitCvMat(std::shared_ptr<cv::Mat> mat, QString name);
void emitCvMat(cv::Mat mat, QString name);
void emitTrackingDone(uint framenumber);
void emitChangeDisplayImage(QString str);
void emitAreaDescriptorUpdate(IModelAreaDescriptor* areaDescr);
......@@ -69,8 +69,7 @@ public slots:
void receiveCurrentFrameNumberFromMainApp(uint frameNumber);
public slots:
void receiveCurrentFrameFromMainApp(std::shared_ptr<cv::Mat> mat,
uint frameNumber);
void receiveCurrentFrameFromMainApp(cv::Mat mat, uint frameNumber);
void receiveAreaDescriptor(IModelAreaDescriptor* areaDescr);
private:
......
......@@ -24,8 +24,7 @@ void ControllerTrackingAlgorithm::connectControllerToController()
m_TrackedTrajectoryMajor = ctrComponent->getModel();
}
void ControllerTrackingAlgorithm::doTracking(std::shared_ptr<cv::Mat> mat,
uint number)
void ControllerTrackingAlgorithm::doTracking(cv::Mat mat, uint number)
{
qobject_cast<BioTrackerTrackingAlgorithm*>(m_Model)->doTracking(mat,
number);
......
......@@ -28,7 +28,7 @@ public:
public:
void connectControllerToController() override;
void doTracking(std::shared_ptr<cv::Mat> mat, uint number);
void doTracking(cv::Mat mat, uint number);
IView* getTrackingParameterWidget();
......@@ -41,7 +41,7 @@ protected:
void connectModelToController() override;
Q_SIGNALS:
void emitCvMat(std::shared_ptr<cv::Mat> mat, QString name);
void emitCvMat(cv::Mat mat, QString name);
void emitTrackingDone(uint framenumber);
void emitChangeDisplayImage(QString str);
void emitAreaDescriptorUpdate(IModelAreaDescriptor* areaDescr);
......
......@@ -8,6 +8,7 @@ BioTrackerTrackingAlgorithm::BioTrackerTrackingAlgorithm(IController* parent,
IModel* trajectory)
: IModelTrackingAlgorithm(parent)
, _ipp((TrackerParameter*) parameter)
, _lastImage(std::nullopt)
{
_cfg = static_cast<ControllerTrackingAlgorithm*>(parent)->getConfig();
_TrackingParameter = (TrackerParameter*) parameter;
......@@ -27,7 +28,6 @@ BioTrackerTrackingAlgorithm::BioTrackerTrackingAlgorithm(IController* parent,
SLOT(acceptConnection()));
}
_lastImage = nullptr;
_lastFramenumber = -1;
}
......@@ -70,15 +70,15 @@ void BioTrackerTrackingAlgorithm::refreshPolygon()
void BioTrackerTrackingAlgorithm::receiveParametersChanged()
{
if (_lastFramenumber >= 0 && _lastImage && !_lastImage->empty()) {
doTracking(_lastImage, _lastFramenumber);
doTracking(*_lastImage, _lastFramenumber);
}
}
void BioTrackerTrackingAlgorithm::sendSelectedImage(
std::map<std::string, std::shared_ptr<cv::Mat>>* images)
std::map<std::string, cv::Mat>* images)
{
std::shared_ptr<cv::Mat> sendImage;
cv::Mat sendImage;
// Send forth whatever the user selected
switch (_TrackingParameter->getSendImage()) {
case 0: // Send none
......@@ -158,22 +158,20 @@ std::vector<BlobPose> BioTrackerTrackingAlgorithm::getContourCentroids(
return centroids;
}
void BioTrackerTrackingAlgorithm::doTracking(std::shared_ptr<cv::Mat> p_image,
uint framenumber)
void BioTrackerTrackingAlgorithm::doTracking(cv::Mat image, uint framenumber)
{
_ipp.m_TrackingParameter = _TrackingParameter;
_lastImage = p_image;
_lastImage = image;
_lastFramenumber = framenumber;
// dont do nothing if we ain't got an image
if (p_image->empty()) {
if (image.empty()) {
return;
}
if (_imageX != p_image->size().width ||
_imageY != p_image->size().height) {
_imageX = p_image->size().width;
_imageY = p_image->size().height;
if (_imageX != image.size().width || _imageY != image.size().height) {
_imageX = image.size().width;
_imageY = image.size().height;
Q_EMIT emitDimensionUpdate(_imageX, _imageY);
}
......@@ -198,18 +196,15 @@ void BioTrackerTrackingAlgorithm::doTracking(std::shared_ptr<cv::Mat> p_image,
}
// Do the preprocessing
std::map<std::string, std::shared_ptr<cv::Mat>> images = _ipp.preProcess(
p_image);
std::shared_ptr<cv::Mat> mask =
images.find(std::string("Closed Mask"))->second;
std::shared_ptr<cv::Mat> greyMat =
images.find(std::string("Greyscale"))->second;
std::map<std::string, cv::Mat> images = _ipp.preProcess(image);
cv::Mat mask = images.find(std::string("Closed Mask"))->second;
cv::Mat greyMat = images.find(std::string("Greyscale"))->second;
// Find blobs via ellipsefitting
_bd.setMaxBlobSize(_TrackingParameter->getMaxBlobSize());
_bd.setMinBlobSize(_TrackingParameter->getMinBlobSize());
auto foo = *images.find(std::string("Masked Greyscale"))->second;
auto foo = images.find(std::string("Masked Greyscale"))->second;
std::vector<std::vector<cv::Point>> contours;
std::vector<cv::Vec4i> hierarchy;
......@@ -224,7 +219,7 @@ void BioTrackerTrackingAlgorithm::doTracking(std::shared_ptr<cv::Mat> p_image,
drawContours(foo, contours, (int) i, cv::Scalar(255));
}
std::vector<BlobPose> blobs = getContourCentroids(*mask);
std::vector<BlobPose> blobs = getContourCentroids(mask);
// Never switch the position of the trajectories. The NN2d mapper relies on
// this! If you mess up the order, add or remove some t, then create a new
......
......@@ -5,6 +5,8 @@
#include "TrackerParameter.h"
#include <optional>
#include <opencv2/opencv.hpp>
#include "Interfaces/IModel/IModelTrackingAlgorithm.h"
#include "Interfaces/IModel/IModelDataExporter.h"
......@@ -30,21 +32,20 @@ public:
~BioTrackerTrackingAlgorithm();
Q_SIGNALS:
void emitCvMatA(std::shared_ptr<cv::Mat> image, QString name);
void emitCvMatA(cv::Mat image, QString name);
void emitDimensionUpdate(int x, int y);
void emitTrackingDone(uint framenumber);
// ITrackingAlgorithm interface
public Q_SLOTS:
void doTracking(std::shared_ptr<cv::Mat> image, uint framenumber) override;
void doTracking(cv::Mat image, uint framenumber) override;
void receiveAreaDescriptorUpdate(IModelAreaDescriptor* areaDescr);
void receiveParametersChanged();
private:
std::vector<BlobPose> getContourCentroids(cv::Mat image);
void refreshPolygon();
void sendSelectedImage(
std::map<std::string, std::shared_ptr<cv::Mat>>* images);
void sendSelectedImage(std::map<std::string, cv::Mat>* images);
std::vector<FishPose> getLastPositionsAsPose();
......@@ -65,9 +66,9 @@ private:
int _imageX;
int _imageY;
std::shared_ptr<cv::Mat> _lastImage;
uint _lastFramenumber;
Config* _cfg;
std::optional<cv::Mat> _lastImage;
uint _lastFramenumber;
Config* _cfg;
};
#endif // BIOTRACKERTRACKINGALGORITHM_H
......@@ -32,8 +32,8 @@ void ImagePreProcessor::init()
qFatal("Unsupported background subtraction algorithm");
}
m_backgroundImage = std::make_shared<cv::Mat>();
m_foregroundMask = std::make_shared<cv::Mat>();
m_backgroundImage = cv::Mat();
m_foregroundMask = cv::Mat();
_backgroundSubtractionEnabled = true;
_backgroundEnabled = true;
......@@ -96,56 +96,49 @@ cv::Mat ImagePreProcessor::backgroundSubtraction(cv::Mat& image)
cv::Mat fgmask;
m_subtractor->apply(image, fgmask, m_TrackingParameter->getLearningRate());
m_subtractor->getBackgroundImage(*m_backgroundImage);
m_subtractor->getBackgroundImage(m_backgroundImage);
return fgmask;
}
/**
 * Runs the full pre-processing pipeline on a single frame.
 *
 * Pipeline: greyscale conversion -> background subtraction ->
 * morphological opening -> morphological closing -> masked greyscale.
 * cv::Mat is reference counted, so storing the intermediates by value in
 * the returned map only shares headers, not pixel data.
 *
 * @param image input frame; must be 3-channel BGR (required by cvtColor
 *              with COLOR_BGR2GRAY)
 * @return map from stage name ("Greyscale", "Background",
 *         "Foreground Mask", "Opened Mask", "Closed Mask",
 *         "Masked Greyscale") to the image produced by that stage
 */
std::map<std::string, cv::Mat> ImagePreProcessor::preProcess(cv::Mat image)
{
    cv::Mat greyMat;
    cv::Mat openedMask;
    cv::Mat closedMask;
    cv::Mat maskedGrey;

    cv::cvtColor(image, greyMat, cv::COLOR_BGR2GRAY);

    // 1. step: do the background subtraction (also refreshes
    // m_backgroundImage via the subtractor inside backgroundSubtraction)
    m_foregroundMask = backgroundSubtraction(greyMat);

    // 2. step: open the mask (erode then dilate) to remove small noise
    openedMask = dilate(
        erode(m_foregroundMask, m_TrackingParameter->getOpeningErosionSize()),
        m_TrackingParameter->getOpeningDilationSize());

    // 3. step: close the mask (dilate then erode) to fill small holes
    closedMask = erode(
        dilate(openedMask, m_TrackingParameter->getClosingDilationSize()),
        m_TrackingParameter->getClosingErosionSize());

    // 4. step: greyscale image restricted to the closed mask
    greyMat.copyTo(maskedGrey, closedMask);

    // Brace-initialization replaces the repeated insert(pair(...))
    // boilerplate; copying a cv::Mat into the map is cheap (header only).
    return std::map<std::string, cv::Mat>{
        {"Greyscale", greyMat},
        {"Background", m_backgroundImage},
        {"Foreground Mask", m_foregroundMask},
        {"Opened Mask", openedMask},
        {"Closed Mask", closedMask},
        {"Masked Greyscale", maskedGrey}};
}
......
......@@ -59,8 +59,7 @@ public:
* @param: image, image to process,
* @return: a pre-process image.
*/
std::map<std::string, std::shared_ptr<cv::Mat>> preProcess(
std::shared_ptr<cv::Mat> p_image);
std::map<std::string, cv::Mat> preProcess(cv::Mat p_image);
/**
* The method updates the image background.
......@@ -72,8 +71,8 @@ public:
private:
cv::Mat _outputImage;
std::shared_ptr<cv::Mat> m_backgroundImage;
std::shared_ptr<cv::Mat> m_foregroundMask;
cv::Mat m_backgroundImage;
cv::Mat m_foregroundMask;
// parameters for image pre-processing
bool _backgroundSubtractionEnabled;
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment