diff --git a/Src/Model/AreaDescriptor/AreaInfoElement.cpp b/Src/Model/AreaDescriptor/AreaInfoElement.cpp
index 6dbb6f275b37debef282c93c4fb33a825d9ab9c3..5ce6c01b63598ab38153c0738a7a7553258829b0 100644
--- a/Src/Model/AreaDescriptor/AreaInfoElement.cpp
+++ b/Src/Model/AreaDescriptor/AreaInfoElement.cpp
@@ -1,7 +1,7 @@
 #include "AreaInfoElement.h"
 
-#include <cv.h>
-#include "opencv2/imgproc/imgproc.hpp"
+#include <opencv2/opencv.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
 
 AreaInfoElement::AreaInfoElement(int type) : 
     _areaType(BiotrackerTypes::AreaType::NONE),
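Note on the include change above: OpenCV 3 removed the C-era <cv.h> umbrella header, so the port replaces it with <opencv2/opencv.hpp>. A lighter alternative is to include only the modules a file actually needs; the sketch below assumes this class only uses core types and imgproc routines, which is not verified against the full source.

```cpp
// Sketch, assuming only core types and imgproc routines are used here.
// Per-module includes keep rebuilds faster than the opencv.hpp umbrella.
#include <opencv2/core.hpp>     // cv::Mat, cv::Point, cv::Rect
#include <opencv2/imgproc.hpp>  // cv::cvtColor, cv::pointPolygonTest, drawing
```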
diff --git a/Src/Model/AreaDescriptor/AreaInfoElement.h b/Src/Model/AreaDescriptor/AreaInfoElement.h
index cbc7b87be8b2eae5d13da0c8dca40b3b9f189905..5ed1d957329bc4432d9f602bee6193419524e8ea 100644
--- a/Src/Model/AreaDescriptor/AreaInfoElement.h
+++ b/Src/Model/AreaDescriptor/AreaInfoElement.h
@@ -1,11 +1,11 @@
 #pragma once
 #include "Interfaces/IModel/IModel.h"
-#include <cv.h>
+#include <opencv2/opencv.hpp>
 #include <QPoint>
 #include "util/types.h"
 
 class AreaInfoElement : public IModel
-{
+{
 	Q_OBJECT
 
 public:
diff --git a/Src/Model/ImageStream.cpp b/Src/Model/ImageStream.cpp
index 5799cc916bf8da8b4816a10b2d8ea94466f20829..10ca51ce4a1884df987b42a09e933272bfcdae56 100644
--- a/Src/Model/ImageStream.cpp
+++ b/Src/Model/ImageStream.cpp
@@ -315,8 +315,8 @@ namespace BioTracker {
 			void openMedia(std::vector<boost::filesystem::path> files){
 
 				m_capture.open(files.front().string());
-				m_num_frames = static_cast<size_t>(m_capture.get(CV_CAP_PROP_FRAME_COUNT));
-				m_fps = m_capture.get(CV_CAP_PROP_FPS);
+				m_num_frames = static_cast<size_t>(m_capture.get(cv::CAP_PROP_FRAME_COUNT));
+				m_fps = m_capture.get(cv::CAP_PROP_FPS);
 				m_fileName = files.front().string();
 
 				if (!boost::filesystem::exists(files.front())) {
@@ -335,8 +335,8 @@ namespace BioTracker {
                     m_fps = fps;
                 }
 
-				m_w = m_capture.get(CV_CAP_PROP_FRAME_WIDTH);
-				m_h = m_capture.get(CV_CAP_PROP_FRAME_HEIGHT);
+				m_w = m_capture.get(cv::CAP_PROP_FRAME_WIDTH);
+				m_h = m_capture.get(cv::CAP_PROP_FRAME_HEIGHT);
 				m_recording = false;
 				vCoder = std::make_shared<VideoCoder>(m_fps, _cfg);
 
@@ -367,7 +367,7 @@ namespace BioTracker {
 				}
 				else {
 					// adjust frame position ("0-based index of the frame to be decoded/captured next.")
-					m_capture.set(CV_CAP_PROP_POS_FRAMES, static_cast<double>(frame_number));
+					m_capture.set(cv::CAP_PROP_POS_FRAMES, static_cast<double>(frame_number));
 					return this->nextFrame_impl();
 				}
 			}
@@ -396,7 +396,7 @@ namespace BioTracker {
 			explicit ImageStream3OpenCVCamera(Config *cfg, CameraConfiguration conf)
 				: ImageStream(0, cfg)
 				, m_capture(conf._selector.index)
-				, m_fps(m_capture.get(CV_CAP_PROP_FPS)) {
+				, m_fps(m_capture.get(cv::CAP_PROP_FPS)) {
 				// Give the camera some extra time to get ready:
 				// Somehow opening it on first try sometimes does not succeed.
 				// Workaround: http://stackoverflow.com/questions/22019064/unable-to-read-frames-from-videocapture-from-secondary-webcam-with-opencv?rq=1
@@ -422,13 +422,13 @@ namespace BioTracker {
 					throw device_open_error(":(");
 				}
 
-				if (m_w != -1)     m_capture.set(CV_CAP_PROP_FRAME_WIDTH, m_w);
-				if (m_h != -1)     m_capture.set(CV_CAP_PROP_FRAME_HEIGHT, m_h);
-				if (m_fps != -1)   m_capture.set(CV_CAP_PROP_FPS, m_fps);
+				if (m_w != -1)     m_capture.set(cv::CAP_PROP_FRAME_WIDTH, m_w);
+				if (m_h != -1)     m_capture.set(cv::CAP_PROP_FRAME_HEIGHT, m_h);
+				if (m_fps != -1)   m_capture.set(cv::CAP_PROP_FPS, m_fps);
 
-				m_w = m_capture.get(CV_CAP_PROP_FRAME_WIDTH);
-				m_h = m_capture.get(CV_CAP_PROP_FRAME_HEIGHT);
-				m_fps = m_capture.get(CV_CAP_PROP_FPS);
+				m_w = m_capture.get(cv::CAP_PROP_FRAME_WIDTH);
+				m_h = m_capture.get(cv::CAP_PROP_FRAME_HEIGHT);
+				m_fps = m_capture.get(cv::CAP_PROP_FPS);
 				qDebug() << "Cam open: " << m_capture.isOpened() << " w/h:" << m_w << "/" << m_h << " fps:" << m_fps;
 				// load first image
 				if (this->numFrames() > 0) {
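The changes in ImageStream.cpp are mechanical: every CV_CAP_PROP_* macro from the removed C API maps one-to-one onto a cv::CAP_PROP_* enumerator declared in opencv2/videoio.hpp, and get()/set() keep their double-based interface. A minimal standalone sketch of the same calls, with a placeholder file name:

```cpp
#include <opencv2/videoio.hpp>
#include <cstdio>

int main() {
    cv::VideoCapture cap("example.avi");  // placeholder path
    if (!cap.isOpened()) return 1;

    // Only the constant names changed; values still come back as double.
    const auto   frames = static_cast<size_t>(cap.get(cv::CAP_PROP_FRAME_COUNT));
    const double fps    = cap.get(cv::CAP_PROP_FPS);
    const int    w      = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_WIDTH));
    const int    h      = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_HEIGHT));

    // Seeking still uses a 0-based frame index passed as a double.
    cap.set(cv::CAP_PROP_POS_FRAMES, 0.0);

    std::printf("%zu frames, %dx%d @ %.2f fps\n", frames, w, h, fps);
    return 0;
}
```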
diff --git a/Src/Model/MediaPlayer.cpp b/Src/Model/MediaPlayer.cpp
index 84215f06822ba6ca82ab49bbec1ffffab88b9f66..8c1e446d142b1d2e3b192a223eb1e2db38a0c39f 100644
--- a/Src/Model/MediaPlayer.cpp
+++ b/Src/Model/MediaPlayer.cpp
@@ -185,8 +185,8 @@ int MediaPlayer::reopenVideoWriter() {
 			_imagew = r.width();
 			_imageh = r.height();
 
-			int codec = CV_FOURCC('X', '2', '6', '4');
-			m_videoWriter = std::make_shared<cv::VideoWriter>(getTimeAndDate("./ViewCapture", ".avi"), codec, 30, CvSize(r.width(), r.height()), 1);
+			int codec = cv::VideoWriter::fourcc('X', '2', '6', '4');
+			m_videoWriter = std::make_shared<cv::VideoWriter>(getTimeAndDate("./ViewCapture", ".avi"), codec, 30, cv::Size(r.width(), r.height()), 1);
 			m_recd = m_videoWriter->isOpened();
 		}
 	}
@@ -273,7 +273,7 @@ void MediaPlayer::receivePlayerParameters(playerParameters* param) {
             auto view = cv::Mat(m_image.height(), m_image.width(), CV_8UC(m_image.depth() / 8), m_image.bits(), m_image.bytesPerLine());
 
             auto copy = std::make_shared<cv::Mat>(view.clone());
-            cv::cvtColor(*copy, *copy, CV_BGR2RGB);
+            cv::cvtColor(*copy, *copy, cv::COLOR_BGR2RGB);
             m_videoc->add(copy);
         }
     }
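Two legacy pieces disappear in MediaPlayer.cpp: the CV_FOURCC macro becomes the static cv::VideoWriter::fourcc() helper, and the C struct CvSize becomes cv::Size. A hedged standalone sketch of the writer setup; the file name, frame size and frame rate are placeholders, and whether an X264 encoder is available depends on the local FFmpeg build:

```cpp
#include <opencv2/videoio.hpp>
#include <opencv2/imgproc.hpp>

int main() {
    const int codec = cv::VideoWriter::fourcc('X', '2', '6', '4');
    cv::VideoWriter writer("ViewCapture.avi", codec, 30.0, cv::Size(640, 480), true);
    if (!writer.isOpened()) return 1;

    // The color-conversion codes moved into the cv::COLOR_* enum as well.
    cv::Mat bgr(480, 640, CV_8UC3, cv::Scalar(0, 0, 255)), rgb;
    cv::cvtColor(bgr, rgb, cv::COLOR_BGR2RGB);

    writer.write(bgr);  // VideoWriter expects BGR frames by convention
    return 0;
}
```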
diff --git a/Src/Model/TextureObject.cpp b/Src/Model/TextureObject.cpp
index 91a09ea02fcdfa969ffbd2e0d02aca0daed728f9..781939beb5e1f25ab48f89a6412af5b5199e5b34 100644
--- a/Src/Model/TextureObject.cpp
+++ b/Src/Model/TextureObject.cpp
@@ -15,7 +15,7 @@ void TextureObject::set(const cv::Mat &img) {
 		return;
     if (img.channels() == 3) {
         img.convertTo(img, CV_8UC3);
-        cv::cvtColor(img, m_img, CV_BGR2RGB);
+        cv::cvtColor(img, m_img, cv::COLOR_BGR2RGB);
     } else if (img.channels() == 1) {
         // convert grayscale to "color"
         cv::Mat img8U;
@@ -36,7 +36,7 @@ void TextureObject::set(const cv::Mat &img) {
             const double convertedMin = abs(static_cast<int>(min * sizeRatio));
             img.convertTo(img8U, CV_8U, sizeRatio, convertedMin);
         }
-        cv::cvtColor(img8U, m_img, CV_GRAY2RGB);
+        cv::cvtColor(img8U, m_img, cv::COLOR_GRAY2RGB);
     } else {
         m_img = img;
     }
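In OpenCV 3/4 the conversion constants used in TextureObject.cpp are enumerators of cv::ColorConversionCodes; the unscoped cv::COLOR_BGR2RGB / cv::COLOR_GRAY2RGB spelling is equivalent and is the one the official samples use. A minimal sketch of the grayscale branch, with made-up image data:

```cpp
#include <opencv2/imgproc.hpp>

int main() {
    cv::Mat gray(64, 64, CV_8UC1, cv::Scalar(128));  // placeholder input
    cv::Mat rgb;

    // CV_GRAY2RGB -> cv::COLOR_GRAY2RGB: replicates the single channel three times.
    cv::cvtColor(gray, rgb, cv::COLOR_GRAY2RGB);
    return rgb.channels() == 3 ? 0 : 1;
}
```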
diff --git a/Src/View/AreaDesciptor/AreaDescriptor.h b/Src/View/AreaDesciptor/AreaDescriptor.h
index 8a2e198c551f2c5ba457e16bd6cf79e22ac4a950..aea4dfad986e0eceb863bd9b2b1f802b3551a76b 100644
--- a/Src/View/AreaDesciptor/AreaDescriptor.h
+++ b/Src/View/AreaDesciptor/AreaDescriptor.h
@@ -1,7 +1,7 @@
 #pragma once
 
 #include "Interfaces/IView/IViewTrackedComponent.h"
-#include <cv.h>
+#include <opencv2/opencv.hpp>
 #include "util/Config.h"
 
 class AreaDescriptor : public QObject, public IView, public QGraphicsItem
diff --git a/Src/View/AreaDesciptor/RectDescriptor.h b/Src/View/AreaDesciptor/RectDescriptor.h
index d05b167265abcade413a9de58f8f2d099b8f1e9b..4499390bf8159ad473aa6f40b1887da07e3941e7 100644
--- a/Src/View/AreaDesciptor/RectDescriptor.h
+++ b/Src/View/AreaDesciptor/RectDescriptor.h
@@ -1,7 +1,7 @@
 #pragma once
 
 #include "AreaDescriptor.h"
-#include "cv.h"
+#include <opencv2/opencv.hpp>
 #include <QBrush>
 #include "util/types.h"
 #include <QObject>
diff --git a/Src/View/CameraDevice.cpp b/Src/View/CameraDevice.cpp
index d8391415fdf050e188c8d71812b5c4fdf02a8a3f..2a2af7578deca678f352f6adc68f8bbff73765b0 100644
--- a/Src/View/CameraDevice.cpp
+++ b/Src/View/CameraDevice.cpp
@@ -66,11 +66,11 @@ void CameraDevice::on_showPreviewButton_clicked()
             std::this_thread::sleep_for(std::chrono::milliseconds(100));
 
         if (conf._width != -1)
-            m_capture.set(CV_CAP_PROP_FRAME_WIDTH, conf._width);
+            m_capture.set(cv::CAP_PROP_FRAME_WIDTH, conf._width);
         if (conf._height != -1)
-            m_capture.set(CV_CAP_PROP_FRAME_HEIGHT, conf._height);
+            m_capture.set(cv::CAP_PROP_FRAME_HEIGHT, conf._height);
         if (conf._fps != -1)
-            m_capture.set(CV_CAP_PROP_FPS, conf._fps);
+            m_capture.set(cv::CAP_PROP_FPS, conf._fps);
 
         if (!m_capture.isOpened())
         {
@@ -151,12 +151,12 @@ void CameraDevice::listAllCameras()
     }
 
     {
-        cv::VideoCapture ximea_camera(CV_CAP_XIAPI);
+        cv::VideoCapture ximea_camera(cv::CAP_XIAPI);
         if (ximea_camera.isOpened())
         {
             ui->comboBox->addItem(
                 "XIMEA default",
-                QVariant::fromValue(CameraSelector{CameraType::OpenCV, CV_CAP_XIAPI}));
+                QVariant::fromValue(CameraSelector{CameraType::OpenCV, cv::CAP_XIAPI}));
         }
     }
 
diff --git a/Src/util/VideoCoder.cpp b/Src/util/VideoCoder.cpp
index 00f890723ee519c88640efc0e2a01204293abb12..0b3ce6101d6342c32c04b37041651c2576fbbef1 100644
--- a/Src/util/VideoCoder.cpp
+++ b/Src/util/VideoCoder.cpp
@@ -180,8 +180,8 @@ int VideoCoder::toggle(int w, int h, double fps) {
 
 		//Check which one to use
 		if (codecStr == "X264") {
-			int codec = CV_FOURCC('X', 'V', 'I', 'D');
-			vWriter = std::make_shared<cv::VideoWriter>(getTimeAndDate(videoDir+"CameraCapture", ".avi"), codec, fps, CvSize(w, h), 1);
+			int codec = cv::VideoWriter::fourcc('X', 'V', 'I', 'D');
+			vWriter = std::make_shared<cv::VideoWriter>(getTimeAndDate(videoDir+"CameraCapture", ".avi"), codec, fps, cv::Size(w, h), 1);
 			m_recording = vWriter->isOpened();
             vWriter->set(cv::VIDEOWRITER_PROP_QUALITY, 100);
 			std::cout << "Video is open:" << m_recording << std::endl;