Better memory management of images between C++ and C#; fixed the OpenFaceDemo crash.

Tadas Baltrusaitis
2018-08-03 11:06:40 +01:00
parent 65b2b7c7aa
commit 3e5fa5cc71
6 changed files with 15 additions and 89 deletions
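
The changes below all follow one pattern: the C# front-ends stop making an extra managed copy of every frame with `new RawImage(...)`, because the C++/CLI readers now return a freshly allocated RawImage on each call, with the underlying cv::Mat cloned on the native side. A minimal C# sketch of the consumption pattern after this commit, assuming the SequenceReader and RawImage wrapper types from this repository's UtilitiesOF and OpenCVWrappers projects (the ProcessOneFrame method is purely illustrative, not code from the repo):

    using OpenCVWrappers;   // RawImage wrapper around cv::Mat
    using UtilitiesOF;      // C++/CLI sequence/image readers

    class FrameConsumer
    {
        static void ProcessOneFrame(SequenceReader reader)
        {
            // Before this commit the GUIs defensively copied each frame:
            //   RawImage frame = new RawImage(reader.GetNextImage());
            // Each call now already returns a new RawImage backed by its own
            // cloned cv::Mat, so the returned object is used directly.
            RawImage frame = reader.GetNextImage();
            RawImage gray_frame = reader.GetCurrentFrameGray();

            // ... landmark detection, face analysis and recording consume
            // frame and gray_frame here, exactly as in the diffs below ...
        }
    }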

View File

@@ -417,8 +417,7 @@ namespace HeadPoseLive
// Add objects to recording queues
List<float> pose = new List<float>();
face_model.GetPose(pose, reader.GetFx(), reader.GetFy(), reader.GetCx(), reader.GetCy());
RawImage image = new RawImage(frame);
recording_objects.Enqueue(new Tuple<RawImage, bool, List<float>>(image, detectionSucceeding, pose));
recording_objects.Enqueue(new Tuple<RawImage, bool, List<float>>(frame, detectionSucceeding, pose));
}
}

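In HeadPoseLive the intermediate copy taken before a frame is pushed onto the recording queue is no longer needed. With the old wrapper, consecutive reads reused (and overwrote) a single shared RawImage as long as the frame size did not change, so a queued frame had to be copied first; now every read produces an independent object. An illustrative C# check of that contract (not code from the repository; `reader` is the same capture object used above):

    // Illustrative only: under the reused-buffer scheme removed by this commit,
    // both variables below referenced the same RawImage object, so the first
    // frame's pixels were silently replaced when the next one was read. After
    // this commit each call returns a distinct image, which is what makes
    // enqueueing "frame" directly (as above) safe.
    RawImage first = reader.GetNextImage();
    RawImage second = reader.GetNextImage();
    bool independent = !object.ReferenceEquals(first, second);   // true after this commit
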
View File

@@ -224,8 +224,8 @@ namespace OpenFaceDemo
{
// Loading an image file
RawImage frame = new RawImage(reader.GetNextImage());
RawImage gray_frame = new RawImage(reader.GetCurrentFrameGray());
RawImage frame = reader.GetNextImage();
RawImage gray_frame = reader.GetCurrentFrameGray();
lastFrameTime = CurrentTime;
processing_fps.AddFrame();

View File

@@ -229,8 +229,8 @@ namespace OpenFaceOffline
landmark_detector.Reset();
// Loading an image file
var frame = new RawImage(reader.GetNextImage());
var gray_frame = new RawImage(reader.GetCurrentFrameGray());
var frame = reader.GetNextImage();
var gray_frame = reader.GetCurrentFrameGray();
// Setup recording
RecorderOpenFaceParameters rec_params = new RecorderOpenFaceParameters(true, reader.IsWebcam(),
@@ -281,8 +281,8 @@ namespace OpenFaceOffline
if (skip_frames > 0)
skip_frames--;
frame = new RawImage(reader.GetNextImage());
gray_frame = new RawImage(reader.GetCurrentFrameGray());
frame = reader.GetNextImage();
gray_frame = reader.GetCurrentFrameGray();
lastFrameTime = CurrentTime;
processing_fps.AddFrame();
@@ -339,8 +339,8 @@ namespace OpenFaceOffline
face_analyser = new FaceAnalyserManaged(AppDomain.CurrentDomain.BaseDirectory, false, image_output_size, MaskAligned);
// Loading an image file
var frame = new RawImage(reader.GetNextImage());
var gray_frame = new RawImage(reader.GetCurrentFrameGray());
var frame = reader.GetNextImage();
var gray_frame = reader.GetCurrentFrameGray();
// For FPS tracking
DateTime? startTime = CurrentTime;
@@ -405,8 +405,8 @@ namespace OpenFaceOffline
recorder.SetObservationVisualization(visualizer_of.GetVisImage());
frame = new RawImage(reader.GetNextImage());
gray_frame = new RawImage(reader.GetCurrentFrameGray());
frame = reader.GetNextImage();
gray_frame = reader.GetCurrentFrameGray();
// Write out the tracked image
if(RecordTracked)

View File

@@ -116,16 +116,7 @@ namespace UtilitiesOF {
{
cv::Mat next_image = m_image_capture->GetNextImage();
if (m_rgb_frame == nullptr)
{
m_rgb_frame = gcnew OpenCVWrappers::RawImage(next_image.size().width, next_image.size().height, CV_8UC3);
}
else if (m_rgb_frame->Width != next_image.size().width || m_rgb_frame->Height != next_image.size().height)
{
m_rgb_frame = gcnew OpenCVWrappers::RawImage(next_image.size().width, next_image.size().height, CV_8UC3);
}
next_image.copyTo(m_rgb_frame->Mat);
m_rgb_frame = gcnew OpenCVWrappers::RawImage(next_image);
if (next_image.empty())
{
@@ -174,17 +165,7 @@ namespace UtilitiesOF {
OpenCVWrappers::RawImage^ GetCurrentFrameGray() {
cv::Mat next_gray_image = m_image_capture->GetGrayFrame();
if (m_gray_frame == nullptr)
{
m_gray_frame = gcnew OpenCVWrappers::RawImage(next_gray_image.size().width, next_gray_image.size().height, CV_8UC1);
}
else if (m_gray_frame->Width != next_gray_image.size().width || m_gray_frame->Height != next_gray_image.size().height)
{
m_gray_frame = gcnew OpenCVWrappers::RawImage(next_gray_image.size().width, next_gray_image.size().height, CV_8UC1);
}
next_gray_image.copyTo(m_gray_frame->Mat);
m_gray_frame = gcnew OpenCVWrappers::RawImage(next_gray_image);
return m_gray_frame;
}

View File

@@ -66,9 +66,6 @@ namespace OpenCVWrappers {
cv::Mat* mat;
static int refCount;
public:
static int PixelFormatToType(PixelFormat fmt)
@@ -101,39 +98,9 @@ namespace OpenCVWrappers {
}
}
static property int RefCount {
int get() { return refCount; }
}
RawImage()
{
mat = new cv::Mat();
refCount++;
}
RawImage(const cv::Mat& m)
{
mat = new cv::Mat(m.clone());
refCount++;
}
RawImage(RawImage^ img)
{
mat = new cv::Mat(img->Mat.clone());
refCount++;
}
RawImage(int width, int height, int type)
{
mat = new cv::Mat(height, width, type);
refCount++;
}
RawImage(int width, int height, PixelFormat format)
{
int type = RawImage::PixelFormatToType(format);
mat = new cv::Mat(height, width, type);
refCount++;
}
void Mirror()
@@ -150,7 +117,6 @@ namespace OpenCVWrappers {
{
delete mat;
mat = NULL;
refCount--;
}
}

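With the manual refCount bookkeeping dropped, each RawImage simply owns the cv::Mat clone created in its constructor and releases it through the wrapper's destructor/finalizer (the `delete mat` path above). A hedged C# sketch of deterministic clean-up, assuming RawImage exposes IDisposable through its C++/CLI destructor; short-lived frames can be left to the finalizer, but code that accumulates frames (for example a recording queue) may prefer to dispose them once written out:

    // Hedged sketch: RawImage is assumed to implement IDisposable via its
    // C++/CLI destructor, consistent with the "delete mat" block above.
    using (RawImage frame = reader.GetNextImage())
    {
        // ... write the frame to the recorder / visualizer ...
    }   // the native cv::Mat clone is released here, not at an arbitrary GC time
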
View File

@@ -144,17 +144,7 @@ namespace UtilitiesOF {
OpenCVWrappers::RawImage^ GetNextImage()
{
cv::Mat next_image = m_sequence_capture->GetNextFrame();
if (m_rgb_frame == nullptr)
{
m_rgb_frame = gcnew OpenCVWrappers::RawImage(next_image.size().width, next_image.size().height, CV_8UC3);
}
else if (m_rgb_frame->Mat.size().width != next_image.size().width || m_rgb_frame->Mat.size().height != next_image.size().height)
{
m_rgb_frame = gcnew OpenCVWrappers::RawImage(next_image.size().width, next_image.size().height, CV_8UC3);
}
next_image.copyTo(m_rgb_frame->Mat);
m_rgb_frame = gcnew OpenCVWrappers::RawImage(next_image);
return m_rgb_frame;
}
@@ -162,17 +152,7 @@ namespace UtilitiesOF {
OpenCVWrappers::RawImage^ GetCurrentFrameGray() {
cv::Mat_<uchar> next_gray_image = m_sequence_capture->GetGrayFrame();
if (m_gray_frame == nullptr)
{
m_gray_frame = gcnew OpenCVWrappers::RawImage(next_gray_image.size().width, next_gray_image.size().height, CV_8U);
}
else if (m_gray_frame->Mat.size().width != next_gray_image.size().width || m_gray_frame->Mat.size().height != next_gray_image.size().height)
{
m_gray_frame = gcnew OpenCVWrappers::RawImage(next_gray_image.size().width, next_gray_image.size().height, CV_8U);
}
next_gray_image.copyTo(m_gray_frame->Mat);
m_gray_frame = gcnew OpenCVWrappers::RawImage(next_gray_image);
return m_gray_frame;
}