Memory leak issue: eye tracking in Unity with OpenCVSharp

I've been working on this project for a few months now, trying to integrate eye tracking into Unity using OpenCVSharp. I have everything working, including the actual pupil tracking and so on, but I've run into a memory leak. Basically, after the program has run for 20-30 seconds it freezes and the console throws an error saying "Unable to allocate (insert number) bits". Watching the memory usage while the program runs, you can see it climb steadily until it hits a maximum and then crashes.

Now, I've spent a long time trying to fix this and have read plenty of help articles about properly releasing images, storage, and so on. Even though I am doing that, the memory doesn't seem to be released properly. I've also tried forcing the garbage collector to reclaim memory, but that doesn't seem to work either. Am I doing something fundamentally wrong in how I handle the images and release them? Or is creating new images every frame causing the problem, even though I'm releasing them?

Any help would be greatly appreciated. The code is below; you can ignore a lot of what's in the Update function, as it relates to the actual tracking part and to calibration. I realise the code is very messy, sorry about that! The main part to look at is EyeDetection().

using UnityEngine;
using System.Collections;
using System;
using System.IO;
using OpenCvSharp;
using OpenCvSharp.Blob;
//using System.Xml;
//using System.Threading;
//using AForge;

//using OpenCvSharp.Extensions;
//using System.Windows.Media;
//using System.Windows.Media.Imaging;



public class CaptureScript2 : MonoBehaviour
{
    //public GameObject planeObj;
    public WebCamTexture webcamTexture;     //Texture retrieved from the webcam
    //public Texture2D texImage;              //Texture to apply to plane
    public string deviceName;

    private int devId = 1;
    private int imWidth = 800;             //camera width
    private int imHeight = 600;             //camera height
    private string errorMsg = "No errors found!";
    private static IplImage camImage;                   //Ipl image of the converted webcam texture
    //private static IplImage yuv;
    //private static IplImage dst;
    private CvCapture cap;                  //Current camera capture
    //private IplImage eyeLeft;
    //private IplImage eyeRight;
    //private IplImage eyeLeftFinal;
    //private IplImage eyeRightFinal;
    private double leftEyeX;
    private double leftEyeY;
    private double rightEyeX;
    private double rightEyeY;
    private int calibState;
    private double LTRCPx;
    private double LTLCPx;
    private double LBLCPy;
    private double LTLCPy;
    private double RTRCPx;
    private double RTLCPx;
    private double RBLCPy;
    private double RTLCPy;
    private double gazeWidth;
    private double gazeHeight;
    private double gazeScaleX;
    private double gazeScaleY;

    public static CvMemStorage storageFace;
    public static CvMemStorage storage;

    public static double gazePosX;
    public static double gazePosY;

    private bool printed = true;
    //private CvRect r;
    //private IplImage smallImg;

    CvColor[] colors = new CvColor[]
    {
        new CvColor(0,0,255),
        new CvColor(0,128,255),
        new CvColor(0,255,255),
        new CvColor(0,255,0),
        new CvColor(255,128,0),
        new CvColor(255,255,0),
        new CvColor(255,0,0),
        new CvColor(255,0,255),
    };

    //scale for small image
    const double Scale = 1.25;
    const double scaleEye = 10.0;
    const double ScaleFactor = 2.5;
    //must show 2 eyes on the screen
    const int MinNeighbors = 2;
    const int MinNeighborsFace = 1;


    // Use this for initialization
    void Start ()
    {


        //Webcam initialisation
        WebCamDevice[] devices = WebCamTexture.devices;
        Debug.Log ("num:" + devices.Length);

        for (int i=0; i<devices.Length; i++) 
        {
            print (devices [i].name);
            if (devices [i].name.CompareTo (deviceName) == 1) 
            {
                devId = i;
            }
        }

        if (devId >= 0) 
        {
            //mainImage = new IplImage (imWidth, imHeight, BitDepth.U8, 3);


        }

        //create capture from current device
        cap = Cv.CreateCameraCapture(devId);
        //set properties of the capture
        Cv.SetCaptureProperty(cap, CaptureProperty.FrameWidth, imWidth);
        Cv.SetCaptureProperty(cap, CaptureProperty.FrameHeight, imHeight);
        //create window to display capture
        //Cv.NamedWindow("Eye tracking", WindowMode.AutoSize);
        Cv.NamedWindow ("EyeLeft", WindowMode.AutoSize);
        Cv.NamedWindow ("EyeRight", WindowMode.AutoSize);
        Cv.NamedWindow ("Face", WindowMode.AutoSize);

        calibState = 1;


    }


    void Update ()
    {
        if(Input.GetKeyDown(KeyCode.Space) && calibState < 3)
        {
            calibState++;
        }

        if(Input.GetMouseButtonDown(0) && calibState < 4)
        {
            printed = false;
            calibState++;

            Cv.DestroyAllWindows();
            Cv.ReleaseCapture(cap);

            cap = Cv.CreateCameraCapture(devId);
        }
        //if device is connected
        if (devId >= 0)
        {   
            //cap = Cv.CreateCameraCapture(devId);
            //Cv.Release
            //retrieve the current frame from camera
            camImage = Cv.QueryFrame(cap);
            //detect eyes and apply circles
            //
            EyeDetection();

            Cv.ReleaseImage(camImage);
            //PupilTracking();



            switch(calibState)
            {
            case 1:
                LTRCPx = leftEyeX;
                RTRCPx = rightEyeX;

                break;

            case 2:

                LTLCPx = leftEyeX;
                LTLCPy = leftEyeY;
                RTLCPx = rightEyeX;
                RTLCPy = rightEyeY;

                break;
            case 3:

                LBLCPy = leftEyeY;// + rightEyeY) /2 ;
                RBLCPy = rightEyeY;


                break;

            case 4:

                //gazeWidth = (((LTRCPx - LTLCPx) + (RTRCPx - RTLCPx)) / 2) * -1;
                //gazeHeight = ((LBLCPy - LTLCPy) + (RBLCPy - RTLCPy)) /2;
                gazeWidth = LTLCPx -LTRCPx;
                gazeHeight = LBLCPy - LTLCPy;

                gazeScaleX = (Screen.width/gazeWidth);
                gazeScaleY = Screen.height/gazeHeight;

                gazePosX = gazeScaleX *(leftEyeX - LTRCPx);
                gazePosY = gazeScaleY *(leftEyeY - LTLCPy);

                break;
            }


            //Cv.ReleaseCapture(cap);

        } 
        else 
        {
            Debug.Log ("Can't find camera!");
        }

        //print (calibState);
        if(printed == false)
        {
            print ("Gaze pos x = " + gazePosX);
            print ("Gaze pos Y = " + gazePosY);
            print ("Scale x = " + gazeScaleX);
            print ("Scale y = " + gazeScaleY);
            print ("Gaze width = " + gazeWidth);
            print ("Gaze Height = " + gazeHeight);
            print ("left eye x = " + leftEyeX);
            print ("left eye Y = " + leftEyeY);
            print ("calib state = " + calibState);

            printed = true;
        }



        //Cv.ShowImage("Eye tracking", mainImage);
        //Cv.ShowImage ("EyeLeft", grayEyeLeft);
        //Cv.ShowImage ("EyeRight", grayEyeRight);

    }



    void EyeDetection()
    {
        IplImage mainImage = new IplImage (imWidth, imHeight, BitDepth.U8, 3);

        IplImage smallImg = new IplImage(mainImage.Width, mainImage.Height ,BitDepth.U8, 1);
        Cv.Resize (camImage, mainImage, Interpolation.Linear);

        IplImage gray = new IplImage(mainImage.Size, BitDepth.U8, 1);

        Cv.CvtColor (mainImage, gray, ColorConversion.BgrToGray);
        Cv.Resize(gray, smallImg, Interpolation.Linear);
        Cv.EqualizeHist(smallImg, smallImg);
        Cv.ReleaseImage (gray);


            //IplImage hack = Cv.LoadImage("\\Users\\User\\Desktop\\Honours Projects\\Project10\\Project\\Assets\\bug.jpeg");
            //Cv.Erode (hack, hack);
            //Cv.ReleaseImage (hack);

            //uint sizeStore = 2877212;
        CvHaarClassifierCascade cascadeFace = CvHaarClassifierCascade.FromFile("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_frontalface_alt2.xml");

        CvMemStorage storageFace = new CvMemStorage();
        storageFace.Clear ();

        CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascadeFace, storageFace, ScaleFactor, MinNeighborsFace, 0, new CvSize(30,30));

        for(int j = 0; j < faces.Total; j++)
        {
            CvRect face = faces[j].Value.Rect;

            CvHaarClassifierCascade cascadeEye = CvHaarClassifierCascade.FromFile ("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_eye.xml");

            IplImage faceImg = new IplImage(face.Width, face.Height, BitDepth.U8, 1);
            IplImage faceImgColour = new IplImage(face.Width, face.Height, BitDepth.U8, 3);

            CvMemStorage storage = new CvMemStorage();
            storage.Clear ();

            Cv.SetImageROI(smallImg, face);
            Cv.Copy (smallImg, faceImg);
            Cv.ResetImageROI(smallImg);

            Cv.SetImageROI(mainImage, face);
            Cv.Copy (mainImage, faceImgColour);
            Cv.ResetImageROI(mainImage);


            Cv.ShowImage ("Face", faceImgColour);


            CvSeq<CvAvgComp> eyes = Cv.HaarDetectObjects(faceImg, cascadeEye, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
            for(int i = 0; i < eyes.Total; i++)
            {
                CvRect r = eyes[i].Value.Rect;


                Cv.SetImageROI(faceImgColour, r);

                if(i == 1)
                {
                    IplImage eyeLeft = new IplImage(new CvSize(r.Width, r.Height), BitDepth.U8, 3);

                    Cv.Copy(faceImgColour, eyeLeft);

                    IplImage yuv = new IplImage(eyeLeft.Size, BitDepth.U8, 3);
                    IplImage dst = new IplImage(eyeLeft.Size, BitDepth.U8, 3);
                    IplImage grayEyeLeft = new IplImage(eyeLeft.Size, BitDepth.U8, 1);
                    IplImage eyeLeftFinal = new IplImage(Cv.Round(grayEyeLeft.Width * scaleEye), Cv.Round(grayEyeLeft.Height * scaleEye), BitDepth.U8, 1);
                    Cv.CvtColor(eyeLeft, yuv, ColorConversion.BgrToCrCb);
                    Cv.Not(yuv, dst);
                    Cv.CvtColor(dst,eyeLeft,ColorConversion.CrCbToBgr);
                    Cv.CvtColor(eyeLeft, grayEyeLeft, ColorConversion.BgrToGray);

                    Cv.Resize (grayEyeLeft, eyeLeftFinal, Interpolation.Linear);
                    Cv.Threshold(eyeLeftFinal, eyeLeftFinal, 230, 230, ThresholdType.Binary);
                    CvBlobs b1 = new CvBlobs(eyeLeftFinal);
                    if(b1.Count > 0)
                    {
                        leftEyeX = b1.LargestBlob().Centroid.X;
                        leftEyeY = b1.LargestBlob().Centroid.Y;
                    }

                    Cv.ShowImage ("EyeLeft", eyeLeftFinal);

                    Cv.ReleaseImage (yuv);
                    Cv.ReleaseImage (dst);
                    Cv.ReleaseImage (grayEyeLeft);
                    Cv.ReleaseImage (eyeLeftFinal);
                    b1.Clear();

                    Cv.ReleaseImage (eyeLeft);


                }
                if(i == 0)
                {
                    IplImage eyeRight = new IplImage(new CvSize(r.Width, r.Height), BitDepth.U8, 3);

                    Cv.Copy(faceImgColour, eyeRight);

                    IplImage yuv2 = new IplImage(eyeRight.Size, BitDepth.U8, 3);
                    IplImage dst2 = new IplImage(eyeRight.Size, BitDepth.U8, 3);
                    IplImage grayEyeRight = new IplImage(eyeRight.Size, BitDepth.U8, 1);
                    IplImage eyeRightFinal = new IplImage(Cv.Round(grayEyeRight.Width * scaleEye), Cv.Round(grayEyeRight.Height * scaleEye), BitDepth.U8, 1);
                    Cv.CvtColor(eyeRight, yuv2, ColorConversion.BgrToCrCb);
                    Cv.Not(yuv2, dst2);
                    Cv.CvtColor(dst2,eyeRight,ColorConversion.CrCbToBgr);
                    Cv.CvtColor(eyeRight, grayEyeRight, ColorConversion.BgrToGray);

                    Cv.Resize (grayEyeRight, eyeRightFinal, Interpolation.Linear);
                    Cv.Threshold(eyeRightFinal, eyeRightFinal, 230, 230, ThresholdType.Binary);
                    CvBlobs b2 = new CvBlobs(eyeRightFinal);

                    if(b2.Count > 0)
                    {
                        rightEyeX = b2.LargestBlob().Centroid.X;
                        rightEyeY = b2.LargestBlob().Centroid.Y;
                    }

                    Cv.ShowImage ("EyeRight", eyeRightFinal);

                    Cv.ReleaseImage (yuv2);
                    Cv.ReleaseImage (dst2);
                    Cv.ReleaseImage (grayEyeRight);
                    Cv.ReleaseImage (eyeRightFinal);
                    b2.Clear ();

                    Cv.ReleaseImage (eyeRight);

                }

                Cv.ResetImageROI(faceImgColour);
            }

            //Cv.ShowImage("Eye tracking", mainImage);

            Cv.ReleaseImage (faceImg);
            Cv.ReleaseImage (faceImgColour);
            Cv.ReleaseMemStorage(storage);
            Cv.ReleaseHaarClassifierCascade(cascadeEye);


        }

        Cv.ReleaseMemStorage(storageFace);
        Cv.ReleaseHaarClassifierCascade(cascadeFace);


        //PupilTracking ();
        Cv.ReleaseImage(smallImg);
        Cv.ReleaseImage (mainImage);
        GC.Collect();

    }

    void OnGUI ()
    {
            GUI.Label (new Rect (200, 200, 100, 90), errorMsg);
    }

    void OnDestroy()
    {
        Cv.DestroyAllWindows();
        Cv.ReleaseCapture(cap);
    }
}

So are you saying that, as far as you know, there is no solution to this, and it's just a problem that comes with using OpenCVSharp? - Nition
This blog has a good section on memory optimization: https://software.intel.com/en-us/blogs/2015/02/05/unity-tips-1 - ChileAddict - Intel
1 Answer


I'm not familiar with OpenCV, but as a general rule:

  • I would limit instantiation inside the Update loop, such as new CvMemStorage()
  • Don't load data inside the Update loop: CvHaarClassifierCascade.FromFile("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_frontalface_alt2.xml") should be loaded once at startup and assigned to a class variable (see the sketch after this list).
  • Allocate memory at startup and only free it when needed.
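
A minimal, untested sketch of what that might look like against the posted code (it reuses the question's own field names and cascade paths, and promotes cascadeFace/cascadeEye from per-frame locals to class fields; note the question already declares storageFace and storage as fields but then shadows them with new locals inside EyeDetection()):

    private CvHaarClassifierCascade cascadeFace;
    private CvHaarClassifierCascade cascadeEye;

    void Start()
    {
        // ...existing webcam and capture setup...

        // Load both cascades once, not once per frame
        cascadeFace = CvHaarClassifierCascade.FromFile("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_frontalface_alt2.xml");
        cascadeEye = CvHaarClassifierCascade.FromFile("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_eye.xml");

        // Allocate the detection storages once and reuse them every frame
        storageFace = new CvMemStorage();
        storage = new CvMemStorage();
    }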

I find that in most cases there is plenty of RAM to work with, so I allocate anything that gets reused in Start(), especially anything touched in the Update() loop, which runs 60 times a second!

However, operations like loading XML data and allocating and releasing variables (such as storage and cascadeEye) create problems when the application tries to do them 60 times a second.

Creating and destroying objects is very, very expensive, so do it wisely and sparingly, especially with complex data structures like OpenCV objects, bitmaps, or loaders.
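
Concretely, in the posted EyeDetection() that would mean dropping the per-frame FromFile and new CvMemStorage calls, clearing the long-lived storages instead, and releasing everything exactly once at shutdown. A rough, untested sketch under the same assumptions as above (the elided parts stay exactly as in the question):

    void EyeDetection()
    {
        IplImage mainImage = new IplImage(imWidth, imHeight, BitDepth.U8, 3);
        IplImage smallImg = new IplImage(mainImage.Width, mainImage.Height, BitDepth.U8, 1);
        Cv.Resize(camImage, mainImage, Interpolation.Linear);
        // ...grayscale conversion and EqualizeHist as in the question...

        // Reuse the storage allocated in Start(); Clear() resets it for this frame
        storageFace.Clear();
        CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascadeFace, storageFace, ScaleFactor, MinNeighborsFace, 0, new CvSize(30, 30));

        for (int j = 0; j < faces.Total; j++)
        {
            CvRect face = faces[j].Value.Rect;
            IplImage faceImg = new IplImage(face.Width, face.Height, BitDepth.U8, 1);
            Cv.SetImageROI(smallImg, face);
            Cv.Copy(smallImg, faceImg);
            Cv.ResetImageROI(smallImg);

            // No FromFile and no new CvMemStorage here any more
            storage.Clear();
            CvSeq<CvAvgComp> eyes = Cv.HaarDetectObjects(faceImg, cascadeEye, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));

            // ...per-eye processing exactly as in the question...

            // Release only the images created this frame; the cascades and
            // storages stay alive for the life of the component
            Cv.ReleaseImage(faceImg);
        }

        Cv.ReleaseImage(smallImg);
        Cv.ReleaseImage(mainImage);
    }

    void OnDestroy()
    {
        Cv.DestroyAllWindows();
        Cv.ReleaseCapture(cap);

        // Release the long-lived objects exactly once, on shutdown
        Cv.ReleaseMemStorage(storage);
        Cv.ReleaseMemStorage(storageFace);
        Cv.ReleaseHaarClassifierCascade(cascadeEye);
        Cv.ReleaseHaarClassifierCascade(cascadeFace);
    }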

Hope that helps.

