AForge.NET

  :: AForge.NET Framework :: Articles :: Forums ::

videoSource.Newframe

Forum to discuss AForge.NET Framework, its features, API, how-tos, etc.

videoSource.Newframe

Postby pjames2009 » Mon Sep 30, 2013 8:28 am

Hi i've managed to load 2 webcams at the same time, and get one camera to recognise symbols, but i have a problem when i try and get the second camera to recognise glyphs at the same time, Picturebox1 shows web camera 1 image once its been converted to grey scale. But the problem is when webcam 2 tries to do it for picturebox2, it either copy's picturebox1's webcam or theres lines or slanted lines so nothing really happens.

Code: Select all
try
            {

                System.Threading.Thread.Sleep(1000);

                // **** Create first video source ****
                VideoCaptureDevice videoSource1 = new VideoCaptureDevice(videoDevices[camera1Combo.SelectedIndex].MonikerString);

                // **** Set Frame Size of Camera's - "1280, 720" For Pictures ****
                videoSource1.DesiredFrameSize = new Size(1280, 720);

                // **** Set VideoSourcePlayer ****
                videoSourcePlayer1.VideoSource = videoSource1;

                // **** Set Frame Rate of Camera's ****
                videoSource1.DesiredFrameRate = 6;

                videoSource1.NewFrame += new NewFrameEventHandler(videoSource1_NewFrame);

                videoSourcePlayer1.Start();
            }
            catch
            {
            }

            try
            {

                System.Threading.Thread.Sleep(1000);

                // **** Create first video source ****
                VideoCaptureDevice videoSource2 = new VideoCaptureDevice(videoDevices[camera2Combo.SelectedIndex].MonikerString);

                // **** Set Frame Size of Camera's - "1280, 720" For Pictures ****
                videoSource2.DesiredFrameSize = new Size(1280, 720);

                // **** Set VideoSourcePlayer ****
                videoSourcePlayer2.VideoSource = videoSource2;

                // **** Set Frame Rate of Camera's ****
                videoSource2.DesiredFrameRate = 6;

                videoSourcePlayer2.Start();
            }
            catch
            {
            }


Further Down i have this:

Code: Select all
public void videoSource1_NewFrame(object sender, NewFrameEventArgs eventArgs)
        {
            // ********************************************************
            // For every new frame coming from the web cam
            // We apply filter to facilitate detection and recognition
            // ********************************************************
            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap image = (Bitmap)eventArgs.Frame.Clone(); ;
            image = filter.Apply(image);
            BitmapData imageData = image.LockBits(
                new Rectangle(0, 0, image.Width, image.Height),
                ImageLockMode.ReadWrite, image.PixelFormat);
            UnmanagedImage grayImage = new UnmanagedImage(imageData);
            // Edge detection
            DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
            UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);
            // Threshold edges
            Threshold thresholdFilter = new Threshold(60);
            thresholdFilter.ApplyInPlace(edgesImage);
            // **********
            // Detection
            // **********
            // create and configure blob counter
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.MinHeight = 30;
            blobCounter.MinWidth = 30;
            blobCounter.FilterBlobs = true;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;
            // find all stand alone blobs
            blobCounter.ProcessImage(edgesImage);
            Blob[] blobs = blobCounter.GetObjectsInformation();
            // check each blob
            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                List<IntPoint> corners = null;
                // is it a quadrilateral ?
                if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                {
                    List<IntPoint> leftEdgePoints, rightEdgePoints;
                    blobCounter.GetBlobsLeftAndRightEdges(blobs[i],
                        out leftEdgePoints, out rightEdgePoints);
                    // calculate average difference between pixel values from outside of the
                    // shape and from inside
                    float diff = CalculateAverageEdgesBrightnessDifference(
                        leftEdgePoints, rightEdgePoints, grayImage);
                    if (diff > 40)
                    {
                        Quadrilateral = corners;
                        QuadrilateralTransformation quadrilateralTransformation =
                            new QuadrilateralTransformation(Quadrilateral, 300, 300);
                        glyphImage = quadrilateralTransformation.Apply(grayImage);
                        // Filter to pure Black & White
                        OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
                        otsuThresholdFilter.ApplyInPlace(glyphImage);
                        // Try to recognize the Glyph
                        byte[,] glyphValues = Recognize(glyphImage,
    new Rectangle(0, 0, glyphImage.Width, glyphImage.Height), out confidence);
                        // If Glyph is recognized with minimum confidence...
                        if (confidence >= minConfidenceLevel)
                        {

// Query Database
                            symbolslist(glyphValues);


                            // Debug.Print(System.Convert.ToString(confidence));
                            ImgRes.Image = glyphImage.ToManagedImage();
                        }
                    }
                }
            }
            // *************************
            // Display result in window
            // *************************
            pictureBox1.Image = grayImage.ToManagedImage();
            GC.Collect();
        }

public void videoSource2_NewFrame(object sender, NewFrameEventArgs eventArgs)
        {
            // ********************************************************
            // For every new frame coming from the web cam
            // We apply filter to facilitate detection and recognition
            // ********************************************************
            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap image = (Bitmap)eventArgs.Frame.Clone(); ;
            image = filter.Apply(image);
            BitmapData imageData = image.LockBits(
                new Rectangle(0, 0, image.Width, image.Height),
                ImageLockMode.ReadWrite, image.PixelFormat);
            UnmanagedImage grayImage = new UnmanagedImage(imageData);
            // Edge detection
            DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
            UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);
            // Threshold edges
            Threshold thresholdFilter = new Threshold(60);
            thresholdFilter.ApplyInPlace(edgesImage);
            // **********
            // Detection
            // **********
            // create and configure blob counter
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.MinHeight = 30;
            blobCounter.MinWidth = 30;
            blobCounter.FilterBlobs = true;
            blobCounter.ObjectsOrder = ObjectsOrder.Size;
            // find all stand alone blobs
            blobCounter.ProcessImage(edgesImage);
            Blob[] blobs = blobCounter.GetObjectsInformation();
            // check each blob
            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                List<IntPoint> corners = null;
                // is it a quadrilateral ?
                if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                {
                    List<IntPoint> leftEdgePoints, rightEdgePoints;
                    blobCounter.GetBlobsLeftAndRightEdges(blobs[i],
                        out leftEdgePoints, out rightEdgePoints);
                    // calculate average difference between pixel values from outside of the
                    // shape and from inside
                    float diff = CalculateAverageEdgesBrightnessDifference(
                        leftEdgePoints, rightEdgePoints, grayImage);
                    if (diff > 40)
                    {
                        Quadrilateral = corners;
                        QuadrilateralTransformation quadrilateralTransformation =
                            new QuadrilateralTransformation(Quadrilateral, 300, 300);
                        glyphImage = quadrilateralTransformation.Apply(grayImage);
                        // Filter to pure Black & White
                        OtsuThreshold otsuThresholdFilter = new OtsuThreshold();
                        otsuThresholdFilter.ApplyInPlace(glyphImage);
                        // Try to recognize the Glyph
                        byte[,] glyphValues = Recognize(glyphImage,
    new Rectangle(0, 0, glyphImage.Width, glyphImage.Height), out confidence);
                        // If Glyph is recognized with minimum confidence...
                        if (confidence >= minConfidenceLevel)
                        {

// Query Database
                            symbolslist(glyphValues);


                            // Debug.Print(System.Convert.ToString(confidence));
                            ImgRes.Image = glyphImage.ToManagedImage();
                        }
                    }
                }
            }
            // *************************
            // Display result in window
            // *************************
            pictureBox2.Image = grayImage.ToManagedImage();
            GC.Collect();
        }


I've tried changing parts of the code, renaming things differently and it just causes problems.

I've got this at the top of my code but i dont think any of this needs changing:

Code: Select all
       
        private float CalculateAverageEdgesBrightnessDifference(
            List<IntPoint> leftEdgePoints,
            List<IntPoint> rightEdgePoints,
            UnmanagedImage image)
        {
            List<IntPoint> leftEdgePoints1 = new List<IntPoint>();
            List<IntPoint> leftEdgePoints2 = new List<IntPoint>();
            List<IntPoint> rightEdgePoints1 = new List<IntPoint>();
            List<IntPoint> rightEdgePoints2 = new List<IntPoint>();
            int tx1, tx2, ty;
            int widthM1 = image.Width - 1;
            for (int k = 0; k < leftEdgePoints.Count; k++)
            {
                tx1 = leftEdgePoints[k].X - stepSize;
                tx2 = leftEdgePoints[k].X + stepSize;
                ty = leftEdgePoints[k].Y;
                leftEdgePoints1.Add(new IntPoint((tx1 < 0) ? 0 : tx1, ty));
                leftEdgePoints2.Add(new IntPoint((tx2 > widthM1) ? widthM1 : tx2, ty));
                tx1 = rightEdgePoints[k].X - stepSize;
                tx2 = rightEdgePoints[k].X + stepSize;
                ty = rightEdgePoints[k].Y;
                rightEdgePoints1.Add(new IntPoint((tx1 < 0) ? 0 : tx1, ty));
                rightEdgePoints2.Add(new IntPoint((tx2 > widthM1) ? widthM1 : tx2, ty));
            }
            byte[] leftValues1 = image.Collect8bppPixelValues(leftEdgePoints1);
            byte[] leftValues2 = image.Collect8bppPixelValues(leftEdgePoints2);
            byte[] rightValues1 = image.Collect8bppPixelValues(rightEdgePoints1);
            byte[] rightValues2 = image.Collect8bppPixelValues(rightEdgePoints2);
            float diff = 0;
            int pixelCount = 0;
            for (int k = 0; k < leftEdgePoints.Count; k++)
            {
                if (rightEdgePoints[k].X - leftEdgePoints[k].X > stepSize * 2)
                {
                    diff += (leftValues1[k] - leftValues2[k]);
                    diff += (rightValues2[k] - rightValues1[k]);
                    pixelCount += 2;
                }
            }
            return diff / pixelCount;
        }

public byte[,] Recognize(UnmanagedImage image, Rectangle rect, out float confidence)
        {
            int glyphStartX = rect.Left;
            int glyphStartY = rect.Top;
            int glyphWidth = rect.Width;
            int glyphHeight = rect.Height;
            int cellWidth = glyphWidth / glyphSize;
            int cellHeight = glyphHeight / glyphSize;
            int cellOffsetX = (int)(cellWidth * 0.2);
            int cellOffsetY = (int)(cellHeight * 0.2);
            int cellScanX = (int)(cellWidth * 0.6);
            int cellScanY = (int)(cellHeight * 0.6);
            int cellScanArea = cellScanX * cellScanY;
            int[,] cellIntensity = new int[glyphSize, glyphSize];
            unsafe
            {
                int stride = image.Stride;
                byte* srcBase = (byte*)image.ImageData.ToPointer() +
                    (glyphStartY + cellOffsetY) * stride +
                    glyphStartX + cellOffsetX;
                byte* srcLine;
                byte* src;
                for (int gi = 0; gi < glyphSize; gi++)
                {
                    srcLine = srcBase + cellHeight * gi * stride;
                    for (int y = 0; y < cellScanY; y++)
                    {
                        for (int gj = 0; gj < glyphSize; gj++)
                        {
                            src = srcLine + cellWidth * gj;
                            for (int x = 0; x < cellScanX; x++, src++)
                            {
                                cellIntensity[gi, gj] += *src;
                            }
                        }
                        srcLine += stride;
                    }
                }
            }

            // calculate value of each glyph's cell and set
            // glyphs' confidence to minim value of cell's confidence
            byte[,] glyphValues = new byte[glyphSize, glyphSize];
            confidence = 1f;
            for (int gi = 0; gi < glyphSize; gi++)
            {
                for (int gj = 0; gj < glyphSize; gj++)
                {
                    float fullness = (float)
                        (cellIntensity[gi, gj] / 255) / cellScanArea;
                    float conf = (float)System.Math.Abs(fullness - 0.5) + 0.5f;
                    glyphValues[gi, gj] = (byte)((fullness > 0.5f) ? 1 : 0);
                    if (conf < confidence)
                        confidence = conf;
                }
            }
            return glyphValues;
        }

        public int CheckForMatching(byte[,] ModelData, byte[,] rawGlyphData)
        {
            int size = rawGlyphData.GetLength(0);
            int sizeM1 = size - 1;
            bool match1 = true;
            bool match2 = true;
            bool match3 = true;
            bool match4 = true;
            for (int i = 0; i < size; i++)
            {
                for (int j = 0; j < size; j++)
                {
                    byte value = rawGlyphData[i, j];
                    match1 &= (value == ModelData[i, j]);
                    match2 &= (value == ModelData[sizeM1 - i, sizeM1 - j]);
                    match3 &= (value == ModelData[sizeM1 - j, i]);
                    match4 &= (value == ModelData[j, sizeM1 - i]);
                }
            }
            if (match1)
                return 0;
            else if (match2)
                return 180;
            else if (match3)
                return 90;
            else if (match4)
                return 270;
            return -1;
        }


I know its hard to explain but any help would be appriciated.

Many Thanks,

Pete
pjames2009
 
Posts: 4
Joined: Mon Apr 08, 2013 8:24 am



Return to AForge.NET Framework