AVCaptureVideoDataOutputSampleBufferDelegate with OpenGL

mishkamishka USMember

I am using the AVCaptureVideoDataOutputSampleBufferDelegate to set the AVCaptureSessions's output. In doing so I am overriding DidOutputSampleBuffer. I am then capturing the pixelBUffer as CVPixelBUffer and binding the luma and chrome from the pixelbuffer and videoTextureCache (CVOpenGLESTextureChache). This code works fine.

The code breaks when I execute
CGBitmapContext context = new CGBitmapContext (pixelBuffer.BaseAddress, pixelBuffer.Width, pixelBuffer.Height, 8, pixelBuffer.BytesPerRow, CGColorSpace.CreateDeviceRGB (), CGImageAlphaInfo.PremultipliedFirst );

The result is that my View renders a black screen instead of the Camera's input as it did before.

Here is the entire code snipet for better context.

class DataOutputDelegate : AVCaptureVideoDataOutputSampleBufferDelegate {
CVOpenGLESTexture lumaTexture, chromaTexture;
CameraController container;
int textureWidth, textureHeight;

    public DataOutputDelegate (CameraController container)
    {
        this.container = container;
    }

    void CleanupTextures ()
    {
        if (lumaTexture != null)
            lumaTexture.Dispose ();
        if (chromaTexture != null)
            chromaTexture.Dispose ();
        container.videoTextureCache.Flush (CVOptionFlags.None);
    }

    #region events
    public EventHandler<ImageCaptureEventArgs> ImageCaptured;

    void OnImageCaptured (UIImage image)
    {
        if (ImageCaptured != null) {
            var args = new ImageCaptureEventArgs {
                Image = image,
                CapturedAt = DateTime.Now
            };
            ImageCaptured (this, args);
        }
    }

    public EventHandler<CaptureErrorEventArgs> CaptureError;

    void OnCaptureError (string errorMessage )
    {
        if (CaptureError == null)
            return;

        try {
            var args = new CaptureErrorEventArgs {
                ErrorMessage = errorMessage
            };
            CaptureError (this, args);
        }
        catch (Exception e) {
            Console.WriteLine (e.Message);
        }
    }
    #endregion

    public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
    {
        try {
            using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer){
                //pixelBuffer.Lock (CVOptionFlags.None);
                int width = (int) pixelBuffer.Width;
                int height = (int) pixelBuffer.Height;
                var bytesPerRow = (int) pixelBuffer.BytesPerRow;
                var baseAddress = pixelBuffer.BaseAddress;
                var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

                CGBitmapContext context = new CGBitmapContext
                    (
                        pixelBuffer.BaseAddress,
                        pixelBuffer.Width,
                        pixelBuffer.Height,
                        8,
                        pixelBuffer.BytesPerRow, 
                        CGColorSpace.CreateDeviceRGB (), 
                        CGImageAlphaInfo.PremultipliedFirst
                    );



                if (container.ripple == null || width != textureWidth || height != textureHeight){
                    textureWidth = width;
                    textureHeight = height;
                    container.SetupRipple (textureWidth, textureHeight);
                }
                CleanupTextures ();

                // Y-plane
                GL.ActiveTexture(TextureUnit.Texture0);
                All re = (All) 0x1903; // GL_RED_EXT, RED component from ARB OpenGL extension
                CVReturn status;
                lumaTexture = container.videoTextureCache.TextureFromImage (pixelBuffer, true, re, textureWidth, textureHeight, re, DataType.UnsignedByte, 0, out status);

                if (lumaTexture == null){
                    Console.WriteLine ("Error creating luma texture: {0}", status);
                    return;
                }
                GL.BindTexture (lumaTexture.Target, lumaTexture.Name);
                GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int) All.ClampToEdge);
                GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int) All.ClampToEdge);

                // UV Plane
                GL.ActiveTexture (TextureUnit.Texture1);
                re = (All) 0x8227; // GL_RG_EXT, RED GREEN component from ARB OpenGL extension
                chromaTexture = container.videoTextureCache.TextureFromImage (pixelBuffer, true, re, textureWidth/2, textureHeight/2, re, DataType.UnsignedByte, 1, out status);

                if (chromaTexture == null){
                    Console.WriteLine ("Error creating chroma texture: {0}", status);
                    return;
                }
                GL.BindTexture (chromaTexture.Target, chromaTexture.Name);
                GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int) All.ClampToEdge);
                GL.TexParameter (TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int) All.ClampToEdge);
            }

            // render the image into the debug preview pane
            //UIImage image = getImageFromSampleBuffer (sampleBuffer);

            // event the capture up
            //OnImageCaptured (image);
        } catch (Exception ex) {
            string exceptionText = ErrorHandling.GetExceptionDetailedText (ex);
            string errorMessage = $"Failed to process image capture: {exceptionText}";
            OnCaptureError (errorMessage);
        }finally {
            sampleBuffer.Dispose ();
        }
    }

    UIImage getImageFromSampleBuffer (CMSampleBuffer sampleBuffer)
    {
        // Get the CoreVideo image
        using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer) {
            // Lock the base address
            pixelBuffer.Lock (CVOptionFlags.None);

            // Get the number of bytes per row for the pixel buffer
            var baseAddress = pixelBuffer.BaseAddress;
            var bytesPerRow = (int) pixelBuffer.BytesPerRow;
            var width = (int) pixelBuffer.Width;
            var height = (int) pixelBuffer.Height;
            var flags = CGBitmapFlags.PremultipliedFirst | CGBitmapFlags.ByteOrder32Little;

            // Create a CGImage on the RGB colorspace from the configured parameter above
            using (var cs = CGColorSpace.CreateDeviceRGB ())
            using (var context = new CGBitmapContext (baseAddress,width, height, 8, bytesPerRow, cs, (CGImageAlphaInfo) flags))
            using (var cgImage = context.ToImage ()) {
                pixelBuffer.Unlock (CVOptionFlags.None);
                return UIImage.FromImage (cgImage);
            }
        }
    }
}</code>

I appreciate any and all help in getting an explanation to this behavior. I suspect I'm somehow hijacking the context but I just don't get it. I've posted the question also on stackoverflow with no responses so far http://stackoverflow.com/questions/37246627/avcapturevideodataoutputsamplebufferdelegate-with-opengl-on-xamarin

Sign In or Register to comment.