Displaying H.264 Video

SteffRSteffR Member ✭✭

I have the following code; it's not pretty, but it 'should' be functional...
It gets data from a UDP source, processes it, and eventually displays it in an AVSampleBufferDisplayLayer... at least, that's the idea. I can successfully do the first part; it's just the displaying bit that fails — I get a blank screen on the simulator. I get no error messages and cannot figure out what is going on.
I'd be truly grateful to anybody who can help.

using System;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading.Tasks;
using UIKit;
using CoreMedia;
using System.Collections.Generic;
using AVFoundation;
using CoreVideo;
using System.Runtime.InteropServices;
using ObjCRuntime;

namespace UDPTest.iOS
{
public partial class ViewController : UIViewController
{
    // ------------------------------------------------------------------
    // UDP / protocol state
    // ------------------------------------------------------------------

    UdpClient udpClient;        // bound to local port 3102 in ViewDidLoad
    int Next;                   // protocol sequence bookkeeping
    int Previous;

    int state;                  // hand-rolled state machine:
                                // 2 = announcement seen, 3 = description seen, 4 = streaming
    int BufferPointer = 0;      // number of valid bytes accumulated in videoData
    byte[] videoData = new byte[1000000];   // reassembly buffer for one H.264 access unit (frame)

    // H.264 NAL unit types we care about (low 5 bits of the byte that
    // follows the 00 00 00 01 start code).
    enum NALUnitType
    {
        unspecified = 0,
        codedSlice = 1,
        idr = 5,
        sps = 7,
        pps = 8,
    }

    // Canned "SetUpStream" request for the camera: 4-byte binary header
    // followed by an ASCII key/value payload (see hex-dump comments).
    byte[] SetUpStream = new byte[] {
        0x00, 0x01, 0x01, 0x76,                         /*     ...v */
        0x30, 0x30, 0x31, 0x30, 0x30, 0x30, 0x30, 0x36, /* 00100006 */
        0x30, 0x30, 0x30, 0x30, 0x35, 0x38, 0x30, 0x35, /* 00005805 */
        0x30, 0x30, 0x30, 0x30, 0x30, 0x31, 0x41, 0x75, /* 000001Au */
        0x64, 0x69, 0x6f, 0x31, 0x30, 0x35, 0x30, 0x30, /* dio10500 */
        0x30, 0x30, 0x30, 0x31, 0x56, 0x69, 0x64, 0x65, /* 0001Vide */
        0x6f, 0x31, 0x30, 0x39, 0x30, 0x30, 0x30, 0x30, /* o1090000 */
        0x30, 0x37, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x53, /* 07FrameS */
        0x69, 0x7a, 0x65, 0x32, 0x38, 0x30, 0x30, 0x30, /* ize28000 */
        0x66, 0x30, 0x30, 0x39, 0x30, 0x30, 0x30, 0x30, /* f0090000 */
        0x30, 0x31, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x52, /* 01FrameR */
        0x61, 0x74, 0x65, 0x66, 0x30, 0x37, 0x30, 0x30, /* atef0700 */
        0x30, 0x30, 0x30, 0x33, 0x42, 0x69, 0x74, 0x52, /* 0003BitR */
        0x61, 0x74, 0x65, 0x34, 0x30, 0x30 };            /* ate400 */

    // Canned "Describe"/"AllInfo" request.  Currently unused: ProcessPacket
    // builds the same bytes from a string literal instead.
    byte[] Describe = new byte[] {
        0x00, 0x00, 0x00, 0x76,                         /*     ...v */
        0x30, 0x30, 0x31, 0x30, 0x30, 0x30, 0x30, 0x38, /* 00100008 */
        0x30, 0x30, 0x30, 0x30, 0x31, 0x30, 0x30, 0x37, /* 00001007 */
        0x30, 0x30, 0x30, 0x30, 0x30, 0x31, 0x41, 0x6c, /* 000001Al */
        0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x31 };           /* lInfo1   */

    public ViewController(IntPtr handle) : base(handle)
    {
    }

    AVSampleBufferDisplayLayer test;    // layer that decodes/renders the enqueued H.264 samples

    public async override void ViewDidLoad()
    {
        base.ViewDidLoad();

        udpClient = new UdpClient(3102);

        state = 20;     // any value outside 2..4 means: wait for an announcement
        Next = 1;
        Previous = 0;

        test = new AVSampleBufferDisplayLayer
        {
            Frame = View.Frame,
            Bounds = View.Bounds,
            BackgroundColor = UIColor.Black.CGColor
        };

        View.Layer.AddSublayer(test);

        // async void is tolerated here only because ViewDidLoad is an
        // event-style override; the receive loop never returns.
        await UDPReceiveTest();
    }

    public override void DidReceiveMemoryWarning()
    {
        base.DidReceiveMemoryWarning();
        // Release any cached data, images, etc that aren't in use.
    }

    // Receive loop: pull datagrams forever and feed them to the state machine.
    private async Task UDPReceiveTest()
    {
        while (true)
        {
            var result = await udpClient.ReceiveAsync();
            ProcessPacket(result.Buffer);
        }
    }

    // Send one datagram to the camera (broadcast on port 1000).
    private void UDPSendPacket(byte[] data)
    {
        IPEndPoint target = new IPEndPoint(IPAddress.Parse("255.255.255.255"), 1000);
        udpClient.Send(data, data.Length, target);
    }

    // Protocol state machine: handshake (announce -> describe -> set up stream),
    // then ACK and accumulate each video packet.
    private void ProcessPacket(byte[] Buffer)
    {
        // An announcement can arrive at any time and restarts the handshake.
        if (Check_Announcement(Buffer))
        {
            Console.WriteLine("Announcement");

            // Same bytes as the Describe field: "\0\0\0v" header + AllInfo request.
            string DescribeL = "\0\0\0v0010000800001007000001AllInfo1";
            byte[] buffer = System.Text.Encoding.UTF8.GetBytes(DescribeL);
            UDPSendPacket(buffer);
            state = 2;
        }

        switch (state)
        {
            case 2: // Got announcement, waiting for the description
                if (Check_Description(Buffer))
                {
                    Console.WriteLine("Description Received");
                    byte[] ack = new byte[] { 0x01, 0x00, 0x01, 0x76 };
                    UDPSendPacket(ack);
                    UDPSendPacket(SetUpStream);
                    state = 3;
                }
                break;

            case 3: // Got description, waiting for the set-up-stream ACK
                if (Check_SetUpStreamAck(Buffer))
                {
                    Console.WriteLine("Got Setup Stream ACK");
                    byte[] ack = new byte[] { 0x01, 0x01, 0x02, 0x76 };
                    UDPSendPacket(ack);
                    state = 4;
                }
                break;

            case 4: // Streaming: ACK each video packet and accumulate it
                if (VideoDataIsRecognised(Buffer))
                {
                    // ACK carries this packet's sequence byte and the next expected one.
                    byte[] next = BitConverter.GetBytes(Buffer[1] + 1);
                    byte[] ackb = new byte[] { 0x01, Buffer[1], next[0], 0x76 };
                    UDPSendPacket(ackb);
                    AppendVideoData(Buffer);
                }
                break;

            default:
                break;
        }
    }

    // Announcement: exactly 32 bytes starting with a fixed 4-byte magic tag.
    // (BitConverter assumes a little-endian host -- true on iOS devices and
    // simulators, but worth noting.)
    private bool Check_Announcement(byte[] buffer)
    {
        return buffer.Length == 32 && BitConverter.ToUInt32(buffer, 0) == 1752855916;
    }

    // Description: fixed-size 733-byte reply whose name field reads "AllInfo".
    private bool Check_Description(byte[] buffer)
    {
        if (buffer.Length != 733)
            return false;
        string name = System.Text.Encoding.UTF8.GetString(buffer, 26, 7);
        return string.Compare(name, "AllInfo") == 0;
    }

    // SetUpStream ACK: fixed-size 30-byte reply whose name field reads "Ret1".
    private bool Check_SetUpStreamAck(byte[] buffer)
    {
        if (buffer.Length != 30)
            return false;
        string name = System.Text.Encoding.UTF8.GetString(buffer, 26, 4);
        return string.Compare(name, "Ret1") == 0;
    }

    // Video packet: "Data" tag at bytes 95..98.
    private bool VideoDataIsRecognised(byte[] buffer)
    {
        // AppendVideoData reads up to byte 102, so require at least 103 bytes.
        // (Bug fix: the old guard of >= 90 let GetString(buffer, 95, 4) throw
        // ArgumentOutOfRange on packets 90..98 bytes long.)
        if (buffer.Length < 103)
            return false;
        string tag = System.Text.Encoding.UTF8.GetString(buffer, 95, 4);
        return string.Compare(tag, "Data") == 0;
    }

    // Accumulate one video packet's payload into videoData.  A packet whose
    // payload begins with an Annex-B start code (00 00 00 01) starts a new
    // frame; anything else is a continuation of the current frame.
    private void AppendVideoData(byte[] buffer)
    {
        const int payloadOffset = 99;   // 'Data' tag occupies 95..98, payload from 99
        if (buffer.Length < 103)
            return;                     // too short to even test for a start code
        int payloadLength = buffer.Length - payloadOffset;

        if (buffer[99] == 0x00 && buffer[100] == 0x00 && buffer[101] == 0x00 && buffer[102] == 0x01)
        {
            if (BufferPointer != 0)
            {
                // A new start code means the previous frame is complete -- display it.
                ProcessVideo(videoData);
            }
            // Start accumulating the new frame.
            Buffer.BlockCopy(buffer, payloadOffset, videoData, 0, payloadLength);
            BufferPointer = payloadLength;
        }
        else
        {
            // Continuation packet: append AFTER the bytes already gathered.
            // (Bug fix: the pointer used to be advanced BEFORE the copy, which
            // left a gap of stale bytes in the middle of the frame.)
            Buffer.BlockCopy(buffer, payloadOffset, videoData, BufferPointer, payloadLength);
            BufferPointer += payloadLength;
        }
    }

    // True when buffer begins with the 4-byte Annex-B start code 00 00 00 01.
    private bool CheckNALUStartCode(byte[] buffer)
    {
        return buffer[0] == 0x00 && buffer[1] == 0x00 && buffer[2] == 0x00 && buffer[3] == 0x01;
    }

    // Index of the next 00 00 00 01 start code at or after 'from', or -1.
    private int StartCodeIndex(byte[] buffer, int from)
    {
        for (int i = from; i < buffer.Length - 5; i++)
        {
            if (buffer[i] == 0x00 && buffer[i + 1] == 0x00 && buffer[i + 2] == 0x00 && buffer[i + 3] == 0x01)
            {
                return i;
            }
        }
        return -1;
    }

    CMVideoFormatDescription formatDescription;     // built from SPS/PPS; reused for every sample

    // Convert one accumulated access unit (videoData[0..BufferPointer)) into a
    // CMSampleBuffer and enqueue it on the display layer.
    //
    // Expected layouts (presumably fixed by this camera -- TODO confirm):
    //   SPS/PPS/IDR frame:
    //     BYTE: 0..3     4    5..15  16..19   20   21..31  32..35   36   37..
    //     DATA: 0 0 0 1  SPS  data   0 0 0 1  PPS  data    0 0 0 1  IDR  data
    //   Coded-slice frame:
    //     BYTE: 0..3     4    5..
    //     DATA: 0 0 0 1  CDS  data
    private void ProcessVideo(byte[] buffer)
    {
        Console.WriteLine("Processing : " + BufferPointer.ToString());
        NALUnitType naluType = (NALUnitType)(buffer[4] & 0x1F);

        int spsRangeLower = 0;
        int ppsRangeLower = 0;
        int blockLength = 0;

        // Locals rather than fields (bug fix): a stale block/sample buffer left
        // over from a previous frame must never be re-enqueued when the current
        // frame fails part-way through.
        CMBlockBuffer blockBuffer = null;
        CMSampleBuffer sampleBuffer = null;

        if (formatDescription == null && naluType != NALUnitType.sps)
        {
            Console.WriteLine("Error: format Description is nil and frame does not start from SPS");
            return;
        }

        if (naluType == NALUnitType.sps)
        {
            // SPS payload is bytes 4..15 (start code excluded); next NAL header at 20.
            // Assumes a fixed 12-byte SPS for this camera -- TODO confirm.
            spsRangeLower = 4;
            naluType = (NALUnitType)(buffer[20] & 0x1F);
        }

        if (naluType == NALUnitType.pps)
        {
            // PPS payload is bytes 20..31; next NAL header at 36.
            ppsRangeLower = 20;
            naluType = (NALUnitType)(buffer[36] & 0x1F);
        }

        if (spsRangeLower != 0 && ppsRangeLower != 0)   // got both SPS and PPS
        {
            int spsSize = 12;
            int ppsSize = 12;

            byte[] sps = new byte[spsSize];
            byte[] pps = new byte[ppsSize];
            Array.Copy(buffer, spsRangeLower, sps, 0, spsSize);
            Array.Copy(buffer, ppsRangeLower, pps, 0, ppsSize);
            var parameterSets = new List<byte[]> { sps, pps };

            CMFormatDescriptionError fdStatus;
            // 4 = size of the per-NAL length header the sample data will carry (AVCC framing).
            formatDescription = CMVideoFormatDescription.FromH264ParameterSets(parameterSets, 4, out fdStatus);

            if (formatDescription == null)
            {
                Console.WriteLine("Error: Can't create CMFormatDescription" + fdStatus.ToString());
                return;
            }
        }

        if (naluType == NALUnitType.idr)
        {
            Console.WriteLine("Processing IDR");
            blockBuffer = MakeBlockBuffer(buffer, 32, out blockLength);     // IDR start code begins at byte 32
        }
        else if (naluType == NALUnitType.codedSlice)
        {
            Console.WriteLine("Processing Code Slice");
            blockBuffer = MakeBlockBuffer(buffer, 0, out blockLength);      // slice start code begins at byte 0
        }

        if (blockBuffer == null)
        {
            Console.WriteLine("Error: Reached end of the method without available blockBuffer");
            return;
        }

        CMSampleBufferError sbStatus;
        nuint[] sampleSizes = new nuint[] { (nuint)blockLength };

        // No usable per-sample timing comes from this stream, so pass a null
        // timing array; DisplayImmediately (below) makes the layer render each
        // frame as soon as it is decoded.
        // (Bug fix: the old code copied DecodeTimeStamp/Duration from the
        // PREVIOUS sampleBuffer, which is null for the very first frame and
        // threw a NullReferenceException -- the likely cause of the blank screen.)
        sampleBuffer = CMSampleBuffer.CreateReady(blockBuffer, formatDescription, 1, null, sampleSizes, out sbStatus);

        if (sampleBuffer == null)
        {
            Console.WriteLine("Error: Failed to create CMSampleBuffer");
            return;
        }

        // GetSampleAttachments(true) returns live attachment settings; setting
        // DisplayImmediately tells the layer not to wait for timestamps.
        CMSampleBufferAttachmentSettings attachments = sampleBuffer.GetSampleAttachments(true)[0];
        attachments.DisplayImmediately = true;

        if (test.ReadyForMoreMediaData)
        {
            test.EnqueueSampleBuffer(sampleBuffer);
            test.SetNeedsDisplay();
        }
    }

    // Copy one NAL unit out of the frame (from 'offset' to BufferPointer),
    // overwrite its 00 00 00 01 start code with a big-endian 4-byte length
    // (AVCC framing, matching the nalUnitHeaderLength of 4 above), and wrap
    // the result in a CMBlockBuffer.  Returns null (after logging) on failure.
    private CMBlockBuffer MakeBlockBuffer(byte[] buffer, int offset, out int blockLength)
    {
        blockLength = BufferPointer - offset;
        byte[] data = new byte[blockLength];
        Array.Copy(buffer, offset, data, 0, blockLength);

        byte[] lengthBytes = BitConverter.GetBytes((UInt32)(blockLength - 4));  // length excludes the 4 header bytes
        Array.Reverse(lengthBytes);                                             // little-endian -> big-endian
        Array.Copy(lengthBytes, data, 4);                                       // replace 00 00 00 01 with the length

        CMBlockBufferError status;
        CMBlockBuffer block = CMBlockBuffer.FromMemoryBlock(data, 0, 0, out status);
        if (block == null)
        {
            Console.WriteLine("Error: Can't create CMBlockBuffer " + status.ToString());
        }
        return block;
    }
}

}

Answers

  • SteffRSteffR Member ✭✭

    OK, an update to this: I now have it working for IDR frames, so I'm getting a frame rate of about 5 frames per second.
    I added a callback on the display layer; it reports a bad-data error on coded slices (non-IDR frames).
    I have not figured this one out yet — the handling should be the same as for IDR apart from the offset, and IDR works.
    I'm working on the principle that it's either a buffering error or maybe even timing; however, DisplayImmediately is set, so timing shouldn't be required.

    Anybody any ideas?

    Here is the new code.

    using System;
    using System.Collections.Generic;
    using System.Net;
    using System.Net.Sockets;
    using System.Text;
    using System.Threading.Tasks;
    using AVFoundation;
    using CoreMedia;
    using CoreVideo;
    using UIKit;
    using VideoToolbox;
    using Foundation;
    using CoreAnimation;
    using System.Threading;

    namespace UDPTest.iOS
    {
    public partial class ViewController : UIViewController
    {
    UdpClient udpClient;
    int Next;
    int Previous;
    bool processing;
    int state;
    int BufferPointer = 0;
    byte[] videoData = new byte[1000000];

        enum NALUnitType
        {
            unspecified = 0,
            codedSlice = 1,
            dataPartitionA = 2,
            dataPartitionB = 3,
            dataPartitionC = 4,
            idr = 5,
            sei = 6,
            sps = 7,
            pps = 8,
            accessUnitDelimiter = 9,
            endOfSequence = 10,
            endOfStream = 11,
            filterData = 12,
            spsExtension = 13,
            prefixNALU = 14,
            subsetSPS = 15,
            reserved16 = 16,
            reserved17 = 17,
            reserved18 = 18,
            csaPictureWoPartitioning = 19,
            cse = 20,
            cseDepthView = 21,
            reserved22 = 22,
            reserved23 = 23,
            stapa = 24,
            stapb = 25,
            mtap16 = 26,
            mtap24 = 27,
            fua = 28,
            fub = 29,
            unspecified30 = 30,
            unspecified31 = 31
        }
    
    
    
    
        public ViewController(IntPtr handle) : base(handle)
        {
        }
    
        AVSampleBufferDisplayLayer test;
        public async override void ViewDidLoad()
        {
            base.ViewDidLoad();
    
            udpClient = new UdpClient(3102);
    
            state = 20;
            Next = 1;
            Previous = 0;
    
    
            test = new AVSampleBufferDisplayLayer
            {
                Frame = View.Frame,
                Bounds = View.Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspect.ToString(),
    
                // ContentsRect = View.Bounds,
                BackgroundColor = UIColor.Black.CGColor
            };
    
    
            NSNotificationCenter.DefaultCenter.AddObserver(AVSampleBufferDisplayLayer.FailedToDecodeNotification, Callback);
    
            View.Layer.AddSublayer(test);
    
            UDPReceiveTest();
    
    
    
            Console.WriteLine("got here");
    
        }
    
    
        void Callback(NSNotification notification)
        {
            Console.WriteLine("Received a notification AVSampleBufferDisplayLayer" + notification);
        }
    
    
        public override void DidReceiveMemoryWarning()
        {
            base.DidReceiveMemoryWarning();
            // Release any cached data, images, etc that aren't in use.     
        }
    
    
        private async Task UDPReceiveTest()
        {
            while (true)
            {
                IPEndPoint Ip2 = new IPEndPoint(IPAddress.Parse("255.255.255.255"), 1000);
                  // byte[] test = udpClient.Receive(ref Ip2);
                var UdpReceiveResult = await udpClient.ReceiveAsync();
                  ProcessPacket(UdpReceiveResult.Buffer); 
    
    
            }
    
    
        }
    
    
        private void UDPSendPacket(byte[] data)
        {//192.168.1.1
            IPEndPoint Ip2 = new IPEndPoint(IPAddress.Parse("192.168.1.1"), 1000);
    
             udpClient.SendAsync(data, data.Length, Ip2);
    
        }
    
        int current;
    
    
        private void ProcessPacket(byte[] Buffer)
        {
            switch (state)
            {
                case 2: //Got Anouncement
                    {
                        if (Check_Description(Buffer) == true)
                        {
                            Console.WriteLine("Description Received");
                            byte[] buffer2 = new byte[] { 0x01, 0x00, 0x01, 0x76 };
                            UDPSendPacket(buffer2);
                            UDPSendPacket(SetUpStream);
                            state = 3;
                        }
                        if (Check_Announcement(Buffer) == true)
                        {
                            Console.WriteLine("Announcement");
    
                            string DescribeL = "\0\0\0v0010000800001007000001AllInfo1";
                            byte[] buffer = System.Text.Encoding.UTF8.GetBytes(DescribeL);
                            UDPSendPacket(buffer);
                            state = 2;
                        }
                        break;
                    }
    
                case 3: //Got All Info
                    {
                        if (Check_SetUpStreamAck(Buffer) == true)
                        {
                            Console.WriteLine("Got Setup Stream ACK");
                            byte[] buffer2 = new byte[] { 0x01, 0x01, 0x02, 0x76 };
                            UDPSendPacket(buffer2);
    
                            string s = System.Text.Encoding.UTF8.GetString(buffer2, 0, buffer2.Length);
                            state = 4;
                        }
                        if (Check_Announcement(Buffer) == true)
                        {
                            Console.WriteLine("Announcement");
    
                            string DescribeL = "\0\0\0v0010000800001007000001AllInfo1";
                            byte[] buffer = System.Text.Encoding.UTF8.GetBytes(DescribeL);
                            UDPSendPacket(buffer);
                            state = 2;
                        }
    
                        break;
    
                    }
    
                case 4: //Got Stream Settings
                    {
                        if (VideoDataIsRecognised(Buffer) == true)
                        {
                             int temp = Buffer[1] + 1;
    
                             byte[] next = BitConverter.GetBytes(temp);
                            byte[] ackb = new byte[] { 0x01, Buffer[1], next[0], 0x76 };
                            UDPSendPacket(ackb);
                             AppendVideoData(Buffer);
    
    
                        }
                        else
                        {
    
                            Console.WriteLine("not video data");
                        }
                        if (Check_Announcement(Buffer) == true)
                        {
                            Console.WriteLine("Announcement");
    
                            string DescribeL = "\0\0\0v0010000800001007000001AllInfo1";
                            byte[] buffer = System.Text.Encoding.UTF8.GetBytes(DescribeL);
                            UDPSendPacket(buffer);
                            state = 2;
                        }
                        break;
                    }
    
    
                default:
                    {
    
                        if (Check_Announcement(Buffer) == true)
                        {
                            Console.WriteLine("Announcement");
    
                            string DescribeL = "\0\0\0v0010000800001007000001AllInfo1";
                            byte[] buffer = System.Text.Encoding.UTF8.GetBytes(DescribeL);
                            UDPSendPacket(buffer);
                            state = 2;
                        }
                        break;
    
                    }
    
            }
        }
    
        private bool Check_Announcement(byte[] buffer)
        {
            if (buffer.Length == 32)
            {
                if (BitConverter.ToUInt32(buffer, 0) == 1752855916)
                {
                    return true;
                }
    
            }
    
            return false;
        }
    
        private bool Check_Description(byte[] buffer)
        {
    
            if (buffer.Length == 733)//       static let size = 733
            {
    
                string ss = System.Text.Encoding.UTF8.GetString(buffer, 26, 7);
                if (0 == string.Compare(ss, "AllInfo"))
                {
                    return true;
                }
            }
    
            return false;
    
        }
    
        private bool Check_SetUpStreamAck(byte[] buffer)
        {
            if (buffer.Length == 30)
            {
    
                string ss = System.Text.Encoding.UTF8.GetString(buffer, 26, 4);
                if (0 == string.Compare(ss, "Ret1"))
                {
                    return true;
                }
            }
    
            return false;
    
        }
    
    
    
    
        private bool VideoDataIsRecognised(byte[] buffer)
        {
    
             if (buffer.Length >= 90)
            {
                string ss = System.Text.Encoding.UTF8.GetString(buffer, 95, 4);
    
                if (0 == string.Compare(ss, "Data"))
                {
                    return true;
                }
            }
    
            return false;
    
        }
    
    
        private void AppendVideoData(byte[] buffer)
        {
    
              if (buffer[99] == 0x00 && buffer[100] == 0x00 && buffer[101] == 0x00 && buffer[102] == 0x01) // 'Data' in bytes 95,96,97,98 so Ceck 99 onwards for NAL start Code
            {
                 if (BufferPointer == 0)
                {
                     // this is the first NAL, we will buffer this, and await the rest of the data in the remainder seuqnce,
                    // we will display when we get another sequence buffer tstarting with NAL
    
                    int copy = buffer.Length - 99;
                    Buffer.BlockCopy(buffer, 99, videoData, BufferPointer, copy);
                    BufferPointer = buffer.Length - 99;
                }
                else
                {
    
                    // we already have data in our buffer, got our NAL so lets process what we have
                    ProcessVideo2(videoData);
    
                    BufferPointer = 0; // Processed that frame so throw away and wait for more video data...hopefully slices. sps and pps shouldnt happen again
    
                    // Now store the new data we got, first packet of new frame and reset out pointer to show were gthering data
                    Buffer.BlockCopy(buffer, 99, videoData, 0, buffer.Length - 99);
                    BufferPointer = buffer.Length - 99; 
    
                }
            }
            else
            {
                // Doesnt start with a NAL start code, so either scrambled, or we are building a bigger buffer...
                Buffer.BlockCopy(buffer, 99, videoData, BufferPointer, buffer.Length - 99);
                BufferPointer = BufferPointer + buffer.Length - 99;
            }
    
    
        }
    
    
        private bool CheckNALUStartCode(byte[] buffer)
        {
            string ss = System.Text.Encoding.UTF8.GetString(buffer, 0, 4);
            byte[] NALStart = new byte[] { 0x00, 0x00, 0x00, 0x01 };
    
            if (buffer[0] == 0x00 && buffer[1] == 0x00 && buffer[2] == 0x00 && buffer[3] == 0x01)
            {
                //BufferPointer = 0;
                return true;
            }
            return false;
        }
    
        private int StartCodeIndex(byte[] buffer, int from)
        {
            for (int i = from; i < buffer.Length - 5; i++)
            {
                if (buffer[i] == 0x00 && buffer[i + 1] == 0x00 && buffer[i + 2] == 0x00 && buffer[i + 3] == 0x01)
                {
                    return i;
                }
            }
    
            return -1;
        }
    
    
    
        private CMVideoFormatDescription formatDescription = null;
    
    
    
        private void ProcessVideo2(byte[] buffer)
        {
            int spsRangeLower = -1;
            int ppsRangeLower = -1;
    
            CMBlockBuffer blockBuffer = null;
            CMSampleBuffer sampleBuffer = null;
            int blockLength = 0;
    
            byte[] vbuffer = new byte[BufferPointer];
            Array.Copy(buffer, vbuffer, BufferPointer);
            int frameSize = BufferPointer;
    
    
            // SPS, PPS, IDS
            // BYTE: 0   1   2   3   4   5   6   7   8   9   10  11  12  13  14  15  16  17  18  19  20  21  22  23  24  25  26  27  28  29  30  31  32  33  34  35  36  37  38  39  40  41  42 ........ etc 
            // DATA: 0   0   0   1   SPS D   D   D   D   D   D   D   D   D   D   D   0   0   0   1   PPS D   D   D   D   D   D   D   D   D   D   D   0   0   0   1   IDR D   D   D   D   D   D  ........ etc
    
            // Coded Slide
            // BYTE: 0   1   2   3   4   5   6   7   ........ etc 
            // DATA: 0   0   0   1   CDS D   D   D   ........ etc
    
            Console.WriteLine("Processing : " + frameSize.ToString());
            NALUnitType naluType = (NALUnitType)(vbuffer[4] & 0x1F);
    
    
            if (formatDescription == null && naluType != NALUnitType.sps)
            {
                Console.WriteLine("Error: format Description is nil and frame does not start from SPS");
                return;
            }
    
            if (naluType == NALUnitType.sps)
            {
                //sps should be 4 to 15, then 4 buffer bytes 0,0,0,1 ..so next NALU in byte 20, we DO NOT Include the code 0,0,0,1 (start code header) in the data
                spsRangeLower = 4;//4
                naluType = (NALUnitType)(vbuffer[20] & 0x1F);
            }
    
            if (naluType == NALUnitType.pps)
            {
                //pps should be 20 to 31
                ppsRangeLower = 20;//20
                naluType = (NALUnitType)(vbuffer[36] & 0x1F);
            }
    
    
            if (spsRangeLower != -1 && ppsRangeLower != -1) // Check we have SPS and PPS
            {
                int spsSize = 12;
                int ppsSize = 12;
                formatDescription = null;
    
                byte[] param1 = new byte[spsSize];
                byte[] param2 = new byte[ppsSize];
                Array.Copy(vbuffer, spsRangeLower, param1, 0, spsSize);
                Array.Copy(vbuffer, ppsRangeLower, param2, 0, ppsSize);
                var props = new List<byte[]> { param1, param2 };
    
                formatDescription = null;
                CMFormatDescriptionError Status = new CMFormatDescriptionError();
                formatDescription = CMVideoFormatDescription.FromH264ParameterSets(props, 4, out Status);
    
                if (formatDescription == null)
                {
                    Console.WriteLine("Error: Can't create CMFormatDescription" + Status.ToString());
                    return;
                }
                decompressionSession(sampleBuffer);
            }
    
            if (naluType == NALUnitType.idr)
            {
                Console.WriteLine("Processing IDR");
    
                int offset = 32; // starts at the header 0,0,0,1
                blockLength = frameSize- offset;
                byte[] data = new byte[blockLength];
                Array.Copy(vbuffer, offset, data, 0, blockLength);
    
                byte[] intBytes = BitConverter.GetBytes((UInt32)(blockLength - 4)); // dont include the header bytes, in the length
                Array.Reverse(intBytes);
    
                // now replace the 0,0,0,1 header bytes with the big endian length
                Array.Copy(intBytes, data, 4);
    
                CMBlockBufferError bbstatus = new CMBlockBufferError();
                blockBuffer = CMBlockBuffer.FromMemoryBlock(data, 0, CMBlockBufferFlags.AssureMemoryNow, out bbstatus);
                if (blockBuffer == null)
                {
                    Console.WriteLine("Error: Can't create CMBlockBuffer from IDR" + bbstatus.ToString());
                    return;
                }
              //  CreateDecompressionSession();
    
            }
    
    
            if (naluType == NALUnitType.codedSlice)
            {
                Console.WriteLine("Processing Code Slice");
    
                // no offset coded slice always comes in its own UDP 
                blockLength = frameSize;
    
                byte[] data = new byte[blockLength];
                Array.Copy(vbuffer, 0, data, 0, blockLength);
    
                byte[] intBytes = BitConverter.GetBytes((UInt32)(blockLength - 4)); // dont include the header bytes, in the length
                Array.Reverse(intBytes);
                // now replace the 0,0,0,1 header bytes with the big endian length
                Array.Copy(intBytes, data, 4);
    
                CMBlockBufferError bbstatus = new CMBlockBufferError();
                blockBuffer = CMBlockBuffer.FromMemoryBlock(data, 0, CMBlockBufferFlags.AssureMemoryNow, out bbstatus);
                if (blockBuffer == null)
                {
                    Console.WriteLine("Error: Can't create CMBlockBuffer from IDR" + bbstatus.ToString());
                    return;
                }
            }
    
            if (blockBuffer != null)
            {
                CMSampleBufferError status = new CMSampleBufferError();
                nuint[] sampleSizeArray = new nuint[1];
                sampleSizeArray[0] = (nuint)blockLength; 
                sampleBuffer = CMSampleBuffer.CreateReady(blockBuffer, formatDescription, 1, null, sampleSizeArray, out status);
    
                if (sampleBuffer == null)
                {
                    Console.WriteLine("Error: Failed to create CMSampleBuffer");
                    return;
                }
    
    
                CMSampleBufferAttachmentSettings c = sampleBuffer.GetSampleAttachments(true)[0];
                c.DisplayImmediately = true;
    
    
                if (test.ReadyForMoreMediaData)
                {
                    test.Enqueue(sampleBuffer);
                }
                else
                {
                    Console.WriteLine("Throwing away frame");
                }
    
            }
            else
            {
                Console.WriteLine("Error: Reached end of the method without available blockBuffer");
                return;
            }
    
    
        }
        private VTDecompressionSession decompressionSession;
    
        private bool CreateDecompressionSession()
        {
    
    
            decompressionSession = null;
            var desc = formatDescription;
            var specificationKeys = new VTVideoDecoderSpecification();
    
            var cVPixelBufferAttributes = new CVPixelBufferAttributes();
    
            decompressionSession = VTDecompressionSession.Create(HandleVTDecompressionOutputCallback1, formatDescription, null, cVPixelBufferAttributes);
            return false;
        }
    
        void HandleVTDecompressionOutputCallback1(IntPtr sourceFrame, VTStatus status, VTDecodeInfoFlags flags, CVImageBuffer buffer, CMTime presentationTimeStamp, CMTime presentationDuration)
        {
            Console.WriteLine("Decompression Callback");
        }
    
    
    
    }
    

    }

Sign In or Register to comment.