Forum Xamarin.Forms

Announcement:

The Xamarin Forums have officially moved to the new Microsoft Q&A experience. Microsoft Q&A is now the home for technical questions and answers across all Microsoft products, including Xamarin!

To create new threads and ask questions head over to Microsoft Q&A for .NET and get involved today.

[WORKING] Video compression

john82john82 ITMember ✭✭✭

I was struggling with video compression on my Xamarin.Forms application.

I saw many posts here talking about using FFMPEG or Xamarin.Android.MP4Transcoder (formerly Xamarin.Android.FFmpeg) on Android, while using the native AV* libraries on iOS.

I finally did it using these solutions:

@XamarinTeam what do you think?

I'm sharing my code


Install these nuget packages

On ALL Projects
https://www.nuget.org/packages/MetadataExtractor/

On Android Project Only
https://www.nuget.org/packages/Karamunting.Android.Natario1.Transcoder/

Define this Interface in your core project for IOC

    public interface IVideoTranscoder
    {
        Task<Stream> TranscodeVideoAsync(string sourceVideoPath);
    }

Define this helper class in your core project

    public static class VideoHelper
    {
        #region Constants and Fields
        #endregion

        #region Constructors

        static VideoHelper()
        {
        }

        #endregion

        #region Public Methods

        public static TimeSpan GetVideDuration(Stream stream)
        {
            stream.Seek(0, SeekOrigin.Begin);
            var exifDirectories = QuickTimeMetadataReader.ReadMetadata(stream);
            if (exifDirectories.Count == 0)
                return TimeSpan.MinValue;

            TimeSpan duration = TimeSpan.MinValue;
            foreach (var exifDir in exifDirectories)
            {
                var exifTagDuration = exifDir.Tags.FirstOrDefault(x => x.Name.Contains("Duration"));
                if (exifTagDuration != null)
                {
                    duration = TimeSpan.Parse(exifTagDuration.Description);
                    break;
                }
            }

            if (duration == TimeSpan.MinValue)
                return TimeSpan.MinValue;

            return duration;
        }

        public static IReadOnlyList<MetadataExtractor.Directory> GetEXIFData(Stream stream)
        {
            stream.Seek(0, SeekOrigin.Begin);
            var exifData = QuickTimeMetadataReader.ReadMetadata(stream);
            return exifData;
        }

        public static MetadataExtractor.Tag GetEXIFTag(IReadOnlyList<MetadataExtractor.Directory> exifData, string tagName)
        {
            if (exifData.Count == 0)
                return null;

            foreach (var exifDir in exifData)
            {
                foreach (var exifTag in exifDir.Tags)
                {
                    if (exifTag.Name.ToLower() == tagName.ToLower())
                        return exifTag;
                }
            }

            return null;
        }

        public static MetadataExtractor.Tag[] GetEXIFTags(IReadOnlyList<MetadataExtractor.Directory> exifData, string tagName)
        {
            if (exifData.Count == 0)
                return null;

            List<MetadataExtractor.Tag> tags = new List<MetadataExtractor.Tag>();
            foreach (var exifDir in exifData)
            {
                foreach (var exifTag in exifDir.Tags)
                {
                    if (exifTag.Name.ToLower() == tagName.ToLower())
                        tags.Add(exifTag);
                }
            }

            return tags.ToArray();
        }

        #endregion
    }

Define this in your Android Project

[assembly: Dependency(typeof(YourNameSpaceHere.Droid.VideoTranscoder))]
namespace YourNameSpaceHere.Droid
{
    public class VideoTranscoder : IVideoTranscoder
    {
        class AVAssetExportSession : Java.Lang.Object, ITranscoderListener
        {
            #region Constants and Fields

            private string _assetPath;

            private SemaphoreSlim _semaphore;

            #endregion

            #region Properties

            public string OutputPath
            {
                get;
                set;
            }

            #endregion

            #region Constructors

            public AVAssetExportSession(string assetPath)
            {
                _assetPath = assetPath;
                _semaphore = new SemaphoreSlim(1);
            }

            #endregion

            #region Public Methods

            public async Task ExportTaskAsync()
            {
                DefaultVideoStrategy videoStrategy = new DefaultVideoStrategy.Builder()
                    .AddResizer(new ExactResizer(720, 1280))
                    .BitRate(4096)
                    .FrameRate(40)
                    .KeyFrameInterval(3f)
                    .Build();

                DefaultAudioStrategy audioStrategy = new DefaultAudioStrategy.Builder()
                    .SampleRate(44100)
                    .BitRate(96)
                    .Build();

                await _semaphore.WaitAsync();
                Transcoder.Into(this.OutputPath)
                    .AddDataSource(_assetPath)
                    .SetVideoTrackStrategy(DefaultVideoStrategies.For720x1280())
                    .SetAudioTrackStrategy(audioStrategy)
                    .SetListener(this)
                    .Transcode();
                await _semaphore.WaitAsync();
            }

            #endregion

            #region ITranscoderListener Methods

            public void OnTranscodeCanceled()
            {
                _semaphore.Release();
            }

            public void OnTranscodeCompleted(int successCode)
            {
                _semaphore.Release();
            }

            public void OnTranscodeFailed(Throwable exception)
            {
                _semaphore.Release();
            }

            public void OnTranscodeProgress(double progress)
            {
            }

            #endregion
        }

        public async Task<Stream> TranscodeVideoAsync(string sourceVideoPath)
        {
            AVAssetExportSession export = new AVAssetExportSession(sourceVideoPath);

            string outputFilePath = Path.Combine(Path.GetTempPath(), $"{Path.GetRandomFileName().Split('.')[0]}.mp4");
            export.OutputPath = outputFilePath;

            await export.ExportTaskAsync();

            MemoryStream output = new MemoryStream();
            using (Stream source = File.OpenRead(outputFilePath))
                await source.CopyToAsync(output);

            if (File.Exists(outputFilePath))
                File.Delete(outputFilePath);

            output.Seek(0, SeekOrigin.Begin);
            return output;
        }
    }
}

Define this in your iOS Project

[assembly: Dependency(typeof(YourNameSpaceHere.iOS.VideoTranscoder))]
namespace YourNameSpaceHere.iOS
{
    public class VideoTranscoder : IVideoTranscoder
    {
        public async Task<Stream> TranscodeVideoAsync(string sourceVideoPath)
        {
            var asset = AVAsset.FromUrl(NSUrl.FromFilename(sourceVideoPath));
            AVAssetExportSession export = new AVAssetExportSession(asset, AVAssetExportSession.Preset1280x720);

            string outputFilePath = Path.Combine(Path.GetTempPath(), $"{Path.GetRandomFileName().Split('.')[0]}.mp4");
            export.OutputUrl = NSUrl.FromFilename(outputFilePath);
            export.OutputFileType = AVFileType.Mpeg4;
            export.ShouldOptimizeForNetworkUse = true;
            export.FileLengthLimit = 4097152;

            await export.ExportTaskAsync();

            MemoryStream output = new MemoryStream();
            using (Stream source = File.OpenRead(outputFilePath))
                await source.CopyToAsync(output);

            if (File.Exists(outputFilePath))
                File.Delete(outputFilePath);

            output.Seek(0, SeekOrigin.Begin);
            return output;
        }
    }
}
Sign In or Register to comment.