diff --git a/FFMpegCore.Test/ArgumentBuilderTest.cs b/FFMpegCore.Test/ArgumentBuilderTest.cs index 482830d..5fc06b0 100644 --- a/FFMpegCore.Test/ArgumentBuilderTest.cs +++ b/FFMpegCore.Test/ArgumentBuilderTest.cs @@ -10,10 +10,10 @@ namespace FFMpegCore.Test [TestClass] public class ArgumentBuilderTest : BaseTest { - List concatFiles = new List + private List concatFiles = new List { "1.mp4", "2.mp4", "3.mp4", "4.mp4"}; - FFArgumentBuilder builder; + private FFArgumentBuilder builder; public ArgumentBuilderTest() : base() { @@ -22,7 +22,7 @@ public ArgumentBuilderTest() : base() private string GetArgumentsString(params Argument[] args) { - var container = new ArgumentContainer {new InputArgument("input.mp4")}; + var container = new ArgumentContainer { new InputArgument("input.mp4") }; foreach (var a in args) { container.Add(a); @@ -44,7 +44,6 @@ private string GetArgumentsString(ArgumentContainer container) return builder.BuildArguments(resContainer); } - [TestMethod] public void Builder_BuildString_IO_1() { @@ -73,8 +72,22 @@ public void Builder_BuildString_Scale_Fluent() [TestMethod] public void Builder_BuildString_AudioCodec() { - var str = GetArgumentsString(new AudioCodecArgument(AudioCodec.Aac, AudioQuality.Normal)); - Assert.AreEqual(str, "-i \"input.mp4\" -c:a aac -b:a 128k \"output.mp4\""); + var str = GetArgumentsString(new AudioCodecArgument(AudioCodec.Aac)); + Assert.AreEqual(str, "-i \"input.mp4\" -c:a aac \"output.mp4\""); + } + + [TestMethod] + public void Builder_BuildString_AudioBitrate() + { + var str = GetArgumentsString(new AudioBitrateArgument(AudioQuality.Normal)); + Assert.AreEqual(str, "-i \"input.mp4\" -b:a 128k \"output.mp4\""); + } + + [TestMethod] + public void Builder_BuildString_Quiet() + { + var str = GetArgumentsString(new QuietArgument()); + Assert.AreEqual(str, "-i \"input.mp4\" -hide_banner -loglevel warning \"output.mp4\""); } @@ -104,8 +117,7 @@ public void Builder_BuildString_BitStream_Fluent() [TestMethod] public void 
Builder_BuildString_Concat() { - var container = new ArgumentContainer {new ConcatArgument(concatFiles), new OutputArgument("output.mp4")}; - + var container = new ArgumentContainer { new ConcatArgument(concatFiles), new OutputArgument("output.mp4") }; var str = builder.BuildArguments(container); @@ -322,7 +334,7 @@ public void Builder_BuildString_Speed_Fluent() [TestMethod] public void Builder_BuildString_DrawtextFilter() { - var str = GetArgumentsString(new DrawTextArgument("Stack Overflow", "/path/to/font.ttf", + var str = GetArgumentsString(new DrawTextArgument("Stack Overflow", "/path/to/font.ttf", ("fontcolor", "white"), ("fontsize", "24"), ("box", "1"), @@ -376,7 +388,7 @@ public void Builder_BuildString_StartNumber_Fluent() public void Builder_BuildString_Threads_1() { var str = GetArgumentsString(new ThreadsArgument(50)); - + Assert.AreEqual(str, "-i \"input.mp4\" -threads 50 \"output.mp4\""); } @@ -395,7 +407,7 @@ public void Builder_BuildString_Threads_2() Assert.AreEqual(str, $"-i \"input.mp4\" -threads {Environment.ProcessorCount} \"output.mp4\""); } - + [TestMethod] public void Builder_BuildString_Threads_2_Fluent() { @@ -404,7 +416,6 @@ public void Builder_BuildString_Threads_2_Fluent() Assert.AreEqual(str, $"-i \"input.mp4\" -threads {Environment.ProcessorCount} \"output.mp4\""); } - [TestMethod] public void Builder_BuildString_Codec() { @@ -439,7 +450,8 @@ public void Builder_BuildString_Codec_Override_Fluent() [TestMethod] - public void Builder_BuildString_Duration() { + public void Builder_BuildString_Duration() + { var str = GetArgumentsString(new DurationArgument(TimeSpan.FromSeconds(20))); Assert.AreEqual(str, "-i \"input.mp4\" -t 00:00:20 \"output.mp4\""); @@ -452,5 +464,15 @@ public void Builder_BuildString_Duration_Fluent() Assert.AreEqual(str, "-i \"input.mp4\" -t 00:00:20 \"output.mp4\""); } + + [TestMethod] + public void Builder_BuildString_Raw() + { + var str = GetArgumentsString(new CustomArgument(null)); + Assert.AreEqual(str, "-i 
\"input.mp4\" \"output.mp4\""); + + str = GetArgumentsString(new CustomArgument("-acodec copy")); + Assert.AreEqual(str, "-i \"input.mp4\" -acodec copy \"output.mp4\""); + } } -} +} \ No newline at end of file diff --git a/FFMpegCore.Test/BitmapSources.cs b/FFMpegCore.Test/BitmapSources.cs new file mode 100644 index 0000000..33c8035 --- /dev/null +++ b/FFMpegCore.Test/BitmapSources.cs @@ -0,0 +1,219 @@ +using FFMpegCore.Extend; +using FFMpegCore.FFMPEG.Pipes; +using System; +using System.Collections.Generic; +using System.Drawing; +using System.Drawing.Imaging; +using System.Numerics; +using System.Text; + +namespace FFMpegCore.Test +{ + static class BitmapSource + { + public static IEnumerable CreateBitmaps(int count, PixelFormat fmt, int w, int h) + { + for (int i = 0; i < count; i++) + { + using (var frame = CreateVideoFrame(i, fmt, w, h, 0.025f, 0.025f * w * 0.03f)) + { + yield return frame; + } + } + } + + private static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset) + { + var bitmap = new Bitmap(w, h, fmt); + + offset = offset * index; + + for (int y = 0; y < h; y++) + for (int x = 0; x < w; x++) + { + var nx = x * scaleNoise + offset; + var ny = y * scaleNoise + offset; + + var value = (int)((Perlin.Noise(nx, ny) + 1.0f) / 2.0f * 255); + + var color = Color.FromArgb(value, value, value); + + bitmap.SetPixel(x, y, color); + } + + return new BitmapVideoFrameWrapper(bitmap); + } + + // + // Perlin noise generator for Unity + // Keijiro Takahashi, 2013, 2015 + // https://github.com/keijiro/PerlinNoise + // + // Based on the original implementation by Ken Perlin + // http://mrl.nyu.edu/~perlin/noise/ + // + static class Perlin + { + #region Noise functions + + public static float Noise(float x) + { + var X = (int)MathF.Floor(x) & 0xff; + x -= MathF.Floor(x); + var u = Fade(x); + return Lerp(u, Grad(perm[X], x), Grad(perm[X + 1], x - 1)) * 2; + } + + public static float Noise(float x, float y) + { 
+ var X = (int)MathF.Floor(x) & 0xff; + var Y = (int)MathF.Floor(y) & 0xff; + x -= MathF.Floor(x); + y -= MathF.Floor(y); + var u = Fade(x); + var v = Fade(y); + var A = (perm[X] + Y) & 0xff; + var B = (perm[X + 1] + Y) & 0xff; + return Lerp(v, Lerp(u, Grad(perm[A], x, y), Grad(perm[B], x - 1, y)), + Lerp(u, Grad(perm[A + 1], x, y - 1), Grad(perm[B + 1], x - 1, y - 1))); + } + + public static float Noise(Vector2 coord) + { + return Noise(coord.X, coord.Y); + } + + public static float Noise(float x, float y, float z) + { + var X = (int)MathF.Floor(x) & 0xff; + var Y = (int)MathF.Floor(y) & 0xff; + var Z = (int)MathF.Floor(z) & 0xff; + x -= MathF.Floor(x); + y -= MathF.Floor(y); + z -= MathF.Floor(z); + var u = Fade(x); + var v = Fade(y); + var w = Fade(z); + var A = (perm[X] + Y) & 0xff; + var B = (perm[X + 1] + Y) & 0xff; + var AA = (perm[A] + Z) & 0xff; + var BA = (perm[B] + Z) & 0xff; + var AB = (perm[A + 1] + Z) & 0xff; + var BB = (perm[B + 1] + Z) & 0xff; + return Lerp(w, Lerp(v, Lerp(u, Grad(perm[AA], x, y, z), Grad(perm[BA], x - 1, y, z)), + Lerp(u, Grad(perm[AB], x, y - 1, z), Grad(perm[BB], x - 1, y - 1, z))), + Lerp(v, Lerp(u, Grad(perm[AA + 1], x, y, z - 1), Grad(perm[BA + 1], x - 1, y, z - 1)), + Lerp(u, Grad(perm[AB + 1], x, y - 1, z - 1), Grad(perm[BB + 1], x - 1, y - 1, z - 1)))); + } + + public static float Noise(Vector3 coord) + { + return Noise(coord.X, coord.Y, coord.Z); + } + + #endregion + + #region fBm functions + + public static float Fbm(float x, int octave) + { + var f = 0.0f; + var w = 0.5f; + for (var i = 0; i < octave; i++) + { + f += w * Noise(x); + x *= 2.0f; + w *= 0.5f; + } + return f; + } + + public static float Fbm(Vector2 coord, int octave) + { + var f = 0.0f; + var w = 0.5f; + for (var i = 0; i < octave; i++) + { + f += w * Noise(coord); + coord *= 2.0f; + w *= 0.5f; + } + return f; + } + + public static float Fbm(float x, float y, int octave) + { + return Fbm(new Vector2(x, y), octave); + } + + public static float Fbm(Vector3 
coord, int octave) + { + var f = 0.0f; + var w = 0.5f; + for (var i = 0; i < octave; i++) + { + f += w * Noise(coord); + coord *= 2.0f; + w *= 0.5f; + } + return f; + } + + public static float Fbm(float x, float y, float z, int octave) + { + return Fbm(new Vector3(x, y, z), octave); + } + + #endregion + + #region Private functions + + static float Fade(float t) + { + return t * t * t * (t * (t * 6 - 15) + 10); + } + + static float Lerp(float t, float a, float b) + { + return a + t * (b - a); + } + + static float Grad(int hash, float x) + { + return (hash & 1) == 0 ? x : -x; + } + + static float Grad(int hash, float x, float y) + { + return ((hash & 1) == 0 ? x : -x) + ((hash & 2) == 0 ? y : -y); + } + + static float Grad(int hash, float x, float y, float z) + { + var h = hash & 15; + var u = h < 8 ? x : y; + var v = h < 4 ? y : (h == 12 || h == 14 ? x : z); + return ((h & 1) == 0 ? u : -u) + ((h & 2) == 0 ? v : -v); + } + + static int[] perm = { + 151,160,137,91,90,15, + 131,13,201,95,96,53,194,233,7,225,140,36,103,30,69,142,8,99,37,240,21,10,23, + 190, 6,148,247,120,234,75,0,26,197,62,94,252,219,203,117,35,11,32,57,177,33, + 88,237,149,56,87,174,20,125,136,171,168, 68,175,74,165,71,134,139,48,27,166, + 77,146,158,231,83,111,229,122,60,211,133,230,220,105,92,41,55,46,245,40,244, + 102,143,54, 65,25,63,161, 1,216,80,73,209,76,132,187,208, 89,18,169,200,196, + 135,130,116,188,159,86,164,100,109,198,173,186, 3,64,52,217,226,250,124,123, + 5,202,38,147,118,126,255,82,85,212,207,206,59,227,47,16,58,17,182,189,28,42, + 223,183,170,213,119,248,152, 2,44,154,163, 70,221,153,101,155,167, 43,172,9, + 129,22,39,253, 19,98,108,110,79,113,224,232,178,185, 112,104,218,246,97,228, + 251,34,242,193,238,210,144,12,191,179,162,241, 81,51,145,235,249,14,239,107, + 49,192,214, 31,181,199,106,157,184, 84,204,176,115,121,50,45,127, 4,150,254, + 138,236,205,93,222,114,67,29,24,72,243,141,128,195,78,66,215,61,156,180, + 151 + }; + + #endregion + } + } +} diff --git 
a/FFMpegCore.Test/FFMpegCore.Test.csproj b/FFMpegCore.Test/FFMpegCore.Test.csproj index 8e749dd..1c729a9 100644 --- a/FFMpegCore.Test/FFMpegCore.Test.csproj +++ b/FFMpegCore.Test/FFMpegCore.Test.csproj @@ -12,7 +12,7 @@ - Always + PreserveNewest diff --git a/FFMpegCore.Test/FFProbeTests.cs b/FFMpegCore.Test/FFProbeTests.cs index d66d561..5e877ff 100644 --- a/FFMpegCore.Test/FFProbeTests.cs +++ b/FFMpegCore.Test/FFProbeTests.cs @@ -31,5 +31,30 @@ public void Probe_Success() Assert.AreEqual(13, info.Duration.Seconds); } + + [TestMethod] + public void Probe_Success_FromStream() + { + var output = new FFProbe(); + + using (var stream = File.OpenRead(VideoLibrary.LocalVideo.FullName)) + { + var info = output.ParseVideoInfo(stream); + Assert.AreEqual(13, info.Duration.Seconds); + } + } + + [TestMethod] + public void Probe_Success_FromStream_Async() + { + var output = new FFProbe(); + + using (var stream = File.OpenRead(VideoLibrary.LocalVideo.FullName)) + { + var info = output.ParseVideoInfoAsync(stream).WaitForResult(); + + Assert.AreEqual(13, info.Duration.Seconds); + } + } } } \ No newline at end of file diff --git a/FFMpegCore.Test/Resources/VideoLibrary.cs b/FFMpegCore.Test/Resources/VideoLibrary.cs index 90280f8..f630273 100644 --- a/FFMpegCore.Test/Resources/VideoLibrary.cs +++ b/FFMpegCore.Test/Resources/VideoLibrary.cs @@ -17,6 +17,7 @@ public enum ImageType public static class VideoLibrary { public static readonly FileInfo LocalVideo = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}input.mp4"); + public static readonly FileInfo LocalVideoWebm = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}input.webm"); public static readonly FileInfo LocalVideoAudioOnly = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}audio_only.mp4"); public static readonly FileInfo LocalVideoNoAudio = new 
FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}mute.mp4"); public static readonly FileInfo LocalAudio = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}audio.mp3"); diff --git a/FFMpegCore.Test/TasksExtensions.cs b/FFMpegCore.Test/TasksExtensions.cs new file mode 100644 index 0000000..67163a7 --- /dev/null +++ b/FFMpegCore.Test/TasksExtensions.cs @@ -0,0 +1,13 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; + +namespace FFMpegCore.Test +{ + static class TasksExtensions + { + public static T WaitForResult(this Task task) => + task.ConfigureAwait(false).GetAwaiter().GetResult(); + } +} diff --git a/FFMpegCore.Test/VideoTest.cs b/FFMpegCore.Test/VideoTest.cs index eff0d44..0f4fc7f 100644 --- a/FFMpegCore.Test/VideoTest.cs +++ b/FFMpegCore.Test/VideoTest.cs @@ -1,6 +1,8 @@ using FFMpegCore.Enums; using FFMpegCore.FFMPEG.Argument; using FFMpegCore.FFMPEG.Enums; +using FFMpegCore.FFMPEG.Exceptions; +using FFMpegCore.FFMPEG.Pipes; using FFMpegCore.Test.Resources; using Microsoft.VisualStudio.TestTools.UnitTesting; using System; @@ -62,6 +64,109 @@ public bool Convert(VideoType type, bool multithreaded = false, VideoSize size = } } + private void ConvertFromStreamPipe(VideoType type, ArgumentContainer container) + { + var output = Input.OutputLocation(type); + + try + { + var input = VideoInfo.FromFileInfo(VideoLibrary.LocalVideoWebm); + using (var inputStream = System.IO.File.OpenRead(input.FullName)) + { + var pipeSource = new StreamPipeDataWriter(inputStream); + var arguments = new ArgumentContainer { new InputPipeArgument(pipeSource) }; + foreach (var arg in container) + { + arguments.Add(arg.Value); + } + arguments.Add(new OutputArgument(output)); + + var scaling = container.Find(); + + Encoder.Convert(arguments); + + var outputVideo = new VideoInfo(output.FullName); + + Assert.IsTrue(File.Exists(output.FullName)); + 
Assert.IsTrue(Math.Abs((outputVideo.Duration - input.Duration).TotalMilliseconds) < 1000.0 / input.FrameRate); + + if (scaling == null) + { + Assert.AreEqual(outputVideo.Width, input.Width); + Assert.AreEqual(outputVideo.Height, input.Height); + } + else + { + if (scaling.Value.Width != -1) + { + Assert.AreEqual(outputVideo.Width, scaling.Value.Width); + } + + if (scaling.Value.Height != -1) + { + Assert.AreEqual(outputVideo.Height, scaling.Value.Height); + } + + Assert.AreNotEqual(outputVideo.Width, input.Width); + Assert.AreNotEqual(outputVideo.Height, input.Height); + } + } + } + finally + { + if (File.Exists(output.FullName)) + File.Delete(output.FullName); + } + } + + private void ConvertToStreamPipe(VideoType type, ArgumentContainer container) + { + using (var ms = new MemoryStream()) + { + var input = VideoInfo.FromFileInfo(VideoLibrary.LocalVideo); + var arguments = new ArgumentContainer { new InputArgument(input) }; + + foreach (var arg in container) + { + arguments.Add(arg.Value); + } + + var streamPipeDataReader = new StreamPipeDataReader(ms); + streamPipeDataReader.BlockSize = streamPipeDataReader.BlockSize * 16; + arguments.Add(new OutputPipeArgument(streamPipeDataReader)); + + var scaling = container.Find(); + + Encoder.Convert(arguments); + + ms.Position = 0; + var outputVideo = VideoInfo.FromStream(ms); + + //Assert.IsTrue(Math.Abs((outputVideo.Duration - input.Duration).TotalMilliseconds) < 1000.0 / input.FrameRate); + + if (scaling == null) + { + Assert.AreEqual(outputVideo.Width, input.Width); + Assert.AreEqual(outputVideo.Height, input.Height); + } + else + { + if (scaling.Value.Width != -1) + { + Assert.AreEqual(outputVideo.Width, scaling.Value.Width); + } + + if (scaling.Value.Height != -1) + { + Assert.AreEqual(outputVideo.Height, scaling.Value.Height); + } + + Assert.AreNotEqual(outputVideo.Width, input.Width); + Assert.AreNotEqual(outputVideo.Height, input.Height); + } + } + } + public void Convert(VideoType type, ArgumentContainer 
container) { var output = Input.OutputLocation(type); @@ -70,7 +175,7 @@ public void Convert(VideoType type, ArgumentContainer container) { var input = VideoInfo.FromFileInfo(Input); - var arguments = new ArgumentContainer {new InputArgument(input)}; + var arguments = new ArgumentContainer { new InputArgument(input) }; foreach (var arg in container) { arguments.Add(arg.Value); @@ -114,6 +219,64 @@ public void Convert(VideoType type, ArgumentContainer container) } } + public void ConvertFromPipe(VideoType type, ArgumentContainer container) + { + ConvertFromPipe(type, container, PixelFormat.Format24bppRgb); + ConvertFromPipe(type, container, PixelFormat.Format32bppArgb); + ConvertFromPipe(type, container, PixelFormat.Format48bppRgb); + } + + public void ConvertFromPipe(VideoType type, ArgumentContainer container, PixelFormat fmt) + { + var output = Input.OutputLocation(type); + + try + { + var videoFramesSource = new RawVideoPipeDataWriter(BitmapSource.CreateBitmaps(128, fmt, 256, 256)); + var arguments = new ArgumentContainer { new InputPipeArgument(videoFramesSource) }; + foreach (var arg in container) + { + arguments.Add(arg.Value); + } + arguments.Add(new OutputArgument(output)); + + var scaling = container.Find(); + + Encoder.Convert(arguments); + + var outputVideo = new VideoInfo(output.FullName); + + Assert.IsTrue(File.Exists(output.FullName)); + + if (scaling == null) + { + Assert.AreEqual(outputVideo.Width, videoFramesSource.Width); + Assert.AreEqual(outputVideo.Height, videoFramesSource.Height); + } + else + { + if (scaling.Value.Width != -1) + { + Assert.AreEqual(outputVideo.Width, scaling.Value.Width); + } + + if (scaling.Value.Height != -1) + { + Assert.AreEqual(outputVideo.Height, scaling.Value.Height); + } + + Assert.AreNotEqual(outputVideo.Width, videoFramesSource.Width); + Assert.AreNotEqual(outputVideo.Height, videoFramesSource.Height); + } + } + finally + { + if (File.Exists(output.FullName)) + File.Delete(output.FullName); + } + + } + [TestMethod] 
public void Video_ToMP4() { @@ -123,10 +286,90 @@ public void Video_ToMP4() [TestMethod] public void Video_ToMP4_Args() { - var container = new ArgumentContainer {new VideoCodecArgument(VideoCodec.LibX264)}; + var container = new ArgumentContainer { new VideoCodecArgument(VideoCodec.LibX264) }; Convert(VideoType.Mp4, container); } + [TestMethod] + public void Video_ToMP4_Args_Pipe() + { + var container = new ArgumentContainer { new VideoCodecArgument(VideoCodec.LibX264) }; + ConvertFromPipe(VideoType.Mp4, container); + } + + [TestMethod] + public void Video_ToMP4_Args_StreamPipe() + { + var container = new ArgumentContainer { new VideoCodecArgument(VideoCodec.LibX264) }; + ConvertFromStreamPipe(VideoType.Mp4, container); + } + + [TestMethod] + public void Video_ToMP4_Args_StreamOutputPipe_Async_Failure() + { + Assert.ThrowsException(() => + { + using (var ms = new MemoryStream()) + { + var pipeSource = new StreamPipeDataReader(ms); + var container = new ArgumentContainer + { + new InputArgument(VideoLibrary.LocalVideo), + new VideoCodecArgument(VideoCodec.LibX264), + new ForceFormatArgument("mkv"), + new OutputPipeArgument(pipeSource) + }; + + var input = VideoInfo.FromFileInfo(VideoLibrary.LocalVideoWebm); + Encoder.ConvertAsync(container).WaitForResult(); + } + }); + } + + [TestMethod] + public void Video_ToMP4_Args_StreamOutputPipe_Failure() + { + Assert.ThrowsException(() => + { + var container = new ArgumentContainer + { + new ForceFormatArgument("mkv") + }; + ConvertToStreamPipe(VideoType.Mp4, container); + }); + } + + + [TestMethod] + public void Video_ToMP4_Args_StreamOutputPipe_Async() + { + using (var ms = new MemoryStream()) + { + var pipeSource = new StreamPipeDataReader(ms); + var container = new ArgumentContainer + { + new InputArgument(VideoLibrary.LocalVideo), + new VideoCodecArgument(VideoCodec.LibX264), + new ForceFormatArgument("matroska"), + new OutputPipeArgument(pipeSource) + }; + + var input = 
VideoInfo.FromFileInfo(VideoLibrary.LocalVideoWebm); + Encoder.ConvertAsync(container).WaitForResult(); + } + } + + [TestMethod] + public void Video_ToMP4_Args_StreamOutputPipe() + { + var container = new ArgumentContainer + { + new VideoCodecArgument(VideoCodec.LibX264), + new ForceFormatArgument("matroska") + }; + ConvertToStreamPipe(VideoType.Mp4, container); + } + [TestMethod] public void Video_ToTS() { @@ -145,6 +388,15 @@ public void Video_ToTS_Args() Convert(VideoType.Ts, container); } + [TestMethod] + public void Video_ToTS_Args_Pipe() + { + var container = new ArgumentContainer + { + new ForceFormatArgument(VideoCodec.MpegTs) + }; + ConvertFromPipe(VideoType.Ts, container); + } [TestMethod] public void Video_ToOGV_Resize() @@ -157,12 +409,23 @@ public void Video_ToOGV_Resize_Args() { var container = new ArgumentContainer { - new ScaleArgument(VideoSize.Ed), + new ScaleArgument(VideoSize.Ed), new VideoCodecArgument(VideoCodec.LibTheora) }; Convert(VideoType.Ogv, container); } + [TestMethod] + public void Video_ToOGV_Resize_Args_Pipe() + { + var container = new ArgumentContainer + { + new ScaleArgument(VideoSize.Ed), + new VideoCodecArgument(VideoCodec.LibTheora) + }; + ConvertFromPipe(VideoType.Ogv, container); + } + [TestMethod] public void Video_ToMP4_Resize() { @@ -174,12 +437,23 @@ public void Video_ToMP4_Resize_Args() { var container = new ArgumentContainer { - new ScaleArgument(VideoSize.Ld), + new ScaleArgument(VideoSize.Ld), new VideoCodecArgument(VideoCodec.LibX264) }; Convert(VideoType.Mp4, container); } + [TestMethod] + public void Video_ToMP4_Resize_Args_Pipe() + { + var container = new ArgumentContainer + { + new ScaleArgument(VideoSize.Ld), + new VideoCodecArgument(VideoCodec.LibX264) + }; + ConvertFromPipe(VideoType.Mp4, container); + } + [TestMethod] public void Video_ToOGV() { @@ -323,9 +597,10 @@ public void Video_With_Only_Audio_Should_Extract_Metadata() Assert.AreEqual(79.5, video.Duration.TotalSeconds, 0.5); Assert.AreEqual(1.25, 
video.Size); } - + [TestMethod] - public void Video_Duration() { + public void Video_Duration() + { var video = VideoInfo.FromFileInfo(VideoLibrary.LocalVideo); var output = Input.OutputLocation(VideoType.Mp4); @@ -336,7 +611,8 @@ public void Video_Duration() { new OutputArgument(output) }; - try { + try + { Encoder.Convert(arguments); Assert.IsTrue(File.Exists(output.FullName)); @@ -346,14 +622,17 @@ public void Video_Duration() { Assert.AreEqual(video.Duration.Hours, outputVideo.Duration.Hours); Assert.AreEqual(video.Duration.Minutes, outputVideo.Duration.Minutes); Assert.AreEqual(video.Duration.Seconds - 5, outputVideo.Duration.Seconds); - } finally { + } + finally + { if (File.Exists(output.FullName)) output.Delete(); } } - + [TestMethod] - public void Video_UpdatesProgress() { + public void Video_UpdatesProgress() + { var output = Input.OutputLocation(VideoType.Mp4); var percentageDone = 0.0; @@ -367,16 +646,44 @@ public void Video_UpdatesProgress() { new OutputArgument(output) }; - try { + try + { Encoder.Convert(arguments); Encoder.OnProgress -= OnProgess; - + Assert.IsTrue(File.Exists(output.FullName)); Assert.AreNotEqual(0.0, percentageDone); - } finally { + } + finally + { if (File.Exists(output.FullName)) output.Delete(); } } + + [TestMethod] + public void Video_TranscodeInMemory() + { + using (var resStream = new MemoryStream()) + { + var reader = new StreamPipeDataReader(resStream); + var writer = new RawVideoPipeDataWriter(BitmapSource.CreateBitmaps(128, PixelFormat.Format24bppRgb, 128, 128)); + + var container = new ArgumentContainer + { + new InputPipeArgument(writer), + new VideoCodecArgument("vp9"), + new ForceFormatArgument("webm"), + new OutputPipeArgument(reader) + }; + + Encoder.Convert(container); + + resStream.Position = 0; + var vi = VideoInfo.FromStream(resStream); + Assert.AreEqual(vi.Width, 128); + Assert.AreEqual(vi.Height, 128); + } + } } } diff --git a/FFMpegCore/Extend/BitmapVideoFrameWrapper.cs 
b/FFMpegCore/Extend/BitmapVideoFrameWrapper.cs new file mode 100644 index 0000000..bcfdab7 --- /dev/null +++ b/FFMpegCore/Extend/BitmapVideoFrameWrapper.cs @@ -0,0 +1,89 @@ +using FFMpegCore.FFMPEG.Pipes; +using System; +using System.Collections.Generic; +using System.Drawing; +using System.Drawing.Imaging; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; + +namespace FFMpegCore.Extend +{ + public class BitmapVideoFrameWrapper : IVideoFrame, IDisposable + { + public int Width => Source.Width; + + public int Height => Source.Height; + + public string Format { get; private set; } + + public Bitmap Source { get; private set; } + + public BitmapVideoFrameWrapper(Bitmap bitmap) + { + Source = bitmap ?? throw new ArgumentNullException(nameof(bitmap)); + Format = ConvertStreamFormat(bitmap.PixelFormat); + } + + public void Serialize(System.IO.Stream stream) + { + var data = Source.LockBits(new Rectangle(0, 0, Width, Height), ImageLockMode.ReadOnly, Source.PixelFormat); + + try + { + var buffer = new byte[data.Stride * data.Height]; + Marshal.Copy(data.Scan0, buffer, 0, buffer.Length); + stream.Write(buffer, 0, buffer.Length); + } + finally + { + Source.UnlockBits(data); + } + } + + public async Task SerializeAsync(System.IO.Stream stream) + { + var data = Source.LockBits(new Rectangle(0, 0, Width, Height), ImageLockMode.ReadOnly, Source.PixelFormat); + + try + { + var buffer = new byte[data.Stride * data.Height]; + Marshal.Copy(data.Scan0, buffer, 0, buffer.Length); + await stream.WriteAsync(buffer, 0, buffer.Length); + } + finally + { + Source.UnlockBits(data); + } + } + + public void Dispose() + { + Source.Dispose(); + } + + private static string ConvertStreamFormat(PixelFormat fmt) + { + switch (fmt) + { + case PixelFormat.Format16bppGrayScale: + return "gray16le"; + case PixelFormat.Format16bppRgb565: + return "bgr565le"; + case PixelFormat.Format24bppRgb: + return "rgb24"; + case PixelFormat.Format32bppArgb: + return "rgba"; + 
case PixelFormat.Format32bppPArgb: + //This is not really same as argb32 + return "argb"; + case PixelFormat.Format32bppRgb: + return "rgba"; + case PixelFormat.Format48bppRgb: + return "rgb48le"; + default: + throw new NotSupportedException($"Not supported pixel format {fmt}"); + } + } + } +} diff --git a/FFMpegCore/FFMPEG/Argument/ArgumentContainer.cs b/FFMpegCore/FFMPEG/Argument/ArgumentContainer.cs index d831904..829b511 100644 --- a/FFMpegCore/FFMPEG/Argument/ArgumentContainer.cs +++ b/FFMpegCore/FFMPEG/Argument/ArgumentContainer.cs @@ -1,6 +1,7 @@ using System; using System.Collections; using System.Collections.Generic; +using System.Linq; namespace FFMpegCore.FFMPEG.Argument { @@ -15,7 +16,7 @@ public ArgumentContainer(params Argument[] arguments) { _args = new Dictionary(); - foreach(var argument in arguments) + foreach (var argument in arguments) { Add(argument); } @@ -28,7 +29,7 @@ public bool TryGetArgument(out T output) { if (_args.TryGetValue(typeof(T), out var arg)) { - output = (T) arg; + output = (T)arg; return true; } @@ -90,7 +91,7 @@ public bool Contains(KeyValuePair item) /// Argument that should be added to collection public void Add(params Argument[] values) { - foreach(var value in values) + foreach (var value in values) { _args.Add(value.GetType(), value); } @@ -102,9 +103,8 @@ public void Add(params Argument[] values) /// public bool ContainsInputOutput() { - return ((ContainsKey(typeof(InputArgument)) && !ContainsKey(typeof(ConcatArgument))) || - (!ContainsKey(typeof(InputArgument)) && ContainsKey(typeof(ConcatArgument)))) - && ContainsKey(typeof(OutputArgument)); + return ContainsOnlyOneOf(typeof(InputArgument), typeof(ConcatArgument), typeof(InputPipeArgument)) && + ContainsOnlyOneOf(typeof(OutputArgument), typeof(OutputPipeArgument)); } /// @@ -117,6 +117,11 @@ public bool ContainsKey(Type key) return _args.ContainsKey(key); } + public bool ContainsOnlyOneOf(params Type[] types) + { + return types.Count(t => _args.ContainsKey(t)) == 1; 
+ } + public void CopyTo(KeyValuePair[] array, int arrayIndex) { _args.CopyTo(array, arrayIndex); diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/AudioBitrateArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/AudioBitrateArgument.cs new file mode 100644 index 0000000..7ecde09 --- /dev/null +++ b/FFMpegCore/FFMPEG/Argument/Atoms/AudioBitrateArgument.cs @@ -0,0 +1,19 @@ +using FFMpegCore.FFMPEG.Enums; + +namespace FFMpegCore.FFMPEG.Argument +{ + /// + /// Represents parameter of audio codec and it's quality + /// + public class AudioBitrateArgument : Argument + { + public AudioBitrateArgument(AudioQuality value) : base((int)value) { } + public AudioBitrateArgument(int bitrate) : base(bitrate) { } + + /// + public override string GetStringValue() + { + return $"-b:a {Value}k"; + } + } +} \ No newline at end of file diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/AudioCodecArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/AudioCodecArgument.cs index 7cdb6c5..9c75386 100644 --- a/FFMpegCore/FFMPEG/Argument/Atoms/AudioCodecArgument.cs +++ b/FFMpegCore/FFMPEG/Argument/Atoms/AudioCodecArgument.cs @@ -7,26 +7,12 @@ namespace FFMpegCore.FFMPEG.Argument /// public class AudioCodecArgument : Argument { - /// - /// Bitrate of audio channel - /// - public int Bitrate { get; } = (int)AudioQuality.Normal; - - public AudioCodecArgument() { } - public AudioCodecArgument(AudioCodec value) : base(value) { } - public AudioCodecArgument(AudioCodec value, AudioQuality bitrate) : this(value, (int) bitrate) { } - - public AudioCodecArgument(AudioCodec value, int bitrate) : base(value) - { - Bitrate = bitrate; - } - /// public override string GetStringValue() { - return $"-c:a {Value.ToString().ToLower()} -b:a {Bitrate}k"; + return $"-c:a {Value.ToString().ToLower()}"; } } } diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/CustomArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/CustomArgument.cs new file mode 100644 index 0000000..6a38b4e --- /dev/null +++ 
b/FFMpegCore/FFMPEG/Argument/Atoms/CustomArgument.cs @@ -0,0 +1,14 @@ +namespace FFMpegCore.FFMPEG.Argument +{ + public class CustomArgument : Argument + { + public CustomArgument(string argument) : base(argument) + { + } + + public override string GetStringValue() + { + return Value ?? string.Empty; + } + } +} \ No newline at end of file diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/ForceFormatArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/ForceFormatArgument.cs index 700d320..c2322e0 100644 --- a/FFMpegCore/FFMPEG/Argument/Atoms/ForceFormatArgument.cs +++ b/FFMpegCore/FFMPEG/Argument/Atoms/ForceFormatArgument.cs @@ -5,16 +5,17 @@ namespace FFMpegCore.FFMPEG.Argument /// /// Represents force format parameter /// - public class ForceFormatArgument : Argument + public class ForceFormatArgument : Argument { public ForceFormatArgument() { } + public ForceFormatArgument(string format) : base(format) { } - public ForceFormatArgument(VideoCodec value) : base(value) { } + public ForceFormatArgument(VideoCodec value) : base(value.ToString().ToLower()) { } /// public override string GetStringValue() { - return $"-f {Value.ToString().ToLower()}"; + return $"-f {Value}"; } } } diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/InputPipeArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/InputPipeArgument.cs new file mode 100644 index 0000000..6197a23 --- /dev/null +++ b/FFMpegCore/FFMPEG/Argument/Atoms/InputPipeArgument.cs @@ -0,0 +1,39 @@ +using FFMpegCore.FFMPEG.Pipes; +using Instances; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.IO.Pipes; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Argument +{ + /// + /// Represents input parameter for a named pipe + /// + public class InputPipeArgument : PipeArgument + { + public IPipeDataWriter Writer { get; private set; } + + public InputPipeArgument(IPipeDataWriter writer) : base(PipeDirection.Out) + { + Writer = 
writer; + } + + public override string GetStringValue() + { + return $"-y {Writer.GetFormat()} -i \"{PipePath}\""; + } + + public override async Task ProcessDataAsync(CancellationToken token) + { + await Pipe.WaitForConnectionAsync(token).ConfigureAwait(false); + if (!Pipe.IsConnected) + throw new TaskCanceledException(); + await Writer.WriteDataAsync(Pipe).ConfigureAwait(false); + } + } +} diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/OutputPipeArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/OutputPipeArgument.cs new file mode 100644 index 0000000..fd02df2 --- /dev/null +++ b/FFMpegCore/FFMPEG/Argument/Atoms/OutputPipeArgument.cs @@ -0,0 +1,33 @@ +using FFMpegCore.FFMPEG.Pipes; +using System; +using System.Collections.Generic; +using System.IO.Pipes; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Argument +{ + public class OutputPipeArgument : PipeArgument + { + public IPipeDataReader Reader { get; private set; } + + public OutputPipeArgument(IPipeDataReader reader) : base(PipeDirection.In) + { + Reader = reader; + } + + public override string GetStringValue() + { + return $"\"{PipePath}\" -y"; + } + + public override async Task ProcessDataAsync(CancellationToken token) + { + await Pipe.WaitForConnectionAsync(token).ConfigureAwait(false); + if (!Pipe.IsConnected) + throw new TaskCanceledException(); + await Reader.ReadDataAsync(Pipe).ConfigureAwait(false); + } + } +} diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/PipeArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/PipeArgument.cs new file mode 100644 index 0000000..81fb872 --- /dev/null +++ b/FFMpegCore/FFMPEG/Argument/Atoms/PipeArgument.cs @@ -0,0 +1,45 @@ +using FFMpegCore.FFMPEG.Pipes; +using System; +using System.Collections.Generic; +using System.IO.Pipes; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Argument +{ + public abstract class PipeArgument : Argument + { + public string PipeName { 
get; private set; } + public string PipePath => PipeHelpers.GetPipePath(PipeName); + + protected NamedPipeServerStream Pipe { get; private set; } + private PipeDirection direction; + + protected PipeArgument(PipeDirection direction) + { + PipeName = PipeHelpers.GetUnqiuePipeName(); + this.direction = direction; + } + + public void OpenPipe() + { + if (Pipe != null) + throw new InvalidOperationException("Pipe already has been opened"); + + Pipe = new NamedPipeServerStream(PipeName, direction, 1, PipeTransmissionMode.Byte, PipeOptions.Asynchronous); + } + + public void ClosePipe() + { + Pipe?.Dispose(); + Pipe = null; + } + public Task ProcessDataAsync() + { + return ProcessDataAsync(CancellationToken.None); + } + + public abstract Task ProcessDataAsync(CancellationToken token); + } +} diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/QuietArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/QuietArgument.cs new file mode 100644 index 0000000..a5d5c2e --- /dev/null +++ b/FFMpegCore/FFMPEG/Argument/Atoms/QuietArgument.cs @@ -0,0 +1,10 @@ +namespace FFMpegCore.FFMPEG.Argument +{ + public class QuietArgument : Argument + { + public override string GetStringValue() + { + return "-hide_banner -loglevel warning"; + } + } +} \ No newline at end of file diff --git a/FFMpegCore/FFMPEG/Argument/Atoms/VideoCodecArgument.cs b/FFMpegCore/FFMPEG/Argument/Atoms/VideoCodecArgument.cs index e8296ab..ac35f35 100644 --- a/FFMpegCore/FFMPEG/Argument/Atoms/VideoCodecArgument.cs +++ b/FFMpegCore/FFMPEG/Argument/Atoms/VideoCodecArgument.cs @@ -5,15 +5,17 @@ namespace FFMpegCore.FFMPEG.Argument /// /// Represents video codec parameter /// - public class VideoCodecArgument : Argument + public class VideoCodecArgument : Argument { public int Bitrate { get; protected set; } = 0; public VideoCodecArgument() { } - public VideoCodecArgument(VideoCodec value) : base(value) { } + public VideoCodecArgument(string codec) : base(codec) { } - public VideoCodecArgument(VideoCodec value, int bitrate) : base(value) 
+ public VideoCodecArgument(VideoCodec value) : base(value.ToString().ToLower()) { } + + public VideoCodecArgument(VideoCodec value, int bitrate) : base(value.ToString().ToLower()) { Bitrate = bitrate; } @@ -21,7 +23,7 @@ public VideoCodecArgument(VideoCodec value, int bitrate) : base(value) /// public override string GetStringValue() { - var video = $"-c:v {Value.ToString().ToLower()} -pix_fmt yuv420p"; + var video = $"-c:v {Value} -pix_fmt yuv420p"; if (Bitrate != default) { diff --git a/FFMpegCore/FFMPEG/FFMpeg.cs b/FFMpegCore/FFMPEG/FFMpeg.cs index 8f2c1a3..79929bf 100644 --- a/FFMpegCore/FFMPEG/FFMpeg.cs +++ b/FFMpegCore/FFMPEG/FFMpeg.cs @@ -15,6 +15,8 @@ using System.Text.RegularExpressions; using System.Threading.Tasks; using Instances; +using System.Runtime.CompilerServices; +using System.Threading; namespace FFMpegCore.FFMPEG { @@ -65,16 +67,16 @@ public Bitmap Snapshot(VideoInfo source, FileInfo output, Size? size = null, Tim { if (size.Value.Width == 0) { - var ratio = source.Width / (double) size.Value.Width; + var ratio = source.Width / (double)size.Value.Width; - size = new Size((int) (source.Width * ratio), (int) (source.Height * ratio)); + size = new Size((int)(source.Width * ratio), (int)(source.Height * ratio)); } if (size.Value.Height == 0) { - var ratio = source.Height / (double) size.Value.Height; + var ratio = source.Height / (double)size.Value.Height; - size = new Size((int) (source.Width * ratio), (int) (source.Height * ratio)); + size = new Size((int)(source.Width * ratio), (int)(source.Height * ratio)); } } @@ -96,7 +98,7 @@ public Bitmap Snapshot(VideoInfo source, FileInfo output, Size? 
size = null, Tim output.Refresh(); Bitmap result; - using (var bmp = (Bitmap) Image.FromFile(output.FullName)) + using (var bmp = (Bitmap)Image.FromFile(output.FullName)) { using var ms = new MemoryStream(); bmp.Save(ms, ImageFormat.Png); @@ -135,8 +137,8 @@ public VideoInfo Convert( FFMpegHelper.ExtensionExceptionCheck(output, FileExtension.ForType(type)); FFMpegHelper.ConversionSizeExceptionCheck(source); - var scale = VideoSize.Original == size ? 1 : (double) source.Height / (int) size; - var outputSize = new Size((int) (source.Width / scale), (int) (source.Height / scale)); + var scale = VideoSize.Original == size ? 1 : (double)source.Height / (int)size; + var outputSize = new Size((int)(source.Width / scale), (int)(source.Height / scale)); if (outputSize.Width % 2 != 0) outputSize.Width += 1; @@ -149,7 +151,8 @@ public VideoInfo Convert( new ScaleArgument(outputSize), new VideoCodecArgument(VideoCodec.LibX264, 2400), new SpeedArgument(speed), - new AudioCodecArgument(AudioCodec.Aac, audioQuality), + new AudioCodecArgument(AudioCodec.Aac), + new AudioBitrateArgument(audioQuality), new OutputArgument(output))), VideoType.Ogv => Convert(new ArgumentContainer( new InputArgument(source), @@ -157,7 +160,8 @@ public VideoInfo Convert( new ScaleArgument(outputSize), new VideoCodecArgument(VideoCodec.LibTheora, 2400), new SpeedArgument(speed), - new AudioCodecArgument(AudioCodec.LibVorbis, audioQuality), + new AudioCodecArgument(AudioCodec.LibVorbis), + new AudioBitrateArgument(audioQuality), new OutputArgument(output))), VideoType.Ts => Convert(new ArgumentContainer( new InputArgument(source), @@ -171,7 +175,8 @@ public VideoInfo Convert( new ScaleArgument(outputSize), new VideoCodecArgument(VideoCodec.LibVpx, 2400), new SpeedArgument(speed), - new AudioCodecArgument(AudioCodec.LibVorbis, audioQuality), + new AudioCodecArgument(AudioCodec.LibVorbis), + new AudioBitrateArgument(audioQuality), new OutputArgument(output))), _ => throw new 
ArgumentOutOfRangeException(nameof(type)) }; @@ -194,7 +199,8 @@ public VideoInfo PosterWithAudio(FileInfo image, FileInfo audio, FileInfo output new InputArgument(image.FullName, audio.FullName), new LoopArgument(1), new VideoCodecArgument(VideoCodec.LibX264, 2400), - new AudioCodecArgument(AudioCodec.Aac, AudioQuality.Normal), + new AudioCodecArgument(AudioCodec.Aac), + new AudioBitrateArgument(AudioQuality.Normal), new ShortestArgument(true), new OutputArgument(output) ); @@ -279,7 +285,7 @@ public VideoInfo JoinImageSequence(FileInfo output, double frameRate = 30, param throw new FFMpegException(FFMpegExceptionType.Operation, "Could not join the provided image sequence."); } - + return new VideoInfo(output); } finally @@ -375,43 +381,57 @@ public VideoInfo ReplaceAudio(VideoInfo source, FileInfo audio, FileInfo output, return Convert(new ArgumentContainer( new InputArgument(source.FullName, audio.FullName), new CopyArgument(), - new AudioCodecArgument(AudioCodec.Aac, AudioQuality.Hd), + new AudioCodecArgument(AudioCodec.Aac), + new AudioBitrateArgument(AudioQuality.Hd), new ShortestArgument(stopAtShortest), new OutputArgument(output) )); } - + public VideoInfo Convert(ArgumentContainer arguments, bool skipExistsCheck = false) { var (sources, output) = GetInputOutput(arguments); - _totalTime = TimeSpan.FromSeconds(sources.Sum(source => source.Duration.TotalSeconds)); + if (sources != null) + _totalTime = TimeSpan.FromSeconds(sources.Sum(source => source.Duration.TotalSeconds)); if (!RunProcess(arguments, output, skipExistsCheck)) throw new FFMpegException(FFMpegExceptionType.Conversion, "Could not process file without error"); _totalTime = TimeSpan.MinValue; - return new VideoInfo(output); + + return output != null && output.Exists ? 
new VideoInfo(output) : null; } public async Task ConvertAsync(ArgumentContainer arguments, bool skipExistsCheck = false) { var (sources, output) = GetInputOutput(arguments); - _totalTime = TimeSpan.FromSeconds(sources.Sum(source => source.Duration.TotalSeconds)); + if (sources != null) + _totalTime = TimeSpan.FromSeconds(sources.Sum(source => source.Duration.TotalSeconds)); if (!await RunProcessAsync(arguments, output, skipExistsCheck)) throw new FFMpegException(FFMpegExceptionType.Conversion, "Could not process file without error"); _totalTime = TimeSpan.MinValue; - return new VideoInfo(output); + + return output != null && output.Exists ? new VideoInfo(output) : null; } private static (VideoInfo[] Input, FileInfo Output) GetInputOutput(ArgumentContainer arguments) { - var output = ((OutputArgument) arguments[typeof(OutputArgument)]).GetAsFileInfo(); + FileInfo output; + if (arguments.TryGetArgument(out var outputArg)) + output = outputArg.GetAsFileInfo(); + else if (arguments.TryGetArgument(out var outputPipeArg)) + output = null; + else + throw new FFMpegException(FFMpegExceptionType.Operation, "No output argument found"); + VideoInfo[] sources; if (arguments.TryGetArgument(out var input)) sources = input.GetAsVideoInfo(); else if (arguments.TryGetArgument(out var concat)) sources = concat.GetAsVideoInfo(); + else if (arguments.TryGetArgument(out var pipe)) + sources = null; else throw new FFMpegException(FFMpegExceptionType.Operation, "No input or concat argument found"); return (sources, output); @@ -442,12 +462,71 @@ private bool RunProcess(ArgumentContainer container, FileInfo output, bool skipE { _instance?.Dispose(); var arguments = ArgumentBuilder.BuildArguments(container); - + var exitCode = -1; + + if (container.TryGetArgument(out var inputPipeArgument)) + { + inputPipeArgument.OpenPipe(); + } + if (container.TryGetArgument(out var outputPipeArgument)) + { + outputPipeArgument.OpenPipe(); + } + + _instance = new Instance(_ffmpegPath, arguments); 
_instance.DataReceived += OutputData; - var exitCode = _instance.BlockUntilFinished(); - - if (!skipExistsCheck && (!File.Exists(output.FullName) || new FileInfo(output.FullName).Length == 0)) + + if (inputPipeArgument != null || outputPipeArgument != null) + { + try + { + using (var tokenSource = new CancellationTokenSource()) + { + var concurrentTasks = new List(); + concurrentTasks.Add(_instance.FinishedRunning() + .ContinueWith((t => + { + exitCode = t.Result; + if (exitCode != 0) + tokenSource.Cancel(); + }))); + if (inputPipeArgument != null) + concurrentTasks.Add(inputPipeArgument.ProcessDataAsync(tokenSource.Token) + .ContinueWith((t) => + { + inputPipeArgument.ClosePipe(); + if (t.Exception != null) + throw t.Exception; + })); + if (outputPipeArgument != null) + concurrentTasks.Add(outputPipeArgument.ProcessDataAsync(tokenSource.Token) + .ContinueWith((t) => + { + outputPipeArgument.ClosePipe(); + if (t.Exception != null) + throw t.Exception; + })); + + Task.WaitAll(concurrentTasks.ToArray()/*, tokenSource.Token*/); + } + } + catch (Exception ex) + { + inputPipeArgument?.ClosePipe(); + outputPipeArgument?.ClosePipe(); + throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData), ex); + } + } + else + { + exitCode = _instance.BlockUntilFinished(); + } + + if(exitCode != 0) + throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData)); + + if (outputPipeArgument == null && !skipExistsCheck && (!File.Exists(output.FullName) || new FileInfo(output.FullName).Length == 0)) throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData)); return exitCode == 0; @@ -456,12 +535,71 @@ private async Task RunProcessAsync(ArgumentContainer container, FileInfo o { _instance?.Dispose(); var arguments = ArgumentBuilder.BuildArguments(container); - + var exitCode = -1; + + if (container.TryGetArgument(out var inputPipeArgument)) + { + inputPipeArgument.OpenPipe(); + } + if 
(container.TryGetArgument(out var outputPipeArgument)) + { + outputPipeArgument.OpenPipe(); + } + + _instance = new Instance(_ffmpegPath, arguments); _instance.DataReceived += OutputData; - var exitCode = await _instance.FinishedRunning(); - - if (!skipExistsCheck && (!File.Exists(output.FullName) || new FileInfo(output.FullName).Length == 0)) + + if (inputPipeArgument != null || outputPipeArgument != null) + { + try + { + using (var tokenSource = new CancellationTokenSource()) + { + var concurrentTasks = new List(); + concurrentTasks.Add(_instance.FinishedRunning() + .ContinueWith((t => + { + exitCode = t.Result; + if (exitCode != 0) + tokenSource.Cancel(); + }))); + if (inputPipeArgument != null) + concurrentTasks.Add(inputPipeArgument.ProcessDataAsync(tokenSource.Token) + .ContinueWith((t) => + { + inputPipeArgument.ClosePipe(); + if (t.Exception != null) + throw t.Exception; + })); + if (outputPipeArgument != null) + concurrentTasks.Add(outputPipeArgument.ProcessDataAsync(tokenSource.Token) + .ContinueWith((t) => + { + outputPipeArgument.ClosePipe(); + if (t.Exception != null) + throw t.Exception; + })); + + await Task.WhenAll(concurrentTasks); + } + } + catch (Exception ex) + { + inputPipeArgument?.ClosePipe(); + outputPipeArgument?.ClosePipe(); + throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData), ex); + } + } + else + { + exitCode = await _instance.FinishedRunning(); + } + + if (exitCode != 0) + throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData)); + + if (outputPipeArgument == null && !skipExistsCheck && (!File.Exists(output.FullName) || new FileInfo(output.FullName).Length == 0)) throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData)); return exitCode == 0; @@ -487,7 +625,7 @@ private void OutputData(object sender, (DataType Type, string Data) msg) Trace.WriteLine(msg.Data); #endif if (OnProgress == null) return; - + var match = 
ProgressRegex.Match(msg.Data); if (!match.Success) return; diff --git a/FFMpegCore/FFMPEG/FFMpegOptions.cs b/FFMpegCore/FFMPEG/FFMpegOptions.cs index f8fe390..f857122 100644 --- a/FFMpegCore/FFMPEG/FFMpegOptions.cs +++ b/FFMpegCore/FFMPEG/FFMpegOptions.cs @@ -12,8 +12,17 @@ public class FFMpegOptions public static FFMpegOptions Options { get; private set; } = new FFMpegOptions(); + public static void Configure(Action optionsAction) + { + optionsAction?.Invoke(Options); + } + public static void Configure(FFMpegOptions options) { + if (null == options) + { + throw new ArgumentNullException(nameof(options)); + } Options = options; } diff --git a/FFMpegCore/FFMPEG/FFProbe.cs b/FFMpegCore/FFMPEG/FFProbe.cs index 52cb0b8..d73a8f5 100644 --- a/FFMpegCore/FFMPEG/FFProbe.cs +++ b/FFMpegCore/FFMPEG/FFProbe.cs @@ -5,6 +5,9 @@ using System.Globalization; using System.Threading.Tasks; using Instances; +using FFMpegCore.FFMPEG.Argument; +using FFMpegCore.FFMPEG.Pipes; +using System.IO; namespace FFMpegCore.FFMPEG { @@ -47,7 +50,7 @@ public Task ParseVideoInfoAsync(string source) /// A video info object containing all details necessary. public VideoInfo ParseVideoInfo(VideoInfo info) { - var instance = new Instance(_ffprobePath, BuildFFProbeArguments(info)) {DataBufferCapacity = _outputCapacity}; + var instance = new Instance(_ffprobePath, BuildFFProbeArguments(info.FullName)) {DataBufferCapacity = _outputCapacity}; instance.BlockUntilFinished(); var output = string.Join("", instance.OutputData); return ParseVideoInfoInternal(info, output); @@ -59,20 +62,92 @@ public VideoInfo ParseVideoInfo(VideoInfo info) /// A video info object containing all details necessary. 
public async Task ParseVideoInfoAsync(VideoInfo info) { - var instance = new Instance(_ffprobePath, BuildFFProbeArguments(info)) {DataBufferCapacity = _outputCapacity}; + var instance = new Instance(_ffprobePath, BuildFFProbeArguments(info.FullName)) {DataBufferCapacity = _outputCapacity}; await instance.FinishedRunning(); var output = string.Join("", instance.OutputData); return ParseVideoInfoInternal(info, output); } - private static string BuildFFProbeArguments(VideoInfo info) => - $"-v quiet -print_format json -show_streams \"{info.FullName}\""; + /// + /// Probes the targeted video stream and retrieves all available details. + /// + /// Encoded video stream. + /// A video info object containing all details necessary. + public VideoInfo ParseVideoInfo(System.IO.Stream stream) + { + var info = new VideoInfo(); + var streamPipeSource = new StreamPipeDataWriter(stream); + var pipeArgument = new InputPipeArgument(streamPipeSource); + + var instance = new Instance(_ffprobePath, BuildFFProbeArguments(pipeArgument.PipePath)) { DataBufferCapacity = _outputCapacity }; + pipeArgument.OpenPipe(); + + var task = instance.FinishedRunning(); + try + { + pipeArgument.ProcessDataAsync().ConfigureAwait(false).GetAwaiter().GetResult(); + pipeArgument.ClosePipe(); + } + catch(IOException) + { + } + finally + { + pipeArgument.ClosePipe(); + } + var exitCode = task.ConfigureAwait(false).GetAwaiter().GetResult(); + + if (exitCode != 0) + throw new FFMpegException(FFMpegExceptionType.Process, "FFProbe process returned exit status " + exitCode); + + var output = string.Join("", instance.OutputData); + return ParseVideoInfoInternal(info, output); + } + + /// + /// Probes the targeted video stream asynchronously and retrieves all available details. + /// + /// Encoded video stream. + /// A video info object containing all details necessary. 
+ public async Task ParseVideoInfoAsync(System.IO.Stream stream) + { + var info = new VideoInfo(); + var streamPipeSource = new StreamPipeDataWriter(stream); + var pipeArgument = new InputPipeArgument(streamPipeSource); + + var instance = new Instance(_ffprobePath, BuildFFProbeArguments(pipeArgument.PipePath)) { DataBufferCapacity = _outputCapacity }; + pipeArgument.OpenPipe(); + + var task = instance.FinishedRunning(); + try + { + await pipeArgument.ProcessDataAsync(); + pipeArgument.ClosePipe(); + } + catch (IOException) + { + } + finally + { + pipeArgument.ClosePipe(); + } + var exitCode = await task; + + if (exitCode != 0) + throw new FFMpegException(FFMpegExceptionType.Process, "FFProbe process returned exit status " + exitCode); + + var output = string.Join("", instance.OutputData); + return ParseVideoInfoInternal(info, output); + } + + private static string BuildFFProbeArguments(string fullPath) => + $"-v quiet -print_format json -show_streams \"{fullPath}\""; private VideoInfo ParseVideoInfoInternal(VideoInfo info, string probeOutput) { var metadata = JsonConvert.DeserializeObject(probeOutput); - if (metadata.Streams == null || metadata.Streams.Count == 0) + if (metadata?.Streams == null || metadata.Streams.Count == 0) { throw new FFMpegException(FFMpegExceptionType.File, $"No video or audio streams could be detected. 
Source: ${info.FullName}"); } @@ -133,5 +208,21 @@ private VideoInfo ParseVideoInfoInternal(VideoInfo info, string probeOutput) return info; } + + internal FFMpegStreamMetadata GetMetadata(string path) + { + var instance = new Instance(_ffprobePath, BuildFFProbeArguments(path)) { DataBufferCapacity = _outputCapacity }; + instance.BlockUntilFinished(); + var output = string.Join("", instance.OutputData); + return JsonConvert.DeserializeObject(output); + } + + internal async Task GetMetadataAsync(string path) + { + var instance = new Instance(_ffprobePath, BuildFFProbeArguments(path)) { DataBufferCapacity = _outputCapacity }; + await instance.FinishedRunning(); + var output = string.Join("", instance.OutputData); + return JsonConvert.DeserializeObject(output); + } } } diff --git a/FFMpegCore/FFMPEG/Pipes/IPipeDataReader.cs b/FFMpegCore/FFMPEG/Pipes/IPipeDataReader.cs new file mode 100644 index 0000000..3912cb3 --- /dev/null +++ b/FFMpegCore/FFMPEG/Pipes/IPipeDataReader.cs @@ -0,0 +1,14 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Pipes +{ + public interface IPipeDataReader + { + void ReadData(System.IO.Stream stream); + Task ReadDataAsync(System.IO.Stream stream); + string GetFormat(); + } +} diff --git a/FFMpegCore/FFMPEG/Pipes/IPipeDataWriter.cs b/FFMpegCore/FFMPEG/Pipes/IPipeDataWriter.cs new file mode 100644 index 0000000..aa4bbc8 --- /dev/null +++ b/FFMpegCore/FFMPEG/Pipes/IPipeDataWriter.cs @@ -0,0 +1,18 @@ +using FFMpegCore.FFMPEG.Argument; +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Pipes +{ + /// + /// Interface for ffmpeg pipe source data IO + /// + public interface IPipeDataWriter + { + string GetFormat(); + void WriteData(System.IO.Stream pipe); + Task WriteDataAsync(System.IO.Stream pipe); + } +} diff --git a/FFMpegCore/FFMPEG/Pipes/IVideoFrame.cs 
b/FFMpegCore/FFMPEG/Pipes/IVideoFrame.cs new file mode 100644 index 0000000..60de429 --- /dev/null +++ b/FFMpegCore/FFMPEG/Pipes/IVideoFrame.cs @@ -0,0 +1,20 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Pipes +{ + /// + /// Interface for Video frame + /// + public interface IVideoFrame + { + int Width { get; } + int Height { get; } + string Format { get; } + + void Serialize(System.IO.Stream pipe); + Task SerializeAsync(System.IO.Stream pipe); + } +} diff --git a/FFMpegCore/FFMPEG/Pipes/PipeHelpers.cs b/FFMpegCore/FFMPEG/Pipes/PipeHelpers.cs new file mode 100644 index 0000000..6717dac --- /dev/null +++ b/FFMpegCore/FFMPEG/Pipes/PipeHelpers.cs @@ -0,0 +1,16 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace FFMpegCore.FFMPEG.Pipes +{ + static class PipeHelpers + { + public static string GetUnqiuePipeName() => "FFMpegCore_Pipe_" + Guid.NewGuid(); + + public static string GetPipePath(string pipeName) + { + return $@"\\.\pipe\{pipeName}"; + } + } +} diff --git a/FFMpegCore/FFMPEG/Pipes/RawVideoPipeDataWriter.cs b/FFMpegCore/FFMPEG/Pipes/RawVideoPipeDataWriter.cs new file mode 100644 index 0000000..ce6bcdf --- /dev/null +++ b/FFMpegCore/FFMPEG/Pipes/RawVideoPipeDataWriter.cs @@ -0,0 +1,85 @@ +using FFMpegCore.FFMPEG.Argument; +using FFMpegCore.FFMPEG.Exceptions; +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Pipes +{ + /// + /// Implementation of for a raw video stream that is gathered from + /// + public class RawVideoPipeDataWriter : IPipeDataWriter + { + public string StreamFormat { get; private set; } + public int Width { get; private set; } + public int Height { get; private set; } + public int FrameRate { get; set; } = 25; + private bool formatInitialized = false; + private IEnumerator framesEnumerator; + + public RawVideoPipeDataWriter(IEnumerator 
framesEnumerator) + { + this.framesEnumerator = framesEnumerator; + } + + public RawVideoPipeDataWriter(IEnumerable framesEnumerator) : this(framesEnumerator.GetEnumerator()) { } + + public string GetFormat() + { + if (!formatInitialized) + { + //see input format references https://lists.ffmpeg.org/pipermail/ffmpeg-user/2012-July/007742.html + if (framesEnumerator.Current == null) + { + if (!framesEnumerator.MoveNext()) + throw new InvalidOperationException("Enumerator is empty, unable to get frame"); + } + StreamFormat = framesEnumerator.Current.Format; + Width = framesEnumerator.Current.Width; + Height = framesEnumerator.Current.Height; + + formatInitialized = true; + } + + return $"-f rawvideo -r {FrameRate} -pix_fmt {StreamFormat} -s {Width}x{Height}"; + } + + public void WriteData(System.IO.Stream stream) + { + if (framesEnumerator.Current != null) + { + CheckFrameAndThrow(framesEnumerator.Current); + framesEnumerator.Current.Serialize(stream); + } + + while (framesEnumerator.MoveNext()) + { + CheckFrameAndThrow(framesEnumerator.Current); + framesEnumerator.Current.Serialize(stream); + } + } + + public async Task WriteDataAsync(System.IO.Stream stream) + { + if (framesEnumerator.Current != null) + { + await framesEnumerator.Current.SerializeAsync(stream); + } + + while (framesEnumerator.MoveNext()) + { + await framesEnumerator.Current.SerializeAsync(stream); + } + } + + private void CheckFrameAndThrow(IVideoFrame frame) + { + if (frame.Width != Width || frame.Height != Height || frame.Format != StreamFormat) + throw new FFMpegException(FFMpegExceptionType.Operation, "Video frame is not the same format as created raw video stream\r\n" + + $"Frame format: {frame.Width}x{frame.Height} pix_fmt: {frame.Format}\r\n" + + $"Stream format: {Width}x{Height} pix_fmt: {StreamFormat}"); + } + } +} diff --git a/FFMpegCore/FFMPEG/Pipes/StreamPipeDataReader.cs b/FFMpegCore/FFMPEG/Pipes/StreamPipeDataReader.cs new file mode 100644 index 0000000..1c43dd2 --- /dev/null +++ 
b/FFMpegCore/FFMPEG/Pipes/StreamPipeDataReader.cs @@ -0,0 +1,30 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Pipes +{ + public class StreamPipeDataReader : IPipeDataReader + { + public System.IO.Stream DestanationStream { get; private set; } + public int BlockSize { get; set; } = 4096; + public string Format { get; set; } = string.Empty; + + public StreamPipeDataReader(System.IO.Stream destanationStream) + { + DestanationStream = destanationStream; + } + + public void ReadData(System.IO.Stream stream) => + stream.CopyTo(DestanationStream, BlockSize); + + public Task ReadDataAsync(System.IO.Stream stream) => + stream.CopyToAsync(DestanationStream, BlockSize); + + public string GetFormat() + { + return Format; + } + } +} diff --git a/FFMpegCore/FFMPEG/Pipes/StreamPipeDataWriter.cs b/FFMpegCore/FFMPEG/Pipes/StreamPipeDataWriter.cs new file mode 100644 index 0000000..e2b5120 --- /dev/null +++ b/FFMpegCore/FFMPEG/Pipes/StreamPipeDataWriter.cs @@ -0,0 +1,33 @@ +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; + +namespace FFMpegCore.FFMPEG.Pipes +{ + /// + /// Implementation of used for stream redirection + /// + public class StreamPipeDataWriter : IPipeDataWriter + { + public System.IO.Stream Source { get; private set; } + public int BlockSize { get; set; } = 4096; + public string StreamFormat { get; set; } = string.Empty; + + public StreamPipeDataWriter(System.IO.Stream stream) + { + Source = stream; + } + + public void WriteData(System.IO.Stream pipe)=> + Source.CopyTo(pipe, BlockSize); + + public Task WriteDataAsync(System.IO.Stream pipe) => + Source.CopyToAsync(pipe, BlockSize); + + public string GetFormat() + { + return StreamFormat; + } + } +} diff --git a/FFMpegCore/FFMpegCore.csproj b/FFMpegCore/FFMpegCore.csproj index a3fca0d..821db6e 100644 --- a/FFMpegCore/FFMpegCore.csproj +++ b/FFMpegCore/FFMpegCore.csproj @@ -20,124 
+20,14 @@ - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - - Always - - + Always - - diff --git a/FFMpegCore/VideoInfo.cs b/FFMpegCore/VideoInfo.cs index b6f97d7..3a4cb75 100644 --- a/FFMpegCore/VideoInfo.cs +++ b/FFMpegCore/VideoInfo.cs @@ -1,4 +1,6 @@ using FFMpegCore.FFMPEG; +using FFMpegCore.FFMPEG.Argument; +using FFMpegCore.FFMPEG.Pipes; using System; using System.IO; @@ -6,8 +8,13 @@ namespace FFMpegCore { public class VideoInfo { + private const string NoVideoPlaceholder = "NULL"; private FileInfo _file; + internal VideoInfo() + { + + } /// /// Create a video information object from a file information object. /// @@ -74,37 +81,37 @@ public VideoInfo(string path, int outputCapacity = int.MaxValue) : this(new File /// /// Gets the name of the file. /// - public string Name => _file.Name; + public string Name => _file != null ? _file.Name : throw new FileNotFoundException(); /// /// Gets the full path of the file. /// - public string FullName => _file.FullName; + public string FullName => _file != null ? _file.FullName : throw new FileNotFoundException(); /// /// Gets the file extension. /// - public string Extension => _file.Extension; + public string Extension => _file != null ? _file.Extension : throw new FileNotFoundException(); /// /// Gets a flag indicating if the file is read-only. /// - public bool IsReadOnly => _file.IsReadOnly; + public bool IsReadOnly => _file != null ? _file.IsReadOnly : throw new FileNotFoundException(); /// /// Gets a flag indicating if the file exists (no cache, per call verification). 
/// - public bool Exists => File.Exists(FullName); + public bool Exists => _file != null ? File.Exists(FullName) : false; /// /// Gets the creation date. /// - public DateTime CreationTime => _file.CreationTime; + public DateTime CreationTime => _file != null ? _file.CreationTime : throw new FileNotFoundException(); /// /// Gets the parent directory information. /// - public DirectoryInfo Directory => _file.Directory; + public DirectoryInfo Directory => _file != null ? _file.Directory : throw new FileNotFoundException(); /// /// Create a video information object from a file information object. @@ -126,16 +133,26 @@ public static VideoInfo FromPath(string path) return new VideoInfo(path); } + /// + /// Create a video information object from a encoded stream. + /// + /// Encoded video stream. + /// + public static VideoInfo FromStream(System.IO.Stream stream) + { + return new FFProbe().ParseVideoInfo(stream); + } + /// /// Pretty prints the video information. /// /// public override string ToString() { - return "Video Path : " + FullName + Environment.NewLine + - "Video Root : " + Directory.FullName + Environment.NewLine + - "Video Name: " + Name + Environment.NewLine + - "Video Extension : " + Extension + Environment.NewLine + + return "Video Path : " + (_file != null ? FullName : NoVideoPlaceholder) + Environment.NewLine + + "Video Root : " + (_file != null ? Directory.FullName : NoVideoPlaceholder) + Environment.NewLine + + "Video Name: " + (_file != null ? Name : NoVideoPlaceholder) + Environment.NewLine + + "Video Extension : " + (_file != null ? 
Extension : NoVideoPlaceholder) + Environment.NewLine + "Video Duration : " + Duration + Environment.NewLine + "Audio Format : " + AudioFormat + Environment.NewLine + "Video Format : " + VideoFormat + Environment.NewLine + diff --git a/README.md b/README.md index 04214df..4a34689 100644 --- a/README.md +++ b/README.md @@ -358,9 +358,9 @@ public enum VideoCodec } ``` ### ArgumentBuilder -Custom video converting presets could be created with help of `ArgumentsContainer` class: +Custom video converting presets can be created with the help of the `ArgumentContainer` class: ```csharp -var container = new ArgumentsContainer(); +var container = new ArgumentContainer(); container.Add(new VideoCodecArgument(VideoCodec.LibX264)); container.Add(new ScaleArgument(VideoSize.Hd)); ``` @@ -377,7 +377,7 @@ var ffmpeg = new FFMpeg(); var result = ffmpeg.Convert(container, new FileInfo("input.mp4"), new FileInfo("output.mp4")); ``` -Other availible arguments could be found in `FFMpegCore.FFMPEG.Arguments` namespace. +Other available arguments can be found in the `FFMpegCore.FFMPEG.Argument` namespace. If you need to create your custom argument, you just need to create new class, that is inherited from `Argument`, `Argument` or `Argument` For example: @@ -390,6 +390,42 @@ public class OverrideArgument : Argument } } ``` +### Input piping +With input piping it is possible to write video frames directly from program memory without saving them to jpeg or png and then passing the path to the input of ffmpeg. This feature also allows us to convert video on-the-fly while frames are being generated/created/processed. + +The `IPipeDataWriter` interface is used as the source of data. The data can be an encoded video stream or a stream of raw frames. Currently the `IPipeDataWriter` interface has two implementations: `StreamPipeDataWriter` for encoded streams and `RawVideoPipeDataWriter` for raw frame streams.
+ +For example: + +A method that generates bitmap frames: +```csharp +IEnumerable CreateFrames(int count) +{ + for(int i = 0; i < count; i++) + { + yield return GetNextFrame(); //method of generating new frames + } +} +``` +Then create an `ArgumentContainer` with an `InputPipeArgument` +```csharp +var videoFramesSource = new RawVideoPipeDataWriter(CreateFrames(64)) //pass IEnumerable or IEnumerator to constructor of RawVideoPipeDataWriter +{ + FrameRate = 30 //set source frame rate +}; +var container = new ArgumentContainer +{ + new InputPipeArgument(videoFramesSource), + ... //Other encoding arguments + new OutputArgument("temporary.mp4") +}; + +var ffmpeg = new FFMpeg(); +var result = ffmpeg.Convert(container); +``` + +If you want to use `System.Drawing.Bitmap` as `IVideoFrame`, there is a `BitmapVideoFrameWrapper` wrapper class. + ## Contributors