diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d00c29b..94a858f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -45,7 +45,8 @@ jobs:
- name: Test with dotnet
run: dotnet test FFMpegCore.sln --collect "XPlat Code Coverage" --logger GitHubActions
- - name: Upload coverage reports to Codecov
+ - if: matrix.os == 'windows-latest'
+ name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
with:
directory: FFMpegCore.Test/TestResults
diff --git a/FFMpegCore.Examples/FFMpegCore.Examples.csproj b/FFMpegCore.Examples/FFMpegCore.Examples.csproj
index 555f4b7..f4a2b9b 100644
--- a/FFMpegCore.Examples/FFMpegCore.Examples.csproj
+++ b/FFMpegCore.Examples/FFMpegCore.Examples.csproj
@@ -7,6 +7,7 @@
+ <ProjectReference Include="..\FFMpegCore.Extensions.SkiaSharp\FFMpegCore.Extensions.SkiaSharp.csproj" />
diff --git a/FFMpegCore.Examples/Program.cs b/FFMpegCore.Examples/Program.cs
index ac4bce5..d7abde4 100644
--- a/FFMpegCore.Examples/Program.cs
+++ b/FFMpegCore.Examples/Program.cs
@@ -1,8 +1,11 @@
using System.Drawing;
using FFMpegCore;
using FFMpegCore.Enums;
+using FFMpegCore.Extensions.SkiaSharp;
using FFMpegCore.Extensions.System.Drawing.Common;
using FFMpegCore.Pipes;
+using SkiaSharp;
+using FFMpegImage = FFMpegCore.Extensions.System.Drawing.Common.FFMpegImage;
var inputPath = "/path/to/input";
var outputPath = "/path/to/output";
@@ -77,11 +80,14 @@ await FFMpegArguments
var inputImagePath = "/path/to/input/image";
{
FFMpeg.PosterWithAudio(inputPath, inputAudioPath, outputPath);
- // or
+ // or using FFMpegCore.Extensions.System.Drawing.Common
#pragma warning disable CA1416
using var image = Image.FromFile(inputImagePath);
image.AddAudio(inputAudioPath, outputPath);
#pragma warning restore CA1416
+ // or using FFMpegCore.Extensions.SkiaSharp
+ using var skiaSharpImage = SKBitmap.Decode(inputImagePath);
+ skiaSharpImage.AddAudio(inputAudioPath, outputPath);
}
IVideoFrame GetNextFrame() => throw new NotImplementedException();
diff --git a/FFMpegCore.Extensions.SkiaSharp/BitmapExtensions.cs b/FFMpegCore.Extensions.SkiaSharp/BitmapExtensions.cs
new file mode 100644
index 0000000..34e303a
--- /dev/null
+++ b/FFMpegCore.Extensions.SkiaSharp/BitmapExtensions.cs
@@ -0,0 +1,28 @@
+using SkiaSharp;
+
+namespace FFMpegCore.Extensions.SkiaSharp
+{
+ public static class BitmapExtensions
+ {
+ public static bool AddAudio(this SKBitmap poster, string audio, string output)
+ {
+ var destination = $"{Environment.TickCount}.png";
+ using (var fileStream = File.OpenWrite(destination))
+ {
+ poster.Encode(fileStream, SKEncodedImageFormat.Png, default); // PNG does not respect the quality parameter
+ }
+
+ try
+ {
+ return FFMpeg.PosterWithAudio(destination, audio, output);
+ }
+ finally
+ {
+ if (File.Exists(destination))
+ {
+ File.Delete(destination);
+ }
+ }
+ }
+ }
+}
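
A minimal usage sketch for the extension above (not part of the diff); the file paths are placeholders, and ffmpeg is assumed to be resolvable through GlobalFFOptions:

```csharp
using FFMpegCore.Extensions.SkiaSharp;
using SkiaSharp;

// Hypothetical paths - replace with real files before running.
var posterPath = "/path/to/poster.png";
var audioPath = "/path/to/audio.mp3";
var outputPath = "/path/to/output.mp4";

// Decode the poster once; AddAudio writes it to a temporary PNG,
// calls FFMpeg.PosterWithAudio and deletes the temporary file afterwards.
using var poster = SKBitmap.Decode(posterPath);
var success = poster.AddAudio(audioPath, outputPath);
Console.WriteLine(success ? "poster video created" : "conversion failed");
```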
diff --git a/FFMpegCore.Extensions.SkiaSharp/BitmapVideoFrameWrapper.cs b/FFMpegCore.Extensions.SkiaSharp/BitmapVideoFrameWrapper.cs
new file mode 100644
index 0000000..7bb98fb
--- /dev/null
+++ b/FFMpegCore.Extensions.SkiaSharp/BitmapVideoFrameWrapper.cs
@@ -0,0 +1,59 @@
+using FFMpegCore.Pipes;
+using SkiaSharp;
+
+namespace FFMpegCore.Extensions.SkiaSharp
+{
+ public class BitmapVideoFrameWrapper : IVideoFrame, IDisposable
+ {
+ public int Width => Source.Width;
+
+ public int Height => Source.Height;
+
+ public string Format { get; private set; }
+
+ public SKBitmap Source { get; private set; }
+
+ public BitmapVideoFrameWrapper(SKBitmap bitmap)
+ {
+ Source = bitmap ?? throw new ArgumentNullException(nameof(bitmap));
+ Format = ConvertStreamFormat(bitmap.ColorType);
+ }
+
+ public void Serialize(Stream stream)
+ {
+ var data = Source.Bytes;
+ stream.Write(data, 0, data.Length);
+ }
+
+ public async Task SerializeAsync(Stream stream, CancellationToken token)
+ {
+ var data = Source.Bytes;
+ await stream.WriteAsync(data, 0, data.Length, token).ConfigureAwait(false);
+ }
+
+ public void Dispose()
+ {
+ Source.Dispose();
+ }
+
+ private static string ConvertStreamFormat(SKColorType fmt)
+ {
+ // TODO: Add support for additional formats
+ switch (fmt)
+ {
+ case SKColorType.Gray8:
+ return "gray8";
+ case SKColorType.Bgra8888:
+ return "bgra";
+ case SKColorType.Rgb888x:
+ return "rgb";
+ case SKColorType.Rgba8888:
+ return "rgba";
+ case SKColorType.Rgb565:
+ return "rgb565";
+ default:
+ throw new NotSupportedException($"Not supported pixel format {fmt}");
+ }
+ }
+ }
+}
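
To illustrate how the wrapper feeds SkiaSharp bitmaps into a raw video pipe, here is a hedged sketch (not part of the diff) that generates solid-colour frames and encodes them; the output path and codec are placeholders, and Bgra8888 maps to the "bgra" raw format handled by ConvertStreamFormat above:

```csharp
using System.Collections.Generic;
using FFMpegCore;
using FFMpegCore.Extensions.SkiaSharp;
using FFMpegCore.Pipes;
using SkiaSharp;

// Produce a short sequence of solid-colour frames.
static IEnumerable<IVideoFrame> CreateFrames(int count, int width, int height)
{
    for (var i = 0; i < count; i++)
    {
        var bitmap = new SKBitmap(width, height, SKColorType.Bgra8888, SKAlphaType.Opaque);
        using var canvas = new SKCanvas(bitmap);
        canvas.Clear(new SKColor((byte)(i * 2), 64, 128));
        // Wrap each bitmap so the pipe knows its dimensions and raw pixel format.
        yield return new BitmapVideoFrameWrapper(bitmap);
    }
}

var source = new RawVideoPipeSource(CreateFrames(120, 256, 256)) { FrameRate = 30 };
FFMpegArguments
    .FromPipeInput(source)
    .OutputToFile("/path/to/output.mp4", overwrite: true, options => options.WithVideoCodec("libx264"))
    .ProcessSynchronously();
```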
diff --git a/FFMpegCore.Extensions.SkiaSharp/FFMpegCore.Extensions.SkiaSharp.csproj b/FFMpegCore.Extensions.SkiaSharp/FFMpegCore.Extensions.SkiaSharp.csproj
new file mode 100644
index 0000000..d15a7bd
--- /dev/null
+++ b/FFMpegCore.Extensions.SkiaSharp/FFMpegCore.Extensions.SkiaSharp.csproj
@@ -0,0 +1,23 @@
+
+
+
+ <IsPackable>true</IsPackable>
+ <Description>Image extension for FFMpegCore using SkiaSharp</Description>
+ <Version>5.0.0</Version>
+ <PackageOutputPath>../nupkg</PackageOutputPath>
+
+
+ <PackageTags>ffmpeg ffprobe convert video audio mediafile resize analyze muxing skiasharp</PackageTags>
+ <Authors>Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev, Dimitri Vranken</Authors>
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/FFMpegCore.Extensions.SkiaSharp/FFMpegImage.cs b/FFMpegCore.Extensions.SkiaSharp/FFMpegImage.cs
new file mode 100644
index 0000000..69929d3
--- /dev/null
+++ b/FFMpegCore.Extensions.SkiaSharp/FFMpegImage.cs
@@ -0,0 +1,57 @@
+using System.Drawing;
+using FFMpegCore.Pipes;
+using SkiaSharp;
+
+namespace FFMpegCore.Extensions.SkiaSharp
+{
+ public static class FFMpegImage
+ {
+ /// <summary>
+ /// Saves a 'png' thumbnail to an in-memory bitmap
+ /// </summary>
+ /// <param name="input">Source video file.</param>
+ /// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
+ /// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
+ /// <param name="streamIndex">Selected video stream index.</param>
+ /// <param name="inputFileIndex">Input file index</param>
+ /// <returns>Bitmap with the requested snapshot.</returns>
+ public static SKBitmap Snapshot(string input, Size? size = null, TimeSpan? captureTime = null, int? streamIndex = null, int inputFileIndex = 0)
+ {
+ var source = FFProbe.Analyse(input);
+ var (arguments, outputOptions) = SnapshotArgumentBuilder.BuildSnapshotArguments(input, source, size, captureTime, streamIndex, inputFileIndex);
+ using var ms = new MemoryStream();
+
+ arguments
+ .OutputToPipe(new StreamPipeSink(ms), options => outputOptions(options
+ .ForceFormat("rawvideo")))
+ .ProcessSynchronously();
+
+ ms.Position = 0;
+ using var bitmap = SKBitmap.Decode(ms);
+ return bitmap.Copy();
+ }
+ /// <summary>
+ /// Saves a 'png' thumbnail to an in-memory bitmap
+ /// </summary>
+ /// <param name="input">Source video file.</param>
+ /// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
+ /// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
+ /// <param name="streamIndex">Selected video stream index.</param>
+ /// <param name="inputFileIndex">Input file index</param>
+ /// <returns>Bitmap with the requested snapshot.</returns>
+ public static async Task<SKBitmap> SnapshotAsync(string input, Size? size = null, TimeSpan? captureTime = null, int? streamIndex = null, int inputFileIndex = 0)
+ {
+ var source = await FFProbe.AnalyseAsync(input).ConfigureAwait(false);
+ var (arguments, outputOptions) = SnapshotArgumentBuilder.BuildSnapshotArguments(input, source, size, captureTime, streamIndex, inputFileIndex);
+ using var ms = new MemoryStream();
+
+ await arguments
+ .OutputToPipe(new StreamPipeSink(ms), options => outputOptions(options
+ .ForceFormat("rawvideo")))
+ .ProcessAsynchronously();
+
+ ms.Position = 0;
+ return SKBitmap.Decode(ms);
+ }
+ }
+}
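
A usage sketch for the SkiaSharp snapshot API above (not part of the diff); the input and output paths are placeholders, and the PNG persistence step is plain SkiaSharp rather than anything added by this PR:

```csharp
using System.Drawing;
using FFMpegCore.Extensions.SkiaSharp;
using SkiaSharp;

// Hypothetical input; Snapshot analyses the file with FFProbe first.
var videoPath = "/path/to/input.mp4";

// Grab the frame at 5 seconds, scaling to 640px wide (height computed automatically).
using var thumbnail = FFMpegImage.Snapshot(videoPath, new Size(640, 0), TimeSpan.FromSeconds(5));

// Persist the bitmap as a PNG via SkiaSharp.
using var data = thumbnail.Encode(SKEncodedImageFormat.Png, 100);
using var stream = File.OpenWrite("/path/to/thumbnail.png");
data.SaveTo(stream);
```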
diff --git a/FFMpegCore.Extensions.System.Drawing.Common/FFMpegImage.cs b/FFMpegCore.Extensions.System.Drawing.Common/FFMpegImage.cs
index f36f83d..c946507 100644
--- a/FFMpegCore.Extensions.System.Drawing.Common/FFMpegImage.cs
+++ b/FFMpegCore.Extensions.System.Drawing.Common/FFMpegImage.cs
@@ -29,6 +29,7 @@ public static Bitmap Snapshot(string input, Size? size = null, TimeSpan? capture
using var bitmap = new Bitmap(ms);
return bitmap.Clone(new Rectangle(0, 0, bitmap.Width, bitmap.Height), bitmap.PixelFormat);
}
+
/// <summary>
/// Saves a 'png' thumbnail to an in-memory bitmap
/// </summary>
diff --git a/FFMpegCore.Test/ArgumentBuilderTest.cs b/FFMpegCore.Test/ArgumentBuilderTest.cs
index f676a44..2c550c9 100644
--- a/FFMpegCore.Test/ArgumentBuilderTest.cs
+++ b/FFMpegCore.Test/ArgumentBuilderTest.cs
@@ -258,6 +258,13 @@ public void Builder_BuildString_Seek()
Assert.AreEqual("-ss 00:00:10.000 -i \"input.mp4\" -ss 00:00:10.000 \"output.mp4\"", str);
}
+ [TestMethod]
+ public void Builder_BuildString_EndSeek()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.EndSeek(TimeSpan.FromSeconds(10))).OutputToFile("output.mp4", false, opt => opt.EndSeek(TimeSpan.FromSeconds(10))).Arguments;
+ Assert.AreEqual("-to 00:00:10.000 -i \"input.mp4\" -to 00:00:10.000 \"output.mp4\"", str);
+ }
+
[TestMethod]
public void Builder_BuildString_Shortest()
{
diff --git a/FFMpegCore.Test/FFMpegCore.Test.csproj b/FFMpegCore.Test/FFMpegCore.Test.csproj
index 0243372..b78af1b 100644
--- a/FFMpegCore.Test/FFMpegCore.Test.csproj
+++ b/FFMpegCore.Test/FFMpegCore.Test.csproj
@@ -12,19 +12,21 @@
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
+
all
runtime; build; native; contentfiles; analyzers; buildtransitive
-
-
-
+
+
+
+
-
-
+
+
+
diff --git a/FFMpegCore.Test/Utilities/BitmapSources.cs b/FFMpegCore.Test/Utilities/BitmapSources.cs
index b7ecb45..f3b657a 100644
--- a/FFMpegCore.Test/Utilities/BitmapSources.cs
+++ b/FFMpegCore.Test/Utilities/BitmapSources.cs
@@ -2,14 +2,14 @@
using System.Drawing.Imaging;
using System.Numerics;
using System.Runtime.Versioning;
-using FFMpegCore.Extensions.System.Drawing.Common;
using FFMpegCore.Pipes;
+using SkiaSharp;
namespace FFMpegCore.Test.Utilities
{
- [SupportedOSPlatform("windows")]
internal static class BitmapSource
{
+ [SupportedOSPlatform("windows")]
public static IEnumerable<IVideoFrame> CreateBitmaps(int count, PixelFormat fmt, int w, int h)
{
for (var i = 0; i < count; i++)
@@ -21,10 +21,44 @@ public static IEnumerable CreateBitmaps(int count, PixelFormat fmt,
}
}
- public static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset)
+ public static IEnumerable<IVideoFrame> CreateBitmaps(int count, SKColorType fmt, int w, int h)
+ {
+ for (var i = 0; i < count; i++)
+ {
+ using (var frame = CreateVideoFrame(i, fmt, w, h, 0.025f, 0.025f * w * 0.03f))
+ {
+ yield return frame;
+ }
+ }
+ }
+
+ [SupportedOSPlatform("windows")]
+ public static Extensions.System.Drawing.Common.BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset)
{
var bitmap = new Bitmap(w, h, fmt);
+ foreach (var (x, y, red, green, blue) in GenerateVideoFramePixels(index, w, h, scaleNoise, offset))
+ {
+ var color = Color.FromArgb(red, blue, green);
+ bitmap.SetPixel(x, y, color);
+ }
+
+ return new Extensions.System.Drawing.Common.BitmapVideoFrameWrapper(bitmap);
+ }
+
+ public static Extensions.SkiaSharp.BitmapVideoFrameWrapper CreateVideoFrame(int index, SKColorType fmt, int w, int h, float scaleNoise, float offset)
+ {
+ var bitmap = new SKBitmap(w, h, fmt, SKAlphaType.Opaque);
+
+ bitmap.Pixels = GenerateVideoFramePixels(index, w, h, scaleNoise, offset)
+ .Select(args => new SKColor(args.red, args.blue, args.green))
+ .ToArray();
+
+ return new Extensions.SkiaSharp.BitmapVideoFrameWrapper(bitmap);
+ }
+
+ private static IEnumerable<(int x, int y, byte red, byte green, byte blue)> GenerateVideoFramePixels(int index, int w, int h, float scaleNoise, float offset)
+ {
offset = offset * index;
for (var y = 0; y < h; y++)
@@ -36,15 +70,11 @@ public static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fm
var nx = x * scaleNoise + offset;
var ny = y * scaleNoise + offset;
- var value = (int)((Perlin.Noise(nx, ny) + 1.0f) / 2.0f * 255);
+ var value = (byte)((Perlin.Noise(nx, ny) + 1.0f) / 2.0f * 255);
- var color = Color.FromArgb((int)(value * xf), (int)(value * yf), value);
-
- bitmap.SetPixel(x, y, color);
+ yield return ((x, y, (byte)(value * xf), (byte)(value * yf), value));
}
}
-
- return new BitmapVideoFrameWrapper(bitmap);
}
//
diff --git a/FFMpegCore.Test/VideoTest.cs b/FFMpegCore.Test/VideoTest.cs
index fec8386..4403065 100644
--- a/FFMpegCore.Test/VideoTest.cs
+++ b/FFMpegCore.Test/VideoTest.cs
@@ -4,7 +4,6 @@
using FFMpegCore.Arguments;
using FFMpegCore.Enums;
using FFMpegCore.Exceptions;
-using FFMpegCore.Extensions.System.Drawing.Common;
using FFMpegCore.Pipes;
using FFMpegCore.Test.Resources;
using FFMpegCore.Test.Utilities;
@@ -15,7 +14,9 @@ namespace FFMpegCore.Test
[TestClass]
public class VideoTest
{
- [TestMethod, Timeout(10000)]
+ private const int BaseTimeoutMilliseconds = 15_000;
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToOGV()
{
using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
@@ -27,7 +28,7 @@ public void Video_ToOGV()
Assert.IsTrue(success);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToMP4()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
@@ -39,7 +40,7 @@ public void Video_ToMP4()
Assert.IsTrue(success);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToMP4_YUV444p()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
@@ -55,7 +56,7 @@ public void Video_ToMP4_YUV444p()
Assert.IsTrue(analysis.VideoStreams.First().PixelFormat == "yuv444p");
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToMP4_Args()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
@@ -68,7 +69,7 @@ public void Video_ToMP4_Args()
Assert.IsTrue(success);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToH265_MKV_Args()
{
using var outputFile = new TemporaryFile($"out.mkv");
@@ -82,10 +83,17 @@ public void Video_ToH265_MKV_Args()
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyDataTestMethod, Timeout(10000)]
+ [WindowsOnlyDataTestMethod, Timeout(BaseTimeoutMilliseconds)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
- public void Video_ToMP4_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
+ public void Video_ToMP4_Args_Pipe_WindowsOnly(System.Drawing.Imaging.PixelFormat pixelFormat) => Video_ToMP4_Args_Pipe_Internal(pixelFormat);
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
+ [DataRow(SkiaSharp.SKColorType.Rgb565)]
+ [DataRow(SkiaSharp.SKColorType.Bgra8888)]
+ public void Video_ToMP4_Args_Pipe(SkiaSharp.SKColorType pixelFormat) => Video_ToMP4_Args_Pipe_Internal(pixelFormat);
+
+ private static void Video_ToMP4_Args_Pipe_Internal(dynamic pixelFormat)
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
@@ -99,15 +107,20 @@ public void Video_ToMP4_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyTestMethod, Timeout(10000)]
- public void Video_ToMP4_Args_Pipe_DifferentImageSizes()
+ [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public void Video_ToMP4_Args_Pipe_DifferentImageSizes_WindowsOnly() => Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal(System.Drawing.Imaging.PixelFormat.Format24bppRgb);
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public void Video_ToMP4_Args_Pipe_DifferentImageSizes() => Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal(SkiaSharp.SKColorType.Rgb565);
+
+ private static void Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal(dynamic pixelFormat)
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
- BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
- BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
+ BitmapSource.CreateVideoFrame(0, pixelFormat, 255, 255, 1, 0),
+ BitmapSource.CreateVideoFrame(0, pixelFormat, 256, 256, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
@@ -119,15 +132,20 @@ public void Video_ToMP4_Args_Pipe_DifferentImageSizes()
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyTestMethod, Timeout(10000)]
- public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async()
+ [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_WindowsOnly_Async() => await Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal_Async(System.Drawing.Imaging.PixelFormat.Format24bppRgb);
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async() => await Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal_Async(SkiaSharp.SKColorType.Rgb565);
+
+ private static async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal_Async(dynamic pixelFormat)
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
- BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
- BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
+ BitmapSource.CreateVideoFrame(0, pixelFormat, 255, 255, 1, 0),
+ BitmapSource.CreateVideoFrame(0, pixelFormat, 256, 256, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
@@ -139,15 +157,21 @@ public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async()
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyTestMethod, Timeout(10000)]
- public void Video_ToMP4_Args_Pipe_DifferentPixelFormats()
+ [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public void Video_ToMP4_Args_Pipe_DifferentPixelFormats_WindowsOnly() =>
+ Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal(System.Drawing.Imaging.PixelFormat.Format24bppRgb, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public void Video_ToMP4_Args_Pipe_DifferentPixelFormats() => Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal(SkiaSharp.SKColorType.Rgb565, SkiaSharp.SKColorType.Bgra8888);
+
+ private static void Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal(dynamic pixelFormatFrame1, dynamic pixelFormatFrame2)
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
- BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
- BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0)
+ BitmapSource.CreateVideoFrame(0, pixelFormatFrame1, 255, 255, 1, 0),
+ BitmapSource.CreateVideoFrame(0, pixelFormatFrame2, 255, 255, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
@@ -159,15 +183,21 @@ public void Video_ToMP4_Args_Pipe_DifferentPixelFormats()
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyTestMethod, Timeout(10000)]
- public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async()
+ [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_WindowsOnly_Async() =>
+ await Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal_Async(System.Drawing.Imaging.PixelFormat.Format24bppRgb, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async() => await Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal_Async(SkiaSharp.SKColorType.Rgb565, SkiaSharp.SKColorType.Bgra8888);
+
+ private static async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal_Async(dynamic pixelFormatFrame1, dynamic pixelFormatFrame2)
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
- BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
- BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0)
+ BitmapSource.CreateVideoFrame(0, pixelFormatFrame1, 255, 255, 1, 0),
+ BitmapSource.CreateVideoFrame(0, pixelFormatFrame2, 255, 255, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
@@ -178,7 +208,7 @@ public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async()
.ProcessAsynchronously());
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToMP4_Args_StreamPipe()
{
using var input = File.OpenRead(TestResources.WebmVideo);
@@ -192,7 +222,7 @@ public void Video_ToMP4_Args_StreamPipe()
Assert.IsTrue(success);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task Video_ToMP4_Args_StreamOutputPipe_Async_Failure()
{
await Assert.ThrowsExceptionAsync(async () =>
@@ -206,7 +236,7 @@ await FFMpegArguments
});
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_StreamFile_OutputToMemoryStream()
{
var output = new MemoryStream();
@@ -223,7 +253,7 @@ public void Video_StreamFile_OutputToMemoryStream()
Console.WriteLine(result.Duration);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToMP4_Args_StreamOutputPipe_Failure()
{
Assert.ThrowsException(() =>
@@ -237,7 +267,7 @@ public void Video_ToMP4_Args_StreamOutputPipe_Failure()
});
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task Video_ToMP4_Args_StreamOutputPipe_Async()
{
await using var ms = new MemoryStream();
@@ -250,7 +280,7 @@ await FFMpegArguments
.ProcessAsynchronously();
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task TestDuplicateRun()
{
FFMpegArguments
@@ -266,7 +296,7 @@ await FFMpegArguments
File.Delete("temporary.mp4");
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void TranscodeToMemoryStream_Success()
{
using var output = new MemoryStream();
@@ -284,7 +314,7 @@ public void TranscodeToMemoryStream_Success()
Assert.AreEqual(inputAnalysis.Duration.TotalSeconds, outputAnalysis.Duration.TotalSeconds, 0.3);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToTS()
{
using var outputFile = new TemporaryFile($"out{VideoType.MpegTs.Extension}");
@@ -296,7 +326,7 @@ public void Video_ToTS()
Assert.IsTrue(success);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_ToTS_Args()
{
using var outputFile = new TemporaryFile($"out{VideoType.MpegTs.Extension}");
@@ -312,10 +342,17 @@ public void Video_ToTS_Args()
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyDataTestMethod, Timeout(10000)]
+ [WindowsOnlyDataTestMethod, Timeout(BaseTimeoutMilliseconds)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
- public async Task Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
+ public async Task Video_ToTS_Args_Pipe_WindowsOnly(System.Drawing.Imaging.PixelFormat pixelFormat) => await Video_ToTS_Args_Pipe_Internal(pixelFormat);
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
+ [DataRow(SkiaSharp.SKColorType.Rgb565)]
+ [DataRow(SkiaSharp.SKColorType.Bgra8888)]
+ public async Task Video_ToTS_Args_Pipe(SkiaSharp.SKColorType pixelFormat) => await Video_ToTS_Args_Pipe_Internal(pixelFormat);
+
+ private static async Task Video_ToTS_Args_Pipe_Internal(dynamic pixelFormat)
{
using var output = new TemporaryFile($"out{VideoType.Ts.Extension}");
var input = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
@@ -331,7 +368,7 @@ public async Task Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelF
Assert.AreEqual(VideoType.Ts.Name, analysis.Format.FormatName);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task Video_ToOGV_Resize()
{
using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
@@ -345,11 +382,10 @@ public async Task Video_ToOGV_Resize()
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyDataTestMethod, Timeout(10000)]
- [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
- [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
- [DataRow(System.Drawing.Imaging.PixelFormat.Format48bppRgb)]
- public void RawVideoPipeSource_Ogv_Scale(System.Drawing.Imaging.PixelFormat pixelFormat)
+ [WindowsOnlyDataTestMethod, Timeout(BaseTimeoutMilliseconds)]
+ [DataRow(SkiaSharp.SKColorType.Rgb565)]
+ [DataRow(SkiaSharp.SKColorType.Bgra8888)]
+ public void RawVideoPipeSource_Ogv_Scale(SkiaSharp.SKColorType pixelFormat)
{
using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
@@ -366,7 +402,7 @@ public void RawVideoPipeSource_Ogv_Scale(System.Drawing.Imaging.PixelFormat pixe
Assert.AreEqual((int)VideoSize.Ed, analysis.PrimaryVideoStream!.Width);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Scale_Mp4_Multithreaded()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
@@ -381,11 +417,18 @@ public void Scale_Mp4_Multithreaded()
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyDataTestMethod, Timeout(10000)]
+ [WindowsOnlyDataTestMethod, Timeout(BaseTimeoutMilliseconds)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
// [DataRow(PixelFormat.Format48bppRgb)]
- public void Video_ToMP4_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
+ public void Video_ToMP4_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat) => Video_ToMP4_Resize_Args_Pipe_Internal(pixelFormat);
+
+ [DataTestMethod, Timeout(BaseTimeoutMilliseconds)]
+ [DataRow(SkiaSharp.SKColorType.Rgb565)]
+ [DataRow(SkiaSharp.SKColorType.Bgra8888)]
+ public void Video_ToMP4_Resize_Args_Pipe(SkiaSharp.SKColorType pixelFormat) => Video_ToMP4_Resize_Args_Pipe_Internal(pixelFormat);
+
+ private static void Video_ToMP4_Resize_Args_Pipe_Internal(dynamic pixelFormat)
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
@@ -399,10 +442,10 @@ public void Video_ToMP4_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixe
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyTestMethod, Timeout(10000)]
- public void Video_Snapshot_InMemory()
+ [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public void Video_Snapshot_InMemory_SystemDrawingCommon()
{
- using var bitmap = FFMpegImage.Snapshot(TestResources.Mp4Video);
+ using var bitmap = Extensions.System.Drawing.Common.FFMpegImage.Snapshot(TestResources.Mp4Video);
var input = FFProbe.Analyse(TestResources.Mp4Video);
Assert.AreEqual(input.PrimaryVideoStream!.Width, bitmap.Width);
@@ -410,7 +453,19 @@ public void Video_Snapshot_InMemory()
Assert.AreEqual(bitmap.RawFormat, ImageFormat.Png);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public void Video_Snapshot_InMemory_SkiaSharp()
+ {
+ using var bitmap = Extensions.SkiaSharp.FFMpegImage.Snapshot(TestResources.Mp4Video);
+
+ var input = FFProbe.Analyse(TestResources.Mp4Video);
+ Assert.AreEqual(input.PrimaryVideoStream!.Width, bitmap.Width);
+ Assert.AreEqual(input.PrimaryVideoStream.Height, bitmap.Height);
+ // Note: The resulting ColorType is dependent on the execution environment and therefore not assessed,
+ // e.g. Bgra8888 on Windows and Rgba8888 on macOS.
+ }
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_Snapshot_PersistSnapshot()
{
using var outputPath = new TemporaryFile("out.png");
@@ -424,7 +479,7 @@ public void Video_Snapshot_PersistSnapshot()
Assert.AreEqual("png", analysis.PrimaryVideoStream!.CodecName);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_Join()
{
using var inputCopy = new TemporaryFile("copy-input.mp4");
@@ -446,7 +501,7 @@ public void Video_Join()
Assert.AreEqual(input.PrimaryVideoStream.Width, result.PrimaryVideoStream.Width);
}
- [TestMethod, Timeout(20000)]
+ [TestMethod, Timeout(2 * BaseTimeoutMilliseconds)]
public void Video_Join_Image_Sequence()
{
var imageSet = new List();
@@ -471,7 +526,7 @@ public void Video_Join_Image_Sequence()
Assert.AreEqual(imageAnalysis.PrimaryVideoStream!.Height, result.PrimaryVideoStream.Height);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_With_Only_Audio_Should_Extract_Metadata()
{
var video = FFProbe.Analyse(TestResources.Mp4WithoutVideo);
@@ -480,7 +535,7 @@ public void Video_With_Only_Audio_Should_Extract_Metadata()
Assert.AreEqual(10, video.Duration.TotalSeconds, 0.5);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_Duration()
{
var video = FFProbe.Analyse(TestResources.Mp4Video);
@@ -500,7 +555,7 @@ public void Video_Duration()
Assert.AreEqual(video.Duration.Seconds - 2, outputVideo.Duration.Seconds);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_UpdatesProgress()
{
using var outputFile = new TemporaryFile("out.mp4");
@@ -541,7 +596,7 @@ void OnTimeProgess(TimeSpan time)
Assert.AreNotEqual(analysis.Duration, timeDone);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_OutputsData()
{
using var outputFile = new TemporaryFile("out.mp4");
@@ -563,12 +618,17 @@ public void Video_OutputsData()
}
[SupportedOSPlatform("windows")]
- [WindowsOnlyTestMethod, Timeout(10000)]
- public void Video_TranscodeInMemory()
+ [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public void Video_TranscodeInMemory_WindowsOnly() => Video_TranscodeInMemory_Internal(System.Drawing.Imaging.PixelFormat.Format24bppRgb);
+
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
+ public void Video_TranscodeInMemory() => Video_TranscodeInMemory_Internal(SkiaSharp.SKColorType.Rgb565);
+
+ private static void Video_TranscodeInMemory_Internal(dynamic pixelFormat)
{
using var resStream = new MemoryStream();
var reader = new StreamPipeSink(resStream);
- var writer = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 128, 128));
+ var writer = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 128, 128));
FFMpegArguments
.FromPipeInput(writer)
@@ -583,7 +643,7 @@ public void Video_TranscodeInMemory()
Assert.AreEqual(vi.PrimaryVideoStream.Height, 128);
}
- [TestMethod, Timeout(20000)]
+ [TestMethod, Timeout(2 * BaseTimeoutMilliseconds)]
public void Video_TranscodeToMemory()
{
using var memoryStream = new MemoryStream();
@@ -601,7 +661,7 @@ public void Video_TranscodeToMemory()
Assert.AreEqual(vi.PrimaryVideoStream.Height, 360);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task Video_Cancel_Async()
{
using var outputFile = new TemporaryFile("out.mp4");
@@ -625,7 +685,7 @@ public async Task Video_Cancel_Async()
Assert.IsFalse(result);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_Cancel()
{
using var outputFile = new TemporaryFile("out.mp4");
@@ -646,7 +706,7 @@ public void Video_Cancel()
Assert.IsFalse(result);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task Video_Cancel_Async_With_Timeout()
{
using var outputFile = new TemporaryFile("out.mp4");
@@ -676,7 +736,7 @@ public async Task Video_Cancel_Async_With_Timeout()
Assert.AreEqual("aac", outputInfo.PrimaryAudioStream!.CodecName);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task Video_Cancel_CancellationToken_Async()
{
using var outputFile = new TemporaryFile("out.mp4");
@@ -701,7 +761,7 @@ public async Task Video_Cancel_CancellationToken_Async()
Assert.IsFalse(result);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task Video_Cancel_CancellationToken_Async_Throws()
{
using var outputFile = new TemporaryFile("out.mp4");
@@ -724,7 +784,7 @@ public async Task Video_Cancel_CancellationToken_Async_Throws()
await Assert.ThrowsExceptionAsync(() => task);
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public void Video_Cancel_CancellationToken_Throws()
{
using var outputFile = new TemporaryFile("out.mp4");
@@ -746,7 +806,7 @@ public void Video_Cancel_CancellationToken_Throws()
Assert.ThrowsException(() => task.ProcessSynchronously());
}
- [TestMethod, Timeout(10000)]
+ [TestMethod, Timeout(BaseTimeoutMilliseconds)]
public async Task Video_Cancel_CancellationToken_Async_With_Timeout()
{
using var outputFile = new TemporaryFile("out.mp4");
diff --git a/FFMpegCore.sln b/FFMpegCore.sln
index 5a9faa8..7ab0929 100644
--- a/FFMpegCore.sln
+++ b/FFMpegCore.sln
@@ -9,7 +9,9 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Test", "FFMpegCo
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Examples", "FFMpegCore.Examples\FFMpegCore.Examples.csproj", "{3125CF91-FFBD-4E4E-8930-247116AFE772}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FFMpegCore.Extensions.System.Drawing.Common", "FFMpegCore.Extensions.System.Drawing.Common\FFMpegCore.Extensions.System.Drawing.Common.csproj", "{9C1A4930-9369-4A18-AD98-929A2A510D80}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Extensions.System.Drawing.Common", "FFMpegCore.Extensions.System.Drawing.Common\FFMpegCore.Extensions.System.Drawing.Common.csproj", "{9C1A4930-9369-4A18-AD98-929A2A510D80}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Extensions.SkiaSharp", "FFMpegCore.Extensions.SkiaSharp\FFMpegCore.Extensions.SkiaSharp.csproj", "{5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -33,6 +35,10 @@ Global
{9C1A4930-9369-4A18-AD98-929A2A510D80}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9C1A4930-9369-4A18-AD98-929A2A510D80}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9C1A4930-9369-4A18-AD98-929A2A510D80}.Release|Any CPU.Build.0 = Release|Any CPU
+ {5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/FFMpegCore/Extend/TimeSpanExtensions.cs b/FFMpegCore/Extend/TimeSpanExtensions.cs
new file mode 100644
index 0000000..3e70d5c
--- /dev/null
+++ b/FFMpegCore/Extend/TimeSpanExtensions.cs
@@ -0,0 +1,15 @@
+namespace FFMpegCore.Extend;
+
+public static class TimeSpanExtensions
+{
+ public static string ToLongString(this TimeSpan timeSpan)
+ {
+ var hours = timeSpan.Hours;
+ if (timeSpan.Days > 0)
+ {
+ hours += timeSpan.Days * 24;
+ }
+
+ return $"{hours:00}:{timeSpan.Minutes:00}:{timeSpan.Seconds:00}.{timeSpan.Milliseconds:000}";
+ }
+}
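
Since the helper folds whole days into the hour field, timestamps past 24 hours remain valid ffmpeg time strings; a small sketch of the expected output (values computed by hand, not taken from the diff):

```csharp
using FFMpegCore.Extend;

// Days are folded into the hour component rather than being dropped,
// so -ss/-to values stay monotonic for very long inputs.
Console.WriteLine(TimeSpan.FromSeconds(90).ToLongString());      // 00:01:30.000
Console.WriteLine(new TimeSpan(1, 2, 3, 4, 500).ToLongString()); // 26:03:04.500
```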
diff --git a/FFMpegCore/FFMpeg/Arguments/EndSeekArgument.cs b/FFMpegCore/FFMpeg/Arguments/EndSeekArgument.cs
new file mode 100644
index 0000000..e4e8f5d
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/EndSeekArgument.cs
@@ -0,0 +1,19 @@
+using FFMpegCore.Extend;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents the end seek parameter
+ /// </summary>
+ public class EndSeekArgument : IArgument
+ {
+ public readonly TimeSpan? SeekTo;
+
+ public EndSeekArgument(TimeSpan? seekTo)
+ {
+ SeekTo = seekTo;
+ }
+
+ public string Text => SeekTo.HasValue ? $"-to {SeekTo.Value.ToLongString()}" : string.Empty;
+ }
+}
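
A sketch (not part of the diff) of how the new EndSeek option composes with the existing Seek on the fluent API to produce an `-ss`/`-to` pair; the paths are placeholders:

```csharp
using FFMpegCore;

// Keep only the 00:00:05 - 00:00:15 window; Seek emits -ss, EndSeek emits -to.
FFMpegArguments
    .FromFileInput("/path/to/input.mp4", verifyExists: false, options => options
        .Seek(TimeSpan.FromSeconds(5))
        .EndSeek(TimeSpan.FromSeconds(15)))
    .OutputToFile("/path/to/output.mp4", overwrite: true, options => options.CopyChannel())
    .ProcessSynchronously();
```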
diff --git a/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs b/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs
index 8862e76..29cda7f 100644
--- a/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs
+++ b/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs
@@ -1,4 +1,6 @@
-namespace FFMpegCore.Arguments
+using FFMpegCore.Extend;
+
+namespace FFMpegCore.Arguments
{
/// <summary>
/// Represents seek parameter
@@ -12,25 +14,6 @@ public SeekArgument(TimeSpan? seekTo)
SeekTo = seekTo;
}
- public string Text
- {
- get
- {
- if (SeekTo.HasValue)
- {
- var hours = SeekTo.Value.Hours;
- if (SeekTo.Value.Days > 0)
- {
- hours += SeekTo.Value.Days * 24;
- }
-
- return $"-ss {hours.ToString("00")}:{SeekTo.Value.Minutes.ToString("00")}:{SeekTo.Value.Seconds.ToString("00")}.{SeekTo.Value.Milliseconds.ToString("000")}";
- }
- else
- {
- return string.Empty;
- }
- }
- }
+ public string Text => SeekTo.HasValue ? $"-ss {SeekTo.Value.ToLongString()}" : string.Empty;
}
}
diff --git a/FFMpegCore/FFMpeg/FFMpeg.cs b/FFMpegCore/FFMpeg/FFMpeg.cs
index 94f35c0..362a865 100644
--- a/FFMpegCore/FFMpeg/FFMpeg.cs
+++ b/FFMpegCore/FFMpeg/FFMpeg.cs
@@ -247,6 +247,46 @@ public static bool Join(string output, params string[] videos)
}
}
+ private static FFMpegArgumentProcessor BaseSubVideo(string input, string output, TimeSpan startTime, TimeSpan endTime)
+ {
+ if (Path.GetExtension(input) != Path.GetExtension(output))
+ {
+ output = Path.Combine(Path.GetDirectoryName(output), Path.GetFileNameWithoutExtension(output) + Path.GetExtension(input));
+ }
+
+ return FFMpegArguments
+ .FromFileInput(input, true, options => options.Seek(startTime).EndSeek(endTime))
+ .OutputToFile(output, true, options => options.CopyChannel());
+ }
+
+ /// <summary>
+ /// Creates a new video starting and ending at the specified times
+ /// </summary>
+ /// <param name="input">Input video file.</param>
+ /// <param name="output">Output video file.</param>
+ /// <param name="startTime">The time at which the sub video should start</param>
+ /// <param name="endTime">The time at which the sub video should end</param>
+ /// <returns>True if the conversion succeeded, otherwise false.</returns>
+ public static bool SubVideo(string input, string output, TimeSpan startTime, TimeSpan endTime)
+ {
+ return BaseSubVideo(input, output, startTime, endTime)
+ .ProcessSynchronously();
+ }
+
+ /// <summary>
+ /// Creates a new video starting and ending at the specified times
+ /// </summary>
+ /// <param name="input">Input video file.</param>
+ /// <param name="output">Output video file.</param>
+ /// <param name="startTime">The time at which the sub video should start</param>
+ /// <param name="endTime">The time at which the sub video should end</param>
+ /// <returns>True if the conversion succeeded, otherwise false.</returns>
+ public static async Task<bool> SubVideoAsync(string input, string output, TimeSpan startTime, TimeSpan endTime)
+ {
+ return await BaseSubVideo(input, output, startTime, endTime)
+ .ProcessAsynchronously();
+ }
+
/// <summary>
/// Records M3U8 streams to the specified output.
/// </summary>
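
A usage sketch for the new SubVideo/SubVideoAsync helpers (the paths are placeholders, not part of the diff); both stream-copy the selected window via Seek + EndSeek rather than re-encoding:

```csharp
using FFMpegCore;

var inputPath = "/path/to/input.mp4";
var outputPath = "/path/to/clip.mp4";

// Cut the 10s-40s window synchronously; audio and video are stream-copied.
var ok = FFMpeg.SubVideo(inputPath, outputPath, TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(40));

// Or without blocking the caller:
var okAsync = await FFMpeg.SubVideoAsync(inputPath, outputPath, TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(40));
Console.WriteLine($"sync: {ok}, async: {okAsync}");
```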
diff --git a/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs b/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs
index 0f54b8c..cc49c5f 100644
--- a/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs
+++ b/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs
@@ -54,6 +54,7 @@ public FFMpegArgumentOptions WithAudioFilters(Action audioFi
public FFMpegArgumentOptions WithCustomArgument(string argument) => WithArgument(new CustomArgument(argument));
public FFMpegArgumentOptions Seek(TimeSpan? seekTo) => WithArgument(new SeekArgument(seekTo));
+ public FFMpegArgumentOptions EndSeek(TimeSpan? seekTo) => WithArgument(new EndSeekArgument(seekTo));
public FFMpegArgumentOptions Loop(int times) => WithArgument(new LoopArgument(times));
public FFMpegArgumentOptions OverwriteExisting() => WithArgument(new OverwriteArgument());
public FFMpegArgumentOptions SelectStream(int streamIndex, int inputFileIndex = 0,
diff --git a/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs b/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs
index fe4c881..2f3028f 100644
--- a/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs
+++ b/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs
@@ -15,13 +15,11 @@ public class RawVideoPipeSource : IPipeSource
private bool _formatInitialized;
private readonly IEnumerator<IVideoFrame> _framesEnumerator;
- public RawVideoPipeSource(IEnumerator<IVideoFrame> framesEnumerator)
+ public RawVideoPipeSource(IEnumerable<IVideoFrame> framesEnumerator)
{
- _framesEnumerator = framesEnumerator;
+ _framesEnumerator = framesEnumerator.GetEnumerator();
}
- public RawVideoPipeSource(IEnumerable<IVideoFrame> framesEnumerator) : this(framesEnumerator.GetEnumerator()) { }
-
public string GetStreamArguments()
{
if (!_formatInitialized)
diff --git a/FFMpegCore/FFMpegCore.csproj b/FFMpegCore/FFMpegCore.csproj
index 843bdba..db5abd1 100644
--- a/FFMpegCore/FFMpegCore.csproj
+++ b/FFMpegCore/FFMpegCore.csproj
@@ -3,7 +3,7 @@
<IsPackable>true</IsPackable>
<Description>A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your .NET applications</Description>
- <Version>5.0.1</Version>
+ <Version>5.0.2</Version>
<PackageOutputPath>../nupkg</PackageOutputPath>
@@ -13,12 +13,12 @@
-
+
-
-
+
+
diff --git a/FFMpegCore/FFProbe/MediaAnalysis.cs b/FFMpegCore/FFProbe/MediaAnalysis.cs
index 53943dc..9fce0fe 100644
--- a/FFMpegCore/FFProbe/MediaAnalysis.cs
+++ b/FFMpegCore/FFProbe/MediaAnalysis.cs
@@ -50,7 +50,7 @@ private MediaFormat ParseFormat(Format analysisFormat)
{
var bitDepth = int.TryParse(stream.BitsPerRawSample, out var bprs) ? bprs :
stream.BitsPerSample;
- return bitDepth == 0 ? null : (int?)bitDepth;
+ return bitDepth == 0 ? null : bitDepth;
}
private VideoStream ParseVideoStream(FFProbeStream stream)
@@ -126,7 +126,7 @@ public static class MediaAnalysisUtils
{
private static readonly Regex DurationRegex = new(@"^(\d+):(\d{1,2}):(\d{1,2})\.(\d{1,3})", RegexOptions.Compiled);
- internal static Dictionary<string, string>? ToCaseInsensitive(this Dictionary<string, string>? dictionary)
+ internal static Dictionary<string, string> ToCaseInsensitive(this Dictionary<string, string>? dictionary)
{
return dictionary?.ToDictionary(tag => tag.Key, tag => tag.Value, StringComparer.OrdinalIgnoreCase) ?? new Dictionary<string, string>();
}
@@ -195,11 +195,6 @@ public static TimeSpan ParseDuration(string duration)
}
}
- public static TimeSpan ParseDuration(FFProbeStream ffProbeStream)
- {
- return ParseDuration(ffProbeStream.Duration);
- }
-
public static int ParseRotation(FFProbeStream fFProbeStream)
{
var displayMatrixSideData = fFProbeStream.SideData?.Find(item => item.TryGetValue("side_data_type", out var rawSideDataType) && rawSideDataType.ToString() == "Display Matrix");
diff --git a/FFMpegCore/Helpers/FFProbeHelper.cs b/FFMpegCore/Helpers/FFProbeHelper.cs
index 0c44ab6..ff1ff20 100644
--- a/FFMpegCore/Helpers/FFProbeHelper.cs
+++ b/FFMpegCore/Helpers/FFProbeHelper.cs
@@ -3,27 +3,10 @@
namespace FFMpegCore.Helpers
{
- public class FFProbeHelper
+ public static class FFProbeHelper
{
private static bool _ffprobeVerified;
- public static int Gcd(int first, int second)
- {
- while (first != 0 && second != 0)
- {
- if (first > second)
- {
- first -= second;
- }
- else
- {
- second -= first;
- }
- }
-
- return first == 0 ? second : first;
- }
-
public static void RootExceptionCheck()
{
if (GlobalFFOptions.Current.BinaryFolder == null)
diff --git a/README.md b/README.md
index 990361e..d2a9633 100644
--- a/README.md
+++ b/README.md
@@ -72,6 +72,15 @@ FFMpeg.Join(@"..\joined_video.mp4",
);
```
+### Create a sub video
+``` csharp
+FFMpeg.SubVideo(inputPath,
+ outputPath,
+ TimeSpan.FromSeconds(0),
+ TimeSpan.FromSeconds(30)
+);
+```
+
### Join images into a video:
```csharp
FFMpeg.JoinImageSequence(@"..\joined_video.mp4", frameRate: 1,
diff --git a/testenvironments.json b/testenvironments.json
new file mode 100644
index 0000000..14b2763
--- /dev/null
+++ b/testenvironments.json
@@ -0,0 +1,10 @@
+{
+ "version": "1",
+ "environments": [
+ {
+ "name": "Ubuntu",
+ "type": "wsl",
+ "wslDistribution": "Ubuntu"
+ }
+ ]
+}
\ No newline at end of file