Merge pull request #192 from rosenbjerg/master

v.4.1.0
Malte Rosenbjerg authored on 2021-03-15 23:54:02 +01:00, committed by GitHub
commit 18201e2cde
20 changed files with 423 additions and 82 deletions


@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\FFMpegCore\FFMpegCore.csproj" />
</ItemGroup>
</Project>


@@ -0,0 +1,124 @@
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using FFMpegCore;
using FFMpegCore.Enums;
using FFMpegCore.Pipes;
using FFMpegCore.Extend;
var inputPath = "/path/to/input";
var outputPath = "/path/to/output";
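// Analyse media metadata with ffprobe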
{
var mediaInfo = FFProbe.Analyse(inputPath);
}
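// ...or analyse asynchronously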
{
var mediaInfo = await FFProbe.AnalyseAsync(inputPath);
}
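// Convert/transcode with the fluent argument builder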
{
FFMpegArguments
.FromFileInput(inputPath)
.OutputToFile(outputPath, false, options => options
.WithVideoCodec(VideoCodec.LibX264)
.WithConstantRateFactor(21)
.WithAudioCodec(AudioCodec.Aac)
.WithVariableBitrate(4)
.WithVideoFilters(filterOptions => filterOptions
.Scale(VideoSize.Hd))
.WithFastStart())
.ProcessSynchronously();
}
{
// process the snapshot in-memory and use the Bitmap directly
var bitmap = FFMpeg.Snapshot(inputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
// or persist the image on the drive
FFMpeg.Snapshot(inputPath, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
}
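// Convert to and from streams via pipes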
var inputStream = new MemoryStream();
var outputStream = new MemoryStream();
{
await FFMpegArguments
.FromPipeInput(new StreamPipeSource(inputStream))
.OutputToPipe(new StreamPipeSink(outputStream), options => options
.WithVideoCodec("vp9")
.ForceFormat("webm"))
.ProcessAsynchronously();
}
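// Join multiple video files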
{
FFMpeg.Join(@"..\joined_video.mp4",
@"..\part1.mp4",
@"..\part2.mp4",
@"..\part3.mp4"
);
}
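// Join an image sequence into a video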
{
FFMpeg.JoinImageSequence(@"..\joined_video.mp4", frameRate: 1,
ImageInfo.FromPath(@"..\1.png"),
ImageInfo.FromPath(@"..\2.png"),
ImageInfo.FromPath(@"..\3.png")
);
}
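// Strip the audio track from a video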
{
FFMpeg.Mute(inputPath, outputPath);
}
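// Extract the audio track to a separate file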
{
FFMpeg.ExtractAudio(inputPath, outputPath);
}
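// Replace the audio track of a video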
var inputAudioPath = "/path/to/input/audio";
{
FFMpeg.ReplaceAudio(inputPath, inputAudioPath, outputPath);
}
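// Combine a poster image with an audio file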
var inputImagePath = "/path/to/input/image";
{
FFMpeg.PosterWithAudio(inputPath, inputAudioPath, outputPath);
// or
var image = Image.FromFile(inputImagePath);
image.AddAudio(inputAudioPath, outputPath);
}
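// Pipe raw video frames (IVideoFrame) into ffmpeg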
IVideoFrame GetNextFrame() => throw new NotImplementedException();
{
IEnumerable<IVideoFrame> CreateFrames(int count)
{
for(int i = 0; i < count; i++)
{
yield return GetNextFrame(); //method of generating new frames
}
}
var videoFramesSource = new RawVideoPipeSource(CreateFrames(64)) //pass IEnumerable<IVideoFrame> or IEnumerator<IVideoFrame> to constructor of RawVideoPipeSource
{
FrameRate = 30 //set source frame rate
};
await FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputPath, false, options => options
.WithVideoCodec(VideoCodec.LibVpx))
.ProcessAsynchronously();
}
{
// setting global options
GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
// or
GlobalFFOptions.Configure(options => options.BinaryFolder = "./bin");
// or individual, per-run options
await FFMpegArguments
.FromFileInput(inputPath)
.OutputToFile(outputPath)
.ProcessAsynchronously(true, new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
}


@@ -239,9 +239,8 @@ public void Builder_BuildString_Loop()
    [TestMethod]
    public void Builder_BuildString_Seek()
    {
        var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.Seek(TimeSpan.FromSeconds(10)))
            .OutputToFile("output.mp4", false, opt => opt.Seek(TimeSpan.FromSeconds(10))).Arguments;
-       Assert.AreEqual("-ss 00:00:10 -i \"input.mp4\" -ss 00:00:10 \"output.mp4\"", str);
+       Assert.AreEqual("-ss 00:00:10.000 -i \"input.mp4\" -ss 00:00:10.000 \"output.mp4\"", str);
    }
    [TestMethod]


@@ -39,10 +39,10 @@
  </ItemGroup>
  <ItemGroup>
-   <PackageReference Include="GitHubActionsTestLogger" Version="1.1.2" />
-   <PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.8.3" />
-   <PackageReference Include="MSTest.TestAdapter" Version="2.1.2" />
-   <PackageReference Include="MSTest.TestFramework" Version="2.1.2" />
+   <PackageReference Include="GitHubActionsTestLogger" Version="1.2.0" />
+   <PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.9.1" />
+   <PackageReference Include="MSTest.TestAdapter" Version="2.2.1" />
+   <PackageReference Include="MSTest.TestFramework" Version="2.2.1" />
  </ItemGroup>
  <ItemGroup>


@@ -25,6 +25,24 @@ public async Task Audio_FromStream_Duration()
    Assert.IsTrue(fileAnalysis.Duration == streamAnalysis.Duration);
}
[DataTestMethod]
[DataRow("0:00:03.008000", 0, 0, 0, 3, 8)]
[DataRow("05:12:59.177", 0, 5, 12, 59, 177)]
[DataRow("149:07:50.911750", 6, 5, 7, 50, 911)]
[DataRow("00:00:00.83", 0, 0, 0, 0, 830)]
public void MediaAnalysis_ParseDuration(string duration, int expectedDays, int expectedHours, int expectedMinutes, int expectedSeconds, int expectedMilliseconds)
{
var ffprobeStream = new FFProbeStream { Duration = duration };
var parsedDuration = MediaAnalysisUtils.ParseDuration(ffprobeStream);
Assert.AreEqual(expectedDays, parsedDuration.Days);
Assert.AreEqual(expectedHours, parsedDuration.Hours);
Assert.AreEqual(expectedMinutes, parsedDuration.Minutes);
Assert.AreEqual(expectedSeconds, parsedDuration.Seconds);
Assert.AreEqual(expectedMilliseconds, parsedDuration.Milliseconds);
}
[TestMethod]
public async Task Uri_Duration()
{


@@ -21,7 +21,7 @@ public static IEnumerable<IVideoFrame> CreateBitmaps(int count, PixelFormat fmt,
        }
    }
-   private static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset)
+   public static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset)
    {
        var bitmap = new Bitmap(w, h, fmt);


@@ -87,6 +87,92 @@ public void Video_ToMP4_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat
    Assert.IsTrue(success);
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_Pipe_DifferentImageSizes()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
var ex = Assert.ThrowsException<FFMpegException>(() => FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously());
Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
}
[TestMethod, Timeout(10000)]
public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
var ex = await Assert.ThrowsExceptionAsync<FFMpegException>(() => FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessAsynchronously());
Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_Pipe_DifferentPixelFormats()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
var ex = Assert.ThrowsException<FFMpegException>(() => FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously());
Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
}
[TestMethod, Timeout(10000)]
public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
var ex = await Assert.ThrowsExceptionAsync<FFMpegException>(() => FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessAsynchronously());
Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_StreamPipe()
{
@@ -114,6 +200,8 @@ await FFMpegArguments
        .ProcessAsynchronously();
    });
}
[TestMethod, Timeout(10000)]
public void Video_StreamFile_OutputToMemoryStream()
{


@@ -7,6 +7,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FFMpegCore", "FFMpegCore\FF
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Test", "FFMpegCore.Test\FFMpegCore.Test.csproj", "{F20C8353-72D9-454B-9F16-3624DBAD2328}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FFMpegCore.Examples", "FFMpegCore.Examples\FFMpegCore.Examples.csproj", "{3125CF91-FFBD-4E4E-8930-247116AFE772}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        Debug|Any CPU = Debug|Any CPU
@@ -21,6 +23,10 @@ Global
        {F20C8353-72D9-454B-9F16-3624DBAD2328}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {F20C8353-72D9-454B-9F16-3624DBAD2328}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {F20C8353-72D9-454B-9F16-3624DBAD2328}.Release|Any CPU.Build.0 = Release|Any CPU
{3125CF91-FFBD-4E4E-8930-247116AFE772}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{3125CF91-FFBD-4E4E-8930-247116AFE772}.Debug|Any CPU.Build.0 = Debug|Any CPU
{3125CF91-FFBD-4E4E-8930-247116AFE772}.Release|Any CPU.ActiveCfg = Release|Any CPU
{3125CF91-FFBD-4E4E-8930-247116AFE772}.Release|Any CPU.Build.0 = Release|Any CPU
    EndGlobalSection
    GlobalSection(SolutionProperties) = preSolution
        HideSolutionNode = FALSE

FFMpegCore/Assembly.cs Normal file

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("FFMpegCore.Test")]


@@ -6,7 +6,7 @@ namespace FFMpegCore.Extend
{
    public static class BitmapExtensions
    {
-       public static bool AddAudio(this Bitmap poster, string audio, string output)
+       public static bool AddAudio(this Image poster, string audio, string output)
        {
            var destination = $"{Environment.TickCount}.png";
            poster.Save(destination);


@@ -0,0 +1,27 @@
using System.Threading;
using System.Threading.Tasks;
namespace FFMpegCore.Arguments
{
/// <summary>
/// Represents outputting to url using supported protocols
/// See http://ffmpeg.org/ffmpeg-protocols.html
/// </summary>
public class OutputUrlArgument : IOutputArgument
{
public readonly string Url;
public OutputUrlArgument(string url)
{
Url = url;
}
public void Post() { }
public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
public void Pre() { }
public string Text => Url;
}
}
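A minimal usage sketch (not part of the diff; the RTMP URL and input path below are placeholders) showing how the new `OutputToUrl(..)` overloads added to `FFMpegArguments` route output through this argument:

```csharp
using FFMpegCore;

// Stream a local file to a placeholder RTMP endpoint via the new OutputToUrl(..) overloads.
await FFMpegArguments
    .FromFileInput("/path/to/input.mp4")
    .OutputToUrl("rtmp://example.org/live/stream-key", options => options
        .ForceFormat("flv"))
    .ProcessAsynchronously();
```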


@@ -41,13 +41,16 @@ public async Task During(CancellationToken cancellationToken = default)
    try
    {
        await ProcessDataAsync(cancellationToken);
-       Debug.WriteLine($"Disconnecting NamedPipeServerStream on {GetType().Name}");
-       Pipe?.Disconnect();
    }
    catch (TaskCanceledException)
    {
        Debug.WriteLine($"ProcessDataAsync on {GetType().Name} cancelled");
    }
+   finally
+   {
+       Debug.WriteLine($"Disconnecting NamedPipeServerStream on {GetType().Name}");
+       Pipe?.Disconnect();
+   }
}
protected abstract Task ProcessDataAsync(CancellationToken token);


@@ -8,11 +8,28 @@ namespace FFMpegCore.Arguments
    public class SeekArgument : IArgument
    {
        public readonly TimeSpan? SeekTo;
        public SeekArgument(TimeSpan? seekTo)
        {
            SeekTo = seekTo;
        }
-       public string Text => !SeekTo.HasValue ? string.Empty : $"-ss {SeekTo.Value}";
+       public string Text {
get {
if(SeekTo.HasValue)
{
int hours = SeekTo.Value.Hours;
if(SeekTo.Value.Days > 0)
{
hours += SeekTo.Value.Days * 24;
}
return $"-ss {hours.ToString("00")}:{SeekTo.Value.Minutes.ToString("00")}:{SeekTo.Value.Seconds.ToString("00")}.{SeekTo.Value.Milliseconds.ToString("000")}";
}
else
{
return string.Empty;
}
}
}
    }
}
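For illustration only, assuming the class above: the reworked `Text` property always emits a zero-padded `hh:mm:ss.fff` value and folds whole days into the hour component, which matches the updated Builder_BuildString_Seek expectation shown earlier.

```csharp
using System;
using FFMpegCore.Arguments;

Console.WriteLine(new SeekArgument(TimeSpan.FromSeconds(10)).Text);      // -ss 00:00:10.000
Console.WriteLine(new SeekArgument(new TimeSpan(1, 2, 3, 4, 500)).Text); // 1 day + 2 h -> -ss 26:03:04.500
Console.WriteLine(new SeekArgument(null).Text);                          // prints an empty line
```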


@@ -49,4 +49,12 @@ public FFMpegArgumentException(string? message = null, Exception? innerException
        {
        }
    }
public class FFMpegStreamFormatException : FFMpegException
{
public FFMpegStreamFormatException(FFMpegExceptionType type, string message, Exception? innerException = null)
: base(type, message, innerException)
{
}
}
}


@@ -163,8 +163,8 @@ public static bool Convert(
    var source = FFProbe.Analyse(input);
    FFMpegHelper.ConversionSizeExceptionCheck(source);
-   var scale = VideoSize.Original == size ? 1 : (double)source.PrimaryVideoStream.Height / (int)size;
-   var outputSize = new Size((int)(source.PrimaryVideoStream.Width / scale), (int)(source.PrimaryVideoStream.Height / scale));
+   var scale = VideoSize.Original == size ? 1 : (double)source.PrimaryVideoStream!.Height / (int)size;
+   var outputSize = new Size((int)(source.PrimaryVideoStream!.Width / scale), (int)(source.PrimaryVideoStream.Height / scale));
    if (outputSize.Width % 2 != 0)
        outputSize.Width += 1;


@@ -49,7 +49,8 @@ private FFMpegArguments WithInput(IInputArgument inputArgument, Action<FFMpegArg
    }
    public FFMpegArgumentProcessor OutputToFile(string file, bool overwrite = true, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputArgument(file, overwrite), addArguments);
-   public FFMpegArgumentProcessor OutputToFile(Uri uri, bool overwrite = true, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputArgument(uri.AbsolutePath, overwrite), addArguments);
+   public FFMpegArgumentProcessor OutputToUrl(string uri, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputUrlArgument(uri), addArguments);
+   public FFMpegArgumentProcessor OutputToUrl(Uri uri, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputUrlArgument(uri.ToString()), addArguments);
    public FFMpegArgumentProcessor OutputToPipe(IPipeSink reader, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputPipeArgument(reader), addArguments);
    private FFMpegArgumentProcessor ToProcessor(IOutputArgument argument, Action<FFMpegArgumentOptions>? addArguments)


@@ -64,7 +64,7 @@ public async Task WriteAsync(System.IO.Stream outputStream, CancellationToken ca
    private void CheckFrameAndThrow(IVideoFrame frame)
    {
        if (frame.Width != Width || frame.Height != Height || frame.Format != StreamFormat)
-           throw new FFMpegException(FFMpegExceptionType.Operation, "Video frame is not the same format as created raw video stream\r\n" +
+           throw new FFMpegStreamFormatException(FFMpegExceptionType.Operation, "Video frame is not the same format as created raw video stream\r\n" +
                $"Frame format: {frame.Width}x{frame.Height} pix_fmt: {frame.Format}\r\n" +
                $"Stream format: {Width}x{Height} pix_fmt: {StreamFormat}");
    }
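A consumer-side sketch of what the more specific exception type enables (paths and the frame source are placeholders, and the `using` directives assume the library's existing namespace layout):

```csharp
using System;
using System.Collections.Generic;
using FFMpegCore;
using FFMpegCore.Exceptions;
using FFMpegCore.Pipes;

// Placeholder frame source, mirroring the example program above.
IEnumerable<IVideoFrame> GetFrames() => throw new NotImplementedException();

try
{
    FFMpegArguments
        .FromPipeInput(new RawVideoPipeSource(GetFrames()) { FrameRate = 30 })
        .OutputToFile("/path/to/output.mp4", false)
        .ProcessSynchronously();
}
catch (FFMpegException ex) when (ex.GetBaseException() is FFMpegStreamFormatException formatException)
{
    // Raised when the piped frames do not all share the size and pixel format of the first frame.
    Console.WriteLine(formatException.Message);
}
```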


@@ -5,18 +5,17 @@
    <RepositoryUrl>https://github.com/rosenbjerg/FFMpegCore</RepositoryUrl>
    <PackageProjectUrl>https://github.com/rosenbjerg/FFMpegCore</PackageProjectUrl>
    <Copyright></Copyright>
-   <Description>A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your C# applications</Description>
+   <Description>A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your .NET applications</Description>
    <Version>3.0.0.0</Version>
    <AssemblyVersion>3.0.0.0</AssemblyVersion>
    <FileVersion>3.0.0.0</FileVersion>
-   <PackageReleaseNotes>- Video filter args refactored to support multiple arguments
-   - Cancel improved with timeout (thanks TFleury)
-   - Basic support for webcam/mic input through InputDeviceArgument (thanks TFleury)
-   - Other fixes and improvements</PackageReleaseNotes>
+   <PackageReleaseNotes>- Fixes for RawVideoPipeSource hanging (thanks to max619)
+   - Added .OutputToUrl(..) method for outputting to url using supported protocol (thanks to TFleury)
+   - Improved timespan parsing (thanks to test-in-prod)</PackageReleaseNotes>
    <LangVersion>8</LangVersion>
-   <PackageVersion>4.0.0</PackageVersion>
+   <PackageVersion>4.1.0</PackageVersion>
    <PackageLicenseExpression>MIT</PackageLicenseExpression>
-   <Authors>Malte Rosenbjerg, Vlad Jerca</Authors>
+   <Authors>Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev</Authors>
    <PackageTags>ffmpeg ffprobe convert video audio mediafile resize analyze muxing</PackageTags>
    <RepositoryType>GitHub</RepositoryType>
    <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
@@ -32,7 +31,7 @@
    <ItemGroup>
        <PackageReference Include="Instances" Version="1.6.0" />
-       <PackageReference Include="System.Drawing.Common" Version="5.0.0" />
+       <PackageReference Include="System.Drawing.Common" Version="5.0.2" />
        <PackageReference Include="System.Text.Json" Version="5.0.1" />
    </ItemGroup>


@@ -7,8 +7,6 @@ namespace FFMpegCore
{
    internal class MediaAnalysis : IMediaAnalysis
    {
-       private static readonly Regex DurationRegex = new Regex("^(\\d{1,2}:\\d{1,2}:\\d{1,2}(.\\d{1,7})?)", RegexOptions.Compiled);
        internal MediaAnalysis(FFProbeAnalysis analysis)
        {
            Format = ParseFormat(analysis.Format);
@@ -20,7 +18,7 @@ private MediaFormat ParseFormat(Format analysisFormat)
        {
            return new MediaFormat
            {
-               Duration = TimeSpan.Parse(analysisFormat.Duration ?? "0"),
+               Duration = MediaAnalysisUtils.ParseDuration(analysisFormat.Duration),
                FormatName = analysisFormat.FormatName,
                FormatLongName = analysisFormat.FormatLongName,
                StreamCount = analysisFormat.NbStreams,
@@ -50,14 +48,14 @@ private VideoStream ParseVideoStream(FFProbeStream stream)
            return new VideoStream
            {
                Index = stream.Index,
-               AvgFrameRate = DivideRatio(ParseRatioDouble(stream.AvgFrameRate, '/')),
-               BitRate = !string.IsNullOrEmpty(stream.BitRate) ? ParseIntInvariant(stream.BitRate) : default,
-               BitsPerRawSample = !string.IsNullOrEmpty(stream.BitsPerRawSample) ? ParseIntInvariant(stream.BitsPerRawSample) : default,
+               AvgFrameRate = MediaAnalysisUtils.DivideRatio(MediaAnalysisUtils.ParseRatioDouble(stream.AvgFrameRate, '/')),
+               BitRate = !string.IsNullOrEmpty(stream.BitRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitRate) : default,
+               BitsPerRawSample = !string.IsNullOrEmpty(stream.BitsPerRawSample) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitsPerRawSample) : default,
                CodecName = stream.CodecName,
                CodecLongName = stream.CodecLongName,
-               DisplayAspectRatio = ParseRatioInt(stream.DisplayAspectRatio, ':'),
-               Duration = ParseDuration(stream),
-               FrameRate = DivideRatio(ParseRatioDouble(stream.FrameRate, '/')),
+               DisplayAspectRatio = MediaAnalysisUtils.ParseRatioInt(stream.DisplayAspectRatio, ':'),
+               Duration = MediaAnalysisUtils.ParseDuration(stream),
+               FrameRate = MediaAnalysisUtils.DivideRatio(MediaAnalysisUtils.ParseRatioDouble(stream.FrameRate, '/')),
                Height = stream.Height ?? 0,
                Width = stream.Width ?? 0,
                Profile = stream.Profile,
@@ -68,57 +66,89 @@ private VideoStream ParseVideoStream(FFProbeStream stream)
            };
        }
-       private static TimeSpan ParseDuration(FFProbeStream ffProbeStream)
-       {
-           return !string.IsNullOrEmpty(ffProbeStream.Duration)
-               ? TimeSpan.Parse(ffProbeStream.Duration)
-               : TimeSpan.Parse(TrimTimeSpan(ffProbeStream.GetDuration()) ?? "0");
-       }
-       private static string? TrimTimeSpan(string? durationTag)
-       {
-           var durationMatch = DurationRegex.Match(durationTag ?? "");
-           return durationMatch.Success ? durationMatch.Groups[1].Value : null;
-       }
        private AudioStream ParseAudioStream(FFProbeStream stream)
        {
            return new AudioStream
            {
                Index = stream.Index,
-               BitRate = !string.IsNullOrEmpty(stream.BitRate) ? ParseIntInvariant(stream.BitRate) : default,
+               BitRate = !string.IsNullOrEmpty(stream.BitRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitRate) : default,
                CodecName = stream.CodecName,
                CodecLongName = stream.CodecLongName,
                Channels = stream.Channels ?? default,
                ChannelLayout = stream.ChannelLayout,
-               Duration = ParseDuration(stream),
-               SampleRateHz = !string.IsNullOrEmpty(stream.SampleRate) ? ParseIntInvariant(stream.SampleRate) : default,
+               Duration = MediaAnalysisUtils.ParseDuration(stream),
+               SampleRateHz = !string.IsNullOrEmpty(stream.SampleRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.SampleRate) : default,
                Profile = stream.Profile,
                Language = stream.GetLanguage(),
                Tags = stream.Tags,
            };
        }
-       private static double DivideRatio((double, double) ratio) => ratio.Item1 / ratio.Item2;
-       private static (int, int) ParseRatioInt(string input, char separator)
+   }
+   public static class MediaAnalysisUtils
+   {
+       private static readonly Regex DurationRegex = new Regex(@"^(\d+):(\d{1,2}):(\d{1,2})\.(\d{1,3})", RegexOptions.Compiled);
+       public static double DivideRatio((double, double) ratio) => ratio.Item1 / ratio.Item2;
+       public static (int, int) ParseRatioInt(string input, char separator)
        {
            if (string.IsNullOrEmpty(input)) return (0, 0);
            var ratio = input.Split(separator);
            return (ParseIntInvariant(ratio[0]), ParseIntInvariant(ratio[1]));
        }
-       private static (double, double) ParseRatioDouble(string input, char separator)
+       public static (double, double) ParseRatioDouble(string input, char separator)
        {
            if (string.IsNullOrEmpty(input)) return (0, 0);
            var ratio = input.Split(separator);
            return (ratio.Length > 0 ? ParseDoubleInvariant(ratio[0]) : 0, ratio.Length > 1 ? ParseDoubleInvariant(ratio[1]) : 0);
        }
-       private static double ParseDoubleInvariant(string line) =>
+       public static double ParseDoubleInvariant(string line) =>
            double.Parse(line, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture);
-       private static int ParseIntInvariant(string line) =>
+       public static int ParseIntInvariant(string line) =>
            int.Parse(line, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture);
public static TimeSpan ParseDuration(string duration)
{
if (!string.IsNullOrEmpty(duration))
{
var match = DurationRegex.Match(duration);
if (match.Success)
{
// ffmpeg may provide < 3-digit number of milliseconds (omitting trailing zeros), which won't simply parse correctly
// e.g. 00:12:02.11 -> 12 minutes 2 seconds and 110 milliseconds
var millisecondsPart = match.Groups[4].Value;
if (millisecondsPart.Length < 3)
{
millisecondsPart = millisecondsPart.PadRight(3, '0');
}
var hours = int.Parse(match.Groups[1].Value);
var minutes = int.Parse(match.Groups[2].Value);
var seconds = int.Parse(match.Groups[3].Value);
var milliseconds = int.Parse(millisecondsPart);
return new TimeSpan(0, hours, minutes, seconds, milliseconds);
}
else
{
return TimeSpan.Zero;
}
}
else
{
return TimeSpan.Zero;
}
}
public static TimeSpan ParseDuration(FFProbeStream ffProbeStream)
{
return ParseDuration(ffProbeStream.Duration);
}
    }
}
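A quick illustration of the new public helper, using one of the duration strings from the test cases added above:

```csharp
using System;
using FFMpegCore;

// "149:07:50.911750" -> 6 days, 5 hours, 7 minutes, 50 seconds, 911 ms
var duration = MediaAnalysisUtils.ParseDuration("149:07:50.911750");
Console.WriteLine(duration); // 6.05:07:50.9110000
```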


@@ -22,11 +22,11 @@ A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and
FFProbe is used to gather media information:
```csharp
- var mediaInfo = FFProbe.Analyse(inputFile);
+ var mediaInfo = FFProbe.Analyse(inputPath);
```
or
```csharp
- var mediaInfo = await FFProbe.AnalyseAsync(inputFile);
+ var mediaInfo = await FFProbe.AnalyseAsync(inputPath);
```
@@ -43,20 +43,19 @@ FFMpegArguments
        .WithConstantRateFactor(21)
        .WithAudioCodec(AudioCodec.Aac)
        .WithVariableBitrate(4)
-       .WithFastStart()
+       .WithVideoFilters(filterOptions => filterOptions
            .Scale(VideoSize.Hd))
+       .WithFastStart())
    .ProcessSynchronously();
```
Easily capture screens from your videos:
```csharp
- var mediaFileAnalysis = FFProbe.Analyse(inputPath);
// process the snapshot in-memory and use the Bitmap directly
- var bitmap = FFMpeg.Snapshot(mediaFileAnalysis, new Size(200, 400), TimeSpan.FromMinutes(1));
+ var bitmap = FFMpeg.Snapshot(inputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
// or persist the image on the drive
- FFMpeg.Snapshot(mediaFileAnalysis, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1))
+ FFMpeg.Snapshot(inputPath, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
```
Convert to and/or from streams
@@ -89,25 +88,25 @@ FFMpeg.JoinImageSequence(@"..\joined_video.mp4", frameRate: 1,
Mute videos:
```csharp
- FFMpeg.Mute(inputFilePath, outputFilePath);
+ FFMpeg.Mute(inputPath, outputPath);
```
Save audio track from video:
```csharp
- FFMpeg.ExtractAudio(inputVideoFilePath, outputAudioFilePath);
+ FFMpeg.ExtractAudio(inputPath, outputPath);
```
Add or replace audio track on video:
```csharp
- FFMpeg.ReplaceAudio(inputVideoFilePath, inputAudioFilePath, outputVideoFilePath);
+ FFMpeg.ReplaceAudio(inputPath, inputAudioPath, outputPath);
```
Add poster image to audio file (good for youtube videos):
```csharp
- FFMpeg.PosterWithAudio(inputImageFilePath, inputAudioFilePath, outputVideoFilePath);
+ FFMpeg.PosterWithAudio(inputPath, inputAudioPath, outputPath);
// or
- var image = Image.FromFile(inputImageFile);
- image.AddAudio(inputAudioFilePath, outputVideoFilePath);
+ var image = Image.FromFile(inputImagePath);
+ image.AddAudio(inputAudioPath, outputPath);
```
Other available arguments could be found in `FFMpegCore.Arguments` namespace.
@@ -135,10 +134,11 @@ var videoFramesSource = new RawVideoPipeSource(CreateFrames(64)) //pass IEnumera
{
    FrameRate = 30 //set source frame rate
};
- FFMpegArguments
-     .FromPipeInput(videoFramesSource, <input_stream_options>)
-     .OutputToFile("temporary.mp4", false, <output_options>)
-     .ProcessSynchronously();
+ await FFMpegArguments
+     .FromPipeInput(videoFramesSource)
+     .OutputToFile(outputPath, false, options => options
+         .WithVideoCodec(VideoCodec.LibVpx))
+     .ProcessAsynchronously();
```
if you want to use `System.Drawing.Bitmap` as `IVideoFrame`, there is a `BitmapVideoFrameWrapper` wrapper class.
@@ -179,13 +179,19 @@ If these folders are not defined, it will try to find the binaries in `/root/(ff
#### Option 1
- The default value (`\\FFMPEG\\bin`) can be overwritten via the `FFMpegOptions` class:
+ The default value of an empty string (expecting ffmpeg to be found through PATH) can be overwritten via the `FFOptions` class:
```c#
- public Startup()
- {
-     FFMpegOptions.Configure(new FFMpegOptions { RootDirectory = "./bin", TempDirectory = "/tmp" });
- }
+ // setting global options
+ GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
+ // or
+ GlobalFFOptions.Configure(options => options.BinaryFolder = "./bin");
+ // or individual, per-run options
+ await FFMpegArguments
+     .FromFileInput(inputPath)
+     .OutputToFile(outputPath)
+     .ProcessAsynchronously(true, new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
```
#### Option 2
@@ -194,8 +200,8 @@ The root and temp directory for the ffmpeg binaries can be configured via the `f
```json
{
-   "RootDirectory": "./bin",
-   "TempDirectory": "/tmp"
+   "BinaryFolder": "./bin",
+   "TemporaryFilesFolder": "/tmp"
}
```
@@ -217,6 +223,6 @@ The root and temp directory for the ffmpeg binaries can be configured via the `f
### License
- Copyright © 2020
+ Copyright © 2021
Released under [MIT license](https://github.com/rosenbjerg/FFMpegCore/blob/master/LICENSE)