diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..bdb0cabc
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,17 @@
+# Auto detect text files and perform LF normalization
+* text=auto
+
+# Custom for Visual Studio
+*.cs diff=csharp
+
+# Standard to msysgit
+*.doc diff=astextplain
+*.DOC diff=astextplain
+*.docx diff=astextplain
+*.DOCX diff=astextplain
+*.dot diff=astextplain
+*.DOT diff=astextplain
+*.pdf diff=astextplain
+*.PDF diff=astextplain
+*.rtf diff=astextplain
+*.RTF diff=astextplain
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 00000000..6946920b
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,29 @@
+name: CI
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+ branches:
+ - master
+ - release
+
+jobs:
+ ci:
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ os: [windows-latest, ubuntu-latest]
+ timeout-minutes: 6
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Prepare .NET
+ uses: actions/setup-dotnet@v1
+ with:
+ dotnet-version: '5.0.x'
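+ # FFMpegCore shells out to the ffmpeg/ffprobe binaries, so they must be on the PATH for the test run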
+ - name: Prepare FFMpeg
+ uses: FedericoCarboni/setup-ffmpeg@v1
+ - name: Test with dotnet
+ run: dotnet test --logger GitHubActions
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..5ef0a4cc
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,20 @@
+name: NuGet release
+on:
+ push:
+ branches:
+ - release
+jobs:
+ release:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Prepare .NET
+ uses: actions/setup-dotnet@v1
+ with:
+ dotnet-version: '5.0.x'
+ - name: Build solution
+ run: dotnet build --output build -c Release
+ - name: Publish NuGet package
+ run: dotnet nuget push "build/*.nupkg" --source nuget.org --api-key ${{ secrets.NUGET_TOKEN }}
+
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..1298f0d1
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,343 @@
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
+
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+[Aa][Rr][Mm]/
+[Aa][Rr][Mm]64/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+
+# Visual Studio 2015/2017 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+Generated\ Files/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUNIT
+*.VisualState.xml
+TestResult.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# Benchmark Results
+BenchmarkDotNet.Artifacts/
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+# StyleCop
+StyleCopReport.xml
+
+# Files built by Visual Studio
+*_i.c
+*_p.c
+*_h.h
+*.ilk
+*.meta
+*.obj
+*.iobj
+*.pch
+*.pdb
+*.ipdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*_wpftmp.csproj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# Visual Studio Trace Files
+*.e2e
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# JustCode is a .NET coding add-in
+.JustCode
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# AxoCover is a Code Coverage Tool
+.axoCover/*
+!.axoCover/settings.json
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# The packages folder can be ignored because of Package Restore
+**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/[Pp]ackages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+*.appx
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!?*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+# ASP.NET Core default setup: bower directory is configured as wwwroot/lib/ and bower restore is true
+**/wwwroot/lib/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+ServiceFabricBackup/
+*.rptproj.bak
+
+# SQL Server files
+*.mdf
+*.ldf
+*.ndf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+*.rptproj.rsuser
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+# CodeRush personal settings
+.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+*.tss
+
+# Telerik's JustMock configuration file
+*.jmconfig
+
+# BizTalk build output
+*.btp.cs
+*.btm.cs
+*.odx.cs
+*.xsd.cs
+
+# OpenCover UI analysis results
+OpenCover/
+
+# Azure Stream Analytics local run output
+ASALocalRun/
+
+# MSBuild Binary and Structured Log
+*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+.mfractor/
+
+# Local History for Visual Studio
+.localhistory/
+
+# BeatPulse healthcheck temp database
+healthchecksdb
+
+!FFMpegCore/FFMPEG/bin/**/*
\ No newline at end of file
diff --git a/FFMpegCore.Examples/FFMpegCore.Examples.csproj b/FFMpegCore.Examples/FFMpegCore.Examples.csproj
new file mode 100644
index 00000000..f9daae75
--- /dev/null
+++ b/FFMpegCore.Examples/FFMpegCore.Examples.csproj
@@ -0,0 +1,12 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net5.0</TargetFramework>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\FFMpegCore\FFMpegCore.csproj" />
+  </ItemGroup>
+
+</Project>
diff --git a/FFMpegCore.Examples/Program.cs b/FFMpegCore.Examples/Program.cs
new file mode 100644
index 00000000..256ef3c8
--- /dev/null
+++ b/FFMpegCore.Examples/Program.cs
@@ -0,0 +1,124 @@
+using System;
+using System.Collections.Generic;
+using System.Drawing;
+using System.IO;
+using FFMpegCore;
+using FFMpegCore.Enums;
+using FFMpegCore.Pipes;
+using FFMpegCore.Extend;
+
+var inputPath = "/path/to/input";
+var outputPath = "/path/to/output";
+
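+// Analyse a media file with FFProbe, synchronously or asynchronously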
+{
+ var mediaInfo = FFProbe.Analyse(inputPath);
+}
+
+{
+ var mediaInfo = await FFProbe.AnalyseAsync(inputPath);
+}
+
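+// Transcode to H.264 (CRF 21) with AAC audio, scale to 720p, and enable faststart for web playback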
+{
+ FFMpegArguments
+ .FromFileInput(inputPath)
+ .OutputToFile(outputPath, false, options => options
+ .WithVideoCodec(VideoCodec.LibX264)
+ .WithConstantRateFactor(21)
+ .WithAudioCodec(AudioCodec.Aac)
+ .WithVariableBitrate(4)
+ .WithVideoFilters(filterOptions => filterOptions
+ .Scale(VideoSize.Hd))
+ .WithFastStart())
+ .ProcessSynchronously();
+}
+
+{
+ // process the snapshot in-memory and use the Bitmap directly
+ var bitmap = FFMpeg.Snapshot(inputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
+
+ // or persist the image to disk
+ FFMpeg.Snapshot(inputPath, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
+}
+
+var inputStream = new MemoryStream();
+var outputStream = new MemoryStream();
+
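+// Transcode from one in-memory stream to another by piping through ffmpeg (VP9 in a WebM container here)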
+{
+ await FFMpegArguments
+ .FromPipeInput(new StreamPipeSource(inputStream))
+ .OutputToPipe(new StreamPipeSink(outputStream), options => options
+ .WithVideoCodec("vp9")
+ .ForceFormat("webm"))
+ .ProcessAsynchronously();
+}
+
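+// Concatenate several videos into one output file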
+{
+ FFMpeg.Join(@"..\joined_video.mp4",
+ @"..\part1.mp4",
+ @"..\part2.mp4",
+ @"..\part3.mp4"
+ );
+}
+
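+// Render an image sequence as a video at 1 frame per second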
+{
+ FFMpeg.JoinImageSequence(@"..\joined_video.mp4", frameRate: 1,
+ ImageInfo.FromPath(@"..\1.png"),
+ ImageInfo.FromPath(@"..\2.png"),
+ ImageInfo.FromPath(@"..\3.png")
+ );
+}
+
+{
+ FFMpeg.Mute(inputPath, outputPath);
+}
+
+{
+ FFMpeg.ExtractAudio(inputPath, outputPath);
+}
+
+var inputAudioPath = "/path/to/input/audio";
+{
+ FFMpeg.ReplaceAudio(inputPath, inputAudioPath, outputPath);
+}
+
+var inputImagePath = "/path/to/input/image";
+{
+ FFMpeg.PosterWithAudio(inputPath, inputAudioPath, outputPath);
+ // or
+ var image = Image.FromFile(inputImagePath);
+ image.AddAudio(inputAudioPath, outputPath);
+}
+
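+// Generate frames programmatically and feed them to ffmpeg as a raw video pipe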
+IVideoFrame GetNextFrame() => throw new NotImplementedException();
+{
+ IEnumerable<IVideoFrame> CreateFrames(int count)
+ {
+ for(int i = 0; i < count; i++)
+ {
+ yield return GetNextFrame(); //method of generating new frames
+ }
+ }
+
+ var videoFramesSource = new RawVideoPipeSource(CreateFrames(64)) //pass IEnumerable<IVideoFrame> or IEnumerator<IVideoFrame> to constructor of RawVideoPipeSource
+ {
+ FrameRate = 30 //set source frame rate
+ };
+ await FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputPath, false, options => options
+ .WithVideoCodec(VideoCodec.LibVpx))
+ .ProcessAsynchronously();
+}
+
+{
+ // setting global options
+ GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
+ // or
+ GlobalFFOptions.Configure(options => options.BinaryFolder = "./bin");
+
+ // or individual, per-run options
+ await FFMpegArguments
+ .FromFileInput(inputPath)
+ .OutputToFile(outputPath)
+ .ProcessAsynchronously(true, new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
+}
\ No newline at end of file
diff --git a/FFMpegCore.Test/ArgumentBuilderTest.cs b/FFMpegCore.Test/ArgumentBuilderTest.cs
new file mode 100644
index 00000000..daa3eda5
--- /dev/null
+++ b/FFMpegCore.Test/ArgumentBuilderTest.cs
@@ -0,0 +1,390 @@
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System;
+using FFMpegCore.Arguments;
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Test
+{
+ [TestClass]
+ public class ArgumentBuilderTest
+ {
+ private readonly string[] _concatFiles = { "1.mp4", "2.mp4", "3.mp4", "4.mp4" };
+
+
+ [TestMethod]
+ public void Builder_BuildString_IO_1()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4").Arguments;
+ Assert.AreEqual("-i \"input.mp4\" \"output.mp4\" -y", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Scale()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", true, opt => opt
+ .WithVideoFilters(filterOptions => filterOptions
+ .Scale(VideoSize.Hd)))
+ .Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -vf \"scale=-1:720\" \"output.mp4\" -y", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_AudioCodec()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", true, opt => opt.WithAudioCodec(AudioCodec.Aac)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -c:a aac \"output.mp4\" -y", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_AudioBitrate()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", true, opt => opt.WithAudioBitrate(AudioQuality.Normal)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -b:a 128k \"output.mp4\" -y", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Quiet()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4").WithGlobalOptions(opt => opt.WithVerbosityLevel())
+ .OutputToFile("output.mp4", false).Arguments;
+ Assert.AreEqual("-hide_banner -loglevel error -i \"input.mp4\" \"output.mp4\"", str);
+ }
+
+
+ [TestMethod]
+ public void Builder_BuildString_AudioCodec_Fluent()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false,
+ opt => opt.WithAudioCodec(AudioCodec.Aac).WithAudioBitrate(128)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -c:a aac -b:a 128k \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_BitStream()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false,
+ opt => opt.WithBitStreamFilter(Channel.Audio, Filter.H264_Mp4ToAnnexB)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -bsf:a h264_mp4toannexb \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_HardwareAcceleration_Auto()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithHardwareAcceleration()).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -hwaccel \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_HardwareAcceleration_Specific()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false,
+ opt => opt.WithHardwareAcceleration(HardwareAccelerationDevice.CUVID)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -hwaccel cuvid \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Concat()
+ {
+ var str = FFMpegArguments.FromConcatInput(_concatFiles).OutputToFile("output.mp4", false).Arguments;
+ Assert.AreEqual("-i \"concat:1.mp4|2.mp4|3.mp4|4.mp4\" \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Copy_Audio()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.CopyChannel(Channel.Audio)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -c:a copy \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Copy_Video()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.CopyChannel(Channel.Video)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -c:v copy \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Copy_Both()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.CopyChannel()).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -c copy \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_DisableChannel_Audio()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.DisableChannel(Channel.Audio)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -an \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_DisableChannel_Video()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.DisableChannel(Channel.Video)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -vn \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_AudioSamplingRate_Default()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithAudioSamplingRate()).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -ar 48000 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_AudioSamplingRate()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithAudioSamplingRate(44000)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -ar 44000 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_VariableBitrate()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithVariableBitrate(5)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -vbr 5 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Faststart()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithFastStart()).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -movflags faststart \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Overwrite()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.OverwriteExisting()).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -y \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_RemoveMetadata()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithoutMetadata()).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -map_metadata -1 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Transpose()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt
+ .WithVideoFilters(filterOptions => filterOptions
+ .Transpose(Transposition.CounterClockwise90)))
+ .Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -vf \"transpose=2\" \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Mirroring()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt
+ .WithVideoFilters(filterOptions => filterOptions
+ .Mirror(Mirroring.Horizontal)))
+ .Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -vf \"hflip\" \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_TransposeScale()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt
+ .WithVideoFilters(filterOptions => filterOptions
+ .Transpose(Transposition.CounterClockwise90)
+ .Scale(200, 300)))
+ .Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -vf \"transpose=2, scale=200:300\" \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_ForceFormat()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.ForceFormat(VideoType.Mp4))
+ .OutputToFile("output.mp4", false, opt => opt.ForceFormat(VideoType.Mp4)).Arguments;
+ Assert.AreEqual("-f mp4 -i \"input.mp4\" -f mp4 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_FrameOutputCount()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithFrameOutputCount(50)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -vframes 50 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_FrameRate()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithFramerate(50)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -r 50 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Loop()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.Loop(50))
+ .Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -loop 50 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Seek()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.Seek(TimeSpan.FromSeconds(10))).OutputToFile("output.mp4", false, opt => opt.Seek(TimeSpan.FromSeconds(10))).Arguments;
+ Assert.AreEqual("-ss 00:00:10.000 -i \"input.mp4\" -ss 00:00:10.000 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Shortest()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.UsingShortest()).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -shortest \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Size()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.Resize(1920, 1080)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -s 1920x1080 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Speed()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithSpeedPreset(Speed.Fast)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -preset fast \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_DrawtextFilter()
+ {
+ var str = FFMpegArguments
+ .FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt
+ .WithVideoFilters(filterOptions => filterOptions
+ .DrawText(DrawTextOptions
+ .Create("Stack Overflow", "/path/to/font.ttf")
+ .WithParameter("fontcolor", "white")
+ .WithParameter("fontsize", "24")
+ .WithParameter("box", "1")
+ .WithParameter("boxcolor", "black@0.5")
+ .WithParameter("boxborderw", "5")
+ .WithParameter("x", "(w-text_w)/2")
+ .WithParameter("y", "(h-text_h)/2"))))
+ .Arguments;
+
+ Assert.AreEqual(
+ "-i \"input.mp4\" -vf \"drawtext=text='Stack Overflow':fontfile=/path/to/font.ttf:fontcolor=white:fontsize=24:box=1:boxcolor=black@0.5:boxborderw=5:x=(w-text_w)/2:y=(h-text_h)/2\" \"output.mp4\"",
+ str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_DrawtextFilter_Alt()
+ {
+ var str = FFMpegArguments
+ .FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt
+ .WithVideoFilters(filterOptions => filterOptions
+ .DrawText(DrawTextOptions
+ .Create("Stack Overflow", "/path/to/font.ttf", ("fontcolor", "white"), ("fontsize", "24")))))
+ .Arguments;
+
+ Assert.AreEqual(
+ "-i \"input.mp4\" -vf \"drawtext=text='Stack Overflow':fontfile=/path/to/font.ttf:fontcolor=white:fontsize=24\" \"output.mp4\"",
+ str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_StartNumber()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithStartNumber(50)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -start_number 50 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Threads_1()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.UsingThreads(50)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -threads 50 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Threads_2()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.UsingMultithreading(true)).Arguments;
+ Assert.AreEqual($"-i \"input.mp4\" -threads {Environment.ProcessorCount} \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Codec()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithVideoCodec(VideoCodec.LibX264)).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -c:v libx264 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Codec_Override()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", true,
+ opt => opt.WithVideoCodec(VideoCodec.LibX264).ForcePixelFormat("yuv420p")).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -c:v libx264 -pix_fmt yuv420p \"output.mp4\" -y", str);
+ }
+
+
+ [TestMethod]
+ public void Builder_BuildString_Duration()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithDuration(TimeSpan.FromSeconds(20))).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -t 00:00:20 \"output.mp4\"", str);
+ }
+
+ [TestMethod]
+ public void Builder_BuildString_Raw()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.WithCustomArgument(null!))
+ .OutputToFile("output.mp4", false, opt => opt.WithCustomArgument(null!)).Arguments;
+ Assert.AreEqual(" -i \"input.mp4\" \"output.mp4\"", str);
+
+ str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.WithCustomArgument("-acodec copy")).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -acodec copy \"output.mp4\"", str);
+ }
+
+
+ [TestMethod]
+ public void Builder_BuildString_ForcePixelFormat()
+ {
+ var str = FFMpegArguments.FromFileInput("input.mp4")
+ .OutputToFile("output.mp4", false, opt => opt.ForcePixelFormat("yuv444p")).Arguments;
+ Assert.AreEqual("-i \"input.mp4\" -pix_fmt yuv444p \"output.mp4\"", str);
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore.Test/AudioTest.cs b/FFMpegCore.Test/AudioTest.cs
new file mode 100644
index 00000000..2f7e4e9d
--- /dev/null
+++ b/FFMpegCore.Test/AudioTest.cs
@@ -0,0 +1,73 @@
+using System;
+using FFMpegCore.Test.Resources;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using FFMpegCore.Pipes;
+
+namespace FFMpegCore.Test
+{
+ [TestClass]
+ public class AudioTest
+ {
+ [TestMethod]
+ public void Audio_Remove()
+ {
+ using var outputFile = new TemporaryFile("out.mp4");
+
+ FFMpeg.Mute(TestResources.Mp4Video, outputFile);
+ var analysis = FFProbe.Analyse(outputFile);
+
+ Assert.IsTrue(analysis.VideoStreams.Any());
+ Assert.IsTrue(!analysis.AudioStreams.Any());
+ }
+
+ [TestMethod]
+ public void Audio_Save()
+ {
+ using var outputFile = new TemporaryFile("out.mp3");
+
+ FFMpeg.ExtractAudio(TestResources.Mp4Video, outputFile);
+ var analysis = FFProbe.Analyse(outputFile);
+
+ Assert.IsTrue(!analysis.VideoStreams.Any());
+ Assert.IsTrue(analysis.AudioStreams.Any());
+ }
+ [TestMethod]
+ public async Task Audio_FromRaw()
+ {
+ await using var file = File.Open(TestResources.RawAudio, FileMode.Open);
+ var memoryStream = new MemoryStream();
+ await FFMpegArguments
+ .FromPipeInput(new StreamPipeSource(file), options => options.ForceFormat("s16le"))
+ .OutputToPipe(new StreamPipeSink(memoryStream), options => options.ForceFormat("mp3"))
+ .ProcessAsynchronously();
+ }
+
+ [TestMethod]
+ public void Audio_Add()
+ {
+ using var outputFile = new TemporaryFile("out.mp4");
+
+ var success = FFMpeg.ReplaceAudio(TestResources.Mp4WithoutAudio, TestResources.Mp3Audio, outputFile);
+ var videoAnalysis = FFProbe.Analyse(TestResources.Mp4WithoutAudio);
+ var audioAnalysis = FFProbe.Analyse(TestResources.Mp3Audio);
+ var outputAnalysis = FFProbe.Analyse(outputFile);
+
+ Assert.IsTrue(success);
+ Assert.AreEqual(Math.Max(videoAnalysis.Duration.TotalSeconds, audioAnalysis.Duration.TotalSeconds), outputAnalysis.Duration.TotalSeconds, 0.15);
+ Assert.IsTrue(File.Exists(outputFile));
+ }
+
+ [TestMethod]
+ public void Image_AddAudio()
+ {
+ using var outputFile = new TemporaryFile("out.mp4");
+ FFMpeg.PosterWithAudio(TestResources.PngImage, TestResources.Mp3Audio, outputFile);
+ var analysis = FFProbe.Analyse(TestResources.Mp3Audio);
+ Assert.IsTrue(analysis.Duration.TotalSeconds > 0);
+ Assert.IsTrue(File.Exists(outputFile));
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore.Test/FFMpegCore.Test.csproj b/FFMpegCore.Test/FFMpegCore.Test.csproj
new file mode 100644
index 00000000..98c92745
--- /dev/null
+++ b/FFMpegCore.Test/FFMpegCore.Test.csproj
@@ -0,0 +1,92 @@
+
+
+
+ net5.0
+
+ false
+
+ disable
+
+ default
+
+
+
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ PreserveNewest
+
+
+ Always
+
+
+
+
+
+ PreserveNewest
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+ Always
+
+
+
+
+
+
+
+
diff --git a/FFMpegCore.Test/FFMpegOptionsTests.cs b/FFMpegCore.Test/FFMpegOptionsTests.cs
new file mode 100644
index 00000000..2be810f8
--- /dev/null
+++ b/FFMpegCore.Test/FFMpegOptionsTests.cs
@@ -0,0 +1,49 @@
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using Newtonsoft.Json;
+using System.IO;
+
+namespace FFMpegCore.Test
+{
+ [TestClass]
+ public class FFMpegOptionsTest
+ {
+ [TestMethod]
+ public void Options_Initialized()
+ {
+ Assert.IsNotNull(GlobalFFOptions.Current);
+ }
+
+ [TestMethod]
+ public void Options_Defaults_Configured()
+ {
+ Assert.AreEqual(new FFOptions().BinaryFolder, $"");
+ }
+
+ [TestMethod]
+ public void Options_Loaded_From_File()
+ {
+ Assert.AreEqual(
+ GlobalFFOptions.Current.BinaryFolder,
+ JsonConvert.DeserializeObject<FFOptions>(File.ReadAllText("ffmpeg.config.json")).BinaryFolder
+ );
+ }
+
+ [TestMethod]
+ public void Options_Set_Programmatically()
+ {
+ var original = GlobalFFOptions.Current;
+ try
+ {
+ GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "Whatever" });
+ Assert.AreEqual(
+ GlobalFFOptions.Current.BinaryFolder,
+ "Whatever"
+ );
+ }
+ finally
+ {
+ GlobalFFOptions.Configure(original);
+ }
+ }
+ }
+}
diff --git a/FFMpegCore.Test/FFProbeTests.cs b/FFMpegCore.Test/FFProbeTests.cs
new file mode 100644
index 00000000..5cabc4e2
--- /dev/null
+++ b/FFMpegCore.Test/FFProbeTests.cs
@@ -0,0 +1,104 @@
+using System;
+using System.IO;
+using System.Threading.Tasks;
+using FFMpegCore.Test.Resources;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace FFMpegCore.Test
+{
+ [TestClass]
+ public class FFProbeTests
+ {
+ [TestMethod]
+ public void Probe_TooLongOutput()
+ {
+ Assert.ThrowsException(() => FFProbe.Analyse(TestResources.Mp4Video, 5));
+ }
+
+
+ [TestMethod]
+ public async Task Audio_FromStream_Duration()
+ {
+ var fileAnalysis = await FFProbe.AnalyseAsync(TestResources.WebmVideo);
+ await using var inputStream = File.OpenRead(TestResources.WebmVideo);
+ var streamAnalysis = await FFProbe.AnalyseAsync(inputStream);
+ Assert.IsTrue(fileAnalysis.Duration == streamAnalysis.Duration);
+ }
+
+ [DataTestMethod]
+ [DataRow("0:00:03.008000", 0, 0, 0, 3, 8)]
+ [DataRow("05:12:59.177", 0, 5, 12, 59, 177)]
+ [DataRow("149:07:50.911750", 6, 5, 7, 50, 911)]
+ [DataRow("00:00:00.83", 0, 0, 0, 0, 830)]
+ public void MediaAnalysis_ParseDuration(string duration, int expectedDays, int expectedHours, int expectedMinutes, int expectedSeconds, int expectedMilliseconds)
+ {
+ var ffprobeStream = new FFProbeStream { Duration = duration };
+
+ var parsedDuration = MediaAnalysisUtils.ParseDuration(ffprobeStream);
+
+ Assert.AreEqual(expectedDays, parsedDuration.Days);
+ Assert.AreEqual(expectedHours, parsedDuration.Hours);
+ Assert.AreEqual(expectedMinutes, parsedDuration.Minutes);
+ Assert.AreEqual(expectedSeconds, parsedDuration.Seconds);
+ Assert.AreEqual(expectedMilliseconds, parsedDuration.Milliseconds);
+ }
+
+ [TestMethod]
+ public async Task Uri_Duration()
+ {
+ var fileAnalysis = await FFProbe.AnalyseAsync(new Uri("https://github.com/rosenbjerg/FFMpegCore/raw/master/FFMpegCore.Test/Resources/input_3sec.webm"));
+ Assert.IsNotNull(fileAnalysis);
+ }
+
+ [TestMethod]
+ public void Probe_Success()
+ {
+ var info = FFProbe.Analyse(TestResources.Mp4Video);
+ Assert.AreEqual(3, info.Duration.Seconds);
+
+ Assert.AreEqual("5.1", info.PrimaryAudioStream!.ChannelLayout);
+ Assert.AreEqual(6, info.PrimaryAudioStream.Channels);
+ Assert.AreEqual("AAC (Advanced Audio Coding)", info.PrimaryAudioStream.CodecLongName);
+ Assert.AreEqual("aac", info.PrimaryAudioStream.CodecName);
+ Assert.AreEqual("LC", info.PrimaryAudioStream.Profile);
+ Assert.AreEqual(377351, info.PrimaryAudioStream.BitRate);
+ Assert.AreEqual(48000, info.PrimaryAudioStream.SampleRateHz);
+
+ Assert.AreEqual(1471810, info.PrimaryVideoStream!.BitRate);
+ Assert.AreEqual(16, info.PrimaryVideoStream.DisplayAspectRatio.Width);
+ Assert.AreEqual(9, info.PrimaryVideoStream.DisplayAspectRatio.Height);
+ Assert.AreEqual("yuv420p", info.PrimaryVideoStream.PixelFormat);
+ Assert.AreEqual(1280, info.PrimaryVideoStream.Width);
+ Assert.AreEqual(720, info.PrimaryVideoStream.Height);
+ Assert.AreEqual(25, info.PrimaryVideoStream.AvgFrameRate);
+ Assert.AreEqual(25, info.PrimaryVideoStream.FrameRate);
+ Assert.AreEqual("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10", info.PrimaryVideoStream.CodecLongName);
+ Assert.AreEqual("h264", info.PrimaryVideoStream.CodecName);
+ Assert.AreEqual(8, info.PrimaryVideoStream.BitsPerRawSample);
+ Assert.AreEqual("Main", info.PrimaryVideoStream.Profile);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public async Task Probe_Async_Success()
+ {
+ var info = await FFProbe.AnalyseAsync(TestResources.Mp4Video);
+ Assert.AreEqual(3, info.Duration.Seconds);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Probe_Success_FromStream()
+ {
+ using var stream = File.OpenRead(TestResources.WebmVideo);
+ var info = FFProbe.Analyse(stream);
+ Assert.AreEqual(3, info.Duration.Seconds);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public async Task Probe_Success_FromStream_Async()
+ {
+ await using var stream = File.OpenRead(TestResources.WebmVideo);
+ var info = await FFProbe.AnalyseAsync(stream);
+ Assert.AreEqual(3, info.Duration.Seconds);
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore.Test/PixelFormatTests.cs b/FFMpegCore.Test/PixelFormatTests.cs
new file mode 100644
index 00000000..2c22fc5c
--- /dev/null
+++ b/FFMpegCore.Test/PixelFormatTests.cs
@@ -0,0 +1,41 @@
+using FFMpegCore.Exceptions;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+namespace FFMpegCore.Test
+{
+ [TestClass]
+ public class PixelFormatTests
+ {
+ [TestMethod]
+ public void PixelFormats_Enumerate()
+ {
+ var formats = FFMpeg.GetPixelFormats();
+ Assert.IsTrue(formats.Count > 0);
+ }
+
+ [TestMethod]
+ public void PixelFormats_TryGetExisting()
+ {
+ Assert.IsTrue(FFMpeg.TryGetPixelFormat("yuv420p", out _));
+ }
+
+ [TestMethod]
+ public void PixelFormats_TryGetNotExisting()
+ {
+ Assert.IsFalse(FFMpeg.TryGetPixelFormat("yuv420pppUnknown", out _));
+ }
+
+ [TestMethod]
+ public void PixelFormats_GetExisting()
+ {
+ var fmt = FFMpeg.GetPixelFormat("yuv420p");
+ Assert.IsTrue(fmt.Components == 3 && fmt.BitsPerPixel == 12);
+ }
+
+ [TestMethod]
+ public void PixelFormats_GetNotExisting()
+ {
+ Assert.ThrowsException<FFMpegException>(() => FFMpeg.GetPixelFormat("yuv420pppUnknown"));
+ }
+ }
+}
diff --git a/FFMpegCore.Test/Resources/TestResources.cs b/FFMpegCore.Test/Resources/TestResources.cs
new file mode 100644
index 00000000..6277dd3a
--- /dev/null
+++ b/FFMpegCore.Test/Resources/TestResources.cs
@@ -0,0 +1,24 @@
+namespace FFMpegCore.Test.Resources
+{
+ public enum AudioType
+ {
+ Mp3
+ }
+
+ public enum ImageType
+ {
+ Png
+ }
+
+ public static class TestResources
+ {
+ public static readonly string Mp4Video = "./Resources/input_3sec.mp4";
+ public static readonly string WebmVideo = "./Resources/input_3sec.webm";
+ public static readonly string Mp4WithoutVideo = "./Resources/input_audio_only_10sec.mp4";
+ public static readonly string Mp4WithoutAudio = "./Resources/input_video_only_3sec.mp4";
+ public static readonly string RawAudio = "./Resources/audio.raw";
+ public static readonly string Mp3Audio = "./Resources/audio.mp3";
+ public static readonly string PngImage = "./Resources/cover.png";
+ public static readonly string ImageCollection = "./Resources/images";
+ }
+}
diff --git a/FFMpegCore.Test/Resources/audio.mp3 b/FFMpegCore.Test/Resources/audio.mp3
new file mode 100644
index 00000000..1c323644
Binary files /dev/null and b/FFMpegCore.Test/Resources/audio.mp3 differ
diff --git a/FFMpegCore.Test/Resources/audio.raw b/FFMpegCore.Test/Resources/audio.raw
new file mode 100644
index 00000000..e131095f
Binary files /dev/null and b/FFMpegCore.Test/Resources/audio.raw differ
diff --git a/FFMpegCore.Test/Resources/cover.png b/FFMpegCore.Test/Resources/cover.png
new file mode 100644
index 00000000..71426c8c
Binary files /dev/null and b/FFMpegCore.Test/Resources/cover.png differ
diff --git a/FFMpegCore.Test/Resources/images/a.png b/FFMpegCore.Test/Resources/images/a.png
new file mode 100644
index 00000000..5c8a18ca
Binary files /dev/null and b/FFMpegCore.Test/Resources/images/a.png differ
diff --git a/FFMpegCore.Test/Resources/images/b.png b/FFMpegCore.Test/Resources/images/b.png
new file mode 100644
index 00000000..159f7eb5
Binary files /dev/null and b/FFMpegCore.Test/Resources/images/b.png differ
diff --git a/FFMpegCore.Test/Resources/images/c.png b/FFMpegCore.Test/Resources/images/c.png
new file mode 100644
index 00000000..1fa3ecca
Binary files /dev/null and b/FFMpegCore.Test/Resources/images/c.png differ
diff --git a/FFMpegCore.Test/Resources/images/d.png b/FFMpegCore.Test/Resources/images/d.png
new file mode 100644
index 00000000..15d316a6
Binary files /dev/null and b/FFMpegCore.Test/Resources/images/d.png differ
diff --git a/FFMpegCore.Test/Resources/images/e.png b/FFMpegCore.Test/Resources/images/e.png
new file mode 100644
index 00000000..205cd4b9
Binary files /dev/null and b/FFMpegCore.Test/Resources/images/e.png differ
diff --git a/FFMpegCore.Test/Resources/images/f.png b/FFMpegCore.Test/Resources/images/f.png
new file mode 100644
index 00000000..5c845d67
Binary files /dev/null and b/FFMpegCore.Test/Resources/images/f.png differ
diff --git a/FFMpegCore.Test/Resources/input_3sec.mp4 b/FFMpegCore.Test/Resources/input_3sec.mp4
new file mode 100644
index 00000000..7b59bc72
Binary files /dev/null and b/FFMpegCore.Test/Resources/input_3sec.mp4 differ
diff --git a/FFMpegCore.Test/Resources/input_3sec.webm b/FFMpegCore.Test/Resources/input_3sec.webm
new file mode 100644
index 00000000..8f6790fc
Binary files /dev/null and b/FFMpegCore.Test/Resources/input_3sec.webm differ
diff --git a/FFMpegCore.Test/Resources/input_audio_only_10sec.mp4 b/FFMpegCore.Test/Resources/input_audio_only_10sec.mp4
new file mode 100644
index 00000000..67243df6
Binary files /dev/null and b/FFMpegCore.Test/Resources/input_audio_only_10sec.mp4 differ
diff --git a/FFMpegCore.Test/Resources/input_video_only_3sec.mp4 b/FFMpegCore.Test/Resources/input_video_only_3sec.mp4
new file mode 100644
index 00000000..7d138489
Binary files /dev/null and b/FFMpegCore.Test/Resources/input_video_only_3sec.mp4 differ
diff --git a/FFMpegCore.Test/TemporaryFile.cs b/FFMpegCore.Test/TemporaryFile.cs
new file mode 100644
index 00000000..f64f5fea
--- /dev/null
+++ b/FFMpegCore.Test/TemporaryFile.cs
@@ -0,0 +1,22 @@
+using System;
+using System.IO;
+
+namespace FFMpegCore.Test
+{
+ public class TemporaryFile : IDisposable
+ {
+ private readonly string _path;
+
+ public TemporaryFile(string filename)
+ {
+ _path = Path.Combine(Path.GetTempPath(), $"{Guid.NewGuid()}-{filename}");
+ }
+
+ public static implicit operator string(TemporaryFile temporaryFile) => temporaryFile._path;
+ public void Dispose()
+ {
+ if (File.Exists(_path))
+ File.Delete(_path);
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore.Test/Utilities/BitmapSources.cs b/FFMpegCore.Test/Utilities/BitmapSources.cs
new file mode 100644
index 00000000..8ea02e85
--- /dev/null
+++ b/FFMpegCore.Test/Utilities/BitmapSources.cs
@@ -0,0 +1,220 @@
+using FFMpegCore.Extend;
+using System;
+using System.Collections.Generic;
+using System.Drawing;
+using System.Drawing.Imaging;
+using System.Numerics;
+using FFMpegCore.Pipes;
+
+namespace FFMpegCore.Test
+{
+ static class BitmapSource
+ {
+ public static IEnumerable<IVideoFrame> CreateBitmaps(int count, PixelFormat fmt, int w, int h)
+ {
+ for (int i = 0; i < count; i++)
+ {
+ using (var frame = CreateVideoFrame(i, fmt, w, h, 0.025f, 0.025f * w * 0.03f))
+ {
+ yield return frame;
+ }
+ }
+ }
+
+ public static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset)
+ {
+ var bitmap = new Bitmap(w, h, fmt);
+
+ offset = offset * index;
+
+ for (int y = 0; y < h; y++)
+ for (int x = 0; x < w; x++)
+ {
+ var xf = x / (float)w;
+ var yf = y / (float)h;
+ var nx = x * scaleNoise + offset;
+ var ny = y * scaleNoise + offset;
+
+ var value = (int)((Perlin.Noise(nx, ny) + 1.0f) / 2.0f * 255);
+
+ var color = Color.FromArgb((int)(value * xf), (int)(value * yf), value);
+
+ bitmap.SetPixel(x, y, color);
+ }
+
+ return new BitmapVideoFrameWrapper(bitmap);
+ }
+
+ //
+ // Perlin noise generator for Unity
+ // Keijiro Takahashi, 2013, 2015
+ // https://github.com/keijiro/PerlinNoise
+ //
+ // Based on the original implementation by Ken Perlin
+ // http://mrl.nyu.edu/~perlin/noise/
+ //
+ static class Perlin
+ {
+ #region Noise functions
+
+ public static float Noise(float x)
+ {
+ var X = (int)MathF.Floor(x) & 0xff;
+ x -= MathF.Floor(x);
+ var u = Fade(x);
+ return Lerp(u, Grad(perm[X], x), Grad(perm[X + 1], x - 1)) * 2;
+ }
+
+ public static float Noise(float x, float y)
+ {
+ var X = (int)MathF.Floor(x) & 0xff;
+ var Y = (int)MathF.Floor(y) & 0xff;
+ x -= MathF.Floor(x);
+ y -= MathF.Floor(y);
+ var u = Fade(x);
+ var v = Fade(y);
+ var A = (perm[X] + Y) & 0xff;
+ var B = (perm[X + 1] + Y) & 0xff;
+ return Lerp(v, Lerp(u, Grad(perm[A], x, y), Grad(perm[B], x - 1, y)),
+ Lerp(u, Grad(perm[A + 1], x, y - 1), Grad(perm[B + 1], x - 1, y - 1)));
+ }
+
+ public static float Noise(Vector2 coord)
+ {
+ return Noise(coord.X, coord.Y);
+ }
+
+ public static float Noise(float x, float y, float z)
+ {
+ var X = (int)MathF.Floor(x) & 0xff;
+ var Y = (int)MathF.Floor(y) & 0xff;
+ var Z = (int)MathF.Floor(z) & 0xff;
+ x -= MathF.Floor(x);
+ y -= MathF.Floor(y);
+ z -= MathF.Floor(z);
+ var u = Fade(x);
+ var v = Fade(y);
+ var w = Fade(z);
+ var A = (perm[X] + Y) & 0xff;
+ var B = (perm[X + 1] + Y) & 0xff;
+ var AA = (perm[A] + Z) & 0xff;
+ var BA = (perm[B] + Z) & 0xff;
+ var AB = (perm[A + 1] + Z) & 0xff;
+ var BB = (perm[B + 1] + Z) & 0xff;
+ return Lerp(w, Lerp(v, Lerp(u, Grad(perm[AA], x, y, z), Grad(perm[BA], x - 1, y, z)),
+ Lerp(u, Grad(perm[AB], x, y - 1, z), Grad(perm[BB], x - 1, y - 1, z))),
+ Lerp(v, Lerp(u, Grad(perm[AA + 1], x, y, z - 1), Grad(perm[BA + 1], x - 1, y, z - 1)),
+ Lerp(u, Grad(perm[AB + 1], x, y - 1, z - 1), Grad(perm[BB + 1], x - 1, y - 1, z - 1))));
+ }
+
+ public static float Noise(Vector3 coord)
+ {
+ return Noise(coord.X, coord.Y, coord.Z);
+ }
+
+ #endregion
+
+ #region fBm functions
+
+ public static float Fbm(float x, int octave)
+ {
+ var f = 0.0f;
+ var w = 0.5f;
+ for (var i = 0; i < octave; i++)
+ {
+ f += w * Noise(x);
+ x *= 2.0f;
+ w *= 0.5f;
+ }
+ return f;
+ }
+
+ public static float Fbm(Vector2 coord, int octave)
+ {
+ var f = 0.0f;
+ var w = 0.5f;
+ for (var i = 0; i < octave; i++)
+ {
+ f += w * Noise(coord);
+ coord *= 2.0f;
+ w *= 0.5f;
+ }
+ return f;
+ }
+
+ public static float Fbm(float x, float y, int octave)
+ {
+ return Fbm(new Vector2(x, y), octave);
+ }
+
+ public static float Fbm(Vector3 coord, int octave)
+ {
+ var f = 0.0f;
+ var w = 0.5f;
+ for (var i = 0; i < octave; i++)
+ {
+ f += w * Noise(coord);
+ coord *= 2.0f;
+ w *= 0.5f;
+ }
+ return f;
+ }
+
+ public static float Fbm(float x, float y, float z, int octave)
+ {
+ return Fbm(new Vector3(x, y, z), octave);
+ }
+
+ #endregion
+
+ #region Private functions
+
+ static float Fade(float t)
+ {
+ return t * t * t * (t * (t * 6 - 15) + 10);
+ }
+
+ static float Lerp(float t, float a, float b)
+ {
+ return a + t * (b - a);
+ }
+
+ static float Grad(int hash, float x)
+ {
+ return (hash & 1) == 0 ? x : -x;
+ }
+
+ static float Grad(int hash, float x, float y)
+ {
+ return ((hash & 1) == 0 ? x : -x) + ((hash & 2) == 0 ? y : -y);
+ }
+
+ static float Grad(int hash, float x, float y, float z)
+ {
+ var h = hash & 15;
+ var u = h < 8 ? x : y;
+ var v = h < 4 ? y : (h == 12 || h == 14 ? x : z);
+ return ((h & 1) == 0 ? u : -u) + ((h & 2) == 0 ? v : -v);
+ }
+
+ static int[] perm = {
+ 151,160,137,91,90,15,
+ 131,13,201,95,96,53,194,233,7,225,140,36,103,30,69,142,8,99,37,240,21,10,23,
+ 190, 6,148,247,120,234,75,0,26,197,62,94,252,219,203,117,35,11,32,57,177,33,
+ 88,237,149,56,87,174,20,125,136,171,168, 68,175,74,165,71,134,139,48,27,166,
+ 77,146,158,231,83,111,229,122,60,211,133,230,220,105,92,41,55,46,245,40,244,
+ 102,143,54, 65,25,63,161, 1,216,80,73,209,76,132,187,208, 89,18,169,200,196,
+ 135,130,116,188,159,86,164,100,109,198,173,186, 3,64,52,217,226,250,124,123,
+ 5,202,38,147,118,126,255,82,85,212,207,206,59,227,47,16,58,17,182,189,28,42,
+ 223,183,170,213,119,248,152, 2,44,154,163, 70,221,153,101,155,167, 43,172,9,
+ 129,22,39,253, 19,98,108,110,79,113,224,232,178,185, 112,104,218,246,97,228,
+ 251,34,242,193,238,210,144,12,191,179,162,241, 81,51,145,235,249,14,239,107,
+ 49,192,214, 31,181,199,106,157,184, 84,204,176,115,121,50,45,127, 4,150,254,
+ 138,236,205,93,222,114,67,29,24,72,243,141,128,195,78,66,215,61,156,180,
+ 151
+ };
+
+ #endregion
+ }
+ }
+}
diff --git a/FFMpegCore.Test/VideoTest.cs b/FFMpegCore.Test/VideoTest.cs
new file mode 100644
index 00000000..149dabdf
--- /dev/null
+++ b/FFMpegCore.Test/VideoTest.cs
@@ -0,0 +1,616 @@
+using FFMpegCore.Enums;
+using FFMpegCore.Test.Resources;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System;
+using System.Collections.Generic;
+using System.Drawing;
+using System.Drawing.Imaging;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+using FFMpegCore.Arguments;
+using FFMpegCore.Exceptions;
+using FFMpegCore.Pipes;
+
+namespace FFMpegCore.Test
+{
+ [TestClass]
+ public class VideoTest
+ {
+ [TestMethod, Timeout(10000)]
+ public void Video_ToOGV()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
+
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.WebmVideo)
+ .OutputToFile(outputFile, false)
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToMP4()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.WebmVideo)
+ .OutputToFile(outputFile, false)
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToMP4_YUV444p()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.WebmVideo)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264)
+ .ForcePixelFormat("yuv444p"))
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ var analysis = FFProbe.Analyse(outputFile);
+ Assert.IsTrue(analysis.VideoStreams.First().PixelFormat == "yuv444p");
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToMP4_Args()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.WebmVideo)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [DataTestMethod, Timeout(10000)]
+ [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
+ [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
+ public void Video_ToMP4_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
+ var success = FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToMP4_Args_Pipe_DifferentImageSizes()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var frames = new List<IVideoFrame>
+ {
+ BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
+ BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
+ };
+
+ var videoFramesSource = new RawVideoPipeSource(frames);
+ var ex = Assert.ThrowsException<FFMpegException>(() => FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessSynchronously());
+
+ Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
+ }
+
+
+ [TestMethod, Timeout(10000)]
+ public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var frames = new List<IVideoFrame>
+ {
+ BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
+ BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
+ };
+
+ var videoFramesSource = new RawVideoPipeSource(frames);
+ var ex = await Assert.ThrowsExceptionAsync<FFMpegException>(() => FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessAsynchronously());
+
+ Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToMP4_Args_Pipe_DifferentPixelFormats()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var frames = new List<IVideoFrame>
+ {
+ BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
+ BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0)
+ };
+
+ var videoFramesSource = new RawVideoPipeSource(frames);
+ var ex = Assert.ThrowsException<FFMpegException>(() => FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessSynchronously());
+
+ Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
+ }
+
+
+ [TestMethod, Timeout(10000)]
+ public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var frames = new List<IVideoFrame>
+ {
+ BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
+ BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0)
+ };
+
+ var videoFramesSource = new RawVideoPipeSource(frames);
+ var ex = await Assert.ThrowsExceptionAsync<FFMpegException>(() => FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessAsynchronously());
+
+ Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToMP4_Args_StreamPipe()
+ {
+ using var input = File.OpenRead(TestResources.WebmVideo);
+ using var output = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var success = FFMpegArguments
+ .FromPipeInput(new StreamPipeSource(input))
+ .OutputToFile(output, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public async Task Video_ToMP4_Args_StreamOutputPipe_Async_Failure()
+ {
+ await Assert.ThrowsExceptionAsync<FFMpegException>(async () =>
+ {
+ await using var ms = new MemoryStream();
+ var pipeSource = new StreamPipeSink(ms);
+ await FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToPipe(pipeSource, opt => opt.ForceFormat("mp4"))
+ .ProcessAsynchronously();
+ });
+ }
+
+
+ [TestMethod, Timeout(10000)]
+ public void Video_StreamFile_OutputToMemoryStream()
+ {
+ var output = new MemoryStream();
+
+ FFMpegArguments
+ .FromPipeInput(new StreamPipeSource(File.OpenRead(TestResources.WebmVideo)), opt => opt
+ .ForceFormat("webm"))
+ .OutputToPipe(new StreamPipeSink(output), opt => opt
+ .ForceFormat("mpegts"))
+ .ProcessSynchronously();
+
+ output.Position = 0;
+ var result = FFProbe.Analyse(output);
+ Console.WriteLine(result.Duration);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToMP4_Args_StreamOutputPipe_Failure()
+ {
+ Assert.ThrowsException<FFMpegException>(() =>
+ {
+ using var ms = new MemoryStream();
+ FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToPipe(new StreamPipeSink(ms), opt => opt
+ .ForceFormat("mkv"))
+ .ProcessSynchronously();
+ });
+ }
+
+
+ [TestMethod, Timeout(10000)]
+ public async Task Video_ToMP4_Args_StreamOutputPipe_Async()
+ {
+ await using var ms = new MemoryStream();
+ var pipeSource = new StreamPipeSink(ms);
+ await FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToPipe(pipeSource, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264)
+ .ForceFormat("matroska"))
+ .ProcessAsynchronously();
+ }
+
+ [TestMethod, Timeout(10000)]
+ public async Task TestDuplicateRun()
+ {
+ FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToFile("temporary.mp4")
+ .ProcessSynchronously();
+
+ await FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToFile("temporary.mp4")
+ .ProcessAsynchronously();
+
+ File.Delete("temporary.mp4");
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void TranscodeToMemoryStream_Success()
+ {
+ using var output = new MemoryStream();
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.WebmVideo)
+ .OutputToPipe(new StreamPipeSink(output), opt => opt
+ .WithVideoCodec(VideoCodec.LibVpx)
+ .ForceFormat("matroska"))
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+
+ output.Position = 0;
+ var inputAnalysis = FFProbe.Analyse(TestResources.WebmVideo);
+ var outputAnalysis = FFProbe.Analyse(output);
+ Assert.AreEqual(inputAnalysis.Duration.TotalSeconds, outputAnalysis.Duration.TotalSeconds, 0.3);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToTS()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.MpegTs.Extension}");
+
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToFile(outputFile, false)
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_ToTS_Args()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.MpegTs.Extension}");
+
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToFile(outputFile, false, opt => opt
+ .CopyChannel()
+ .WithBitStreamFilter(Channel.Video, Filter.H264_Mp4ToAnnexB)
+ .ForceFormat(VideoType.MpegTs))
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [DataTestMethod, Timeout(10000)]
+ [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
+ [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
+ public async Task Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
+ {
+ using var output = new TemporaryFile($"out{VideoType.Ts.Extension}");
+ var input = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
+
+ var success = await FFMpegArguments
+ .FromPipeInput(input)
+ .OutputToFile(output, false, opt => opt
+ .ForceFormat(VideoType.Ts))
+ .ProcessAsynchronously();
+ Assert.IsTrue(success);
+
+ var analysis = await FFProbe.AnalyseAsync(output);
+ Assert.AreEqual(VideoType.Ts.Name, analysis.Format.FormatName);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public async Task Video_ToOGV_Resize()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
+ var success = await FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToFile(outputFile, false, opt => opt
+ .Resize(200, 200)
+ .WithVideoCodec(VideoCodec.LibTheora))
+ .ProcessAsynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [DataTestMethod, Timeout(10000)]
+ [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
+ [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
+ // [DataRow(PixelFormat.Format48bppRgb)]
+ public void RawVideoPipeSource_Ogv_Scale(System.Drawing.Imaging.PixelFormat pixelFormat)
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
+ var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
+
+ FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoFilters(filterOptions => filterOptions
+ .Scale(VideoSize.Ed))
+ .WithVideoCodec(VideoCodec.LibTheora))
+ .ProcessSynchronously();
+
+ var analysis = FFProbe.Analyse(outputFile);
+ Assert.AreEqual((int)VideoSize.Ed, analysis.PrimaryVideoStream!.Width);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Scale_Mp4_Multithreaded()
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToFile(outputFile, false, opt => opt
+ .UsingMultithreading(true)
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [DataTestMethod, Timeout(10000)]
+ [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
+ [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
+ // [DataRow(PixelFormat.Format48bppRgb)]
+ public void Video_ToMP4_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
+ {
+ using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
+ var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
+
+ var success = FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithVideoCodec(VideoCodec.LibX264))
+ .ProcessSynchronously();
+ Assert.IsTrue(success);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_Snapshot_InMemory()
+ {
+ var input = FFProbe.Analyse(TestResources.Mp4Video);
+ using var bitmap = FFMpeg.Snapshot(TestResources.Mp4Video);
+
+ Assert.AreEqual(input.PrimaryVideoStream!.Width, bitmap.Width);
+ Assert.AreEqual(input.PrimaryVideoStream.Height, bitmap.Height);
+ Assert.AreEqual(bitmap.RawFormat, ImageFormat.Png);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_Snapshot_PersistSnapshot()
+ {
+ var outputPath = new TemporaryFile("out.png");
+ var input = FFProbe.Analyse(TestResources.Mp4Video);
+
+ FFMpeg.Snapshot(TestResources.Mp4Video, outputPath);
+
+ using var bitmap = Image.FromFile(outputPath);
+ Assert.AreEqual(input.PrimaryVideoStream!.Width, bitmap.Width);
+ Assert.AreEqual(input.PrimaryVideoStream.Height, bitmap.Height);
+ Assert.AreEqual(bitmap.RawFormat, ImageFormat.Png);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_Join()
+ {
+ var inputCopy = new TemporaryFile("copy-input.mp4");
+ File.Copy(TestResources.Mp4Video, inputCopy);
+
+ var outputPath = new TemporaryFile("out.mp4");
+ var input = FFProbe.Analyse(TestResources.Mp4Video);
+ var success = FFMpeg.Join(outputPath, TestResources.Mp4Video, inputCopy);
+ Assert.IsTrue(success);
+ Assert.IsTrue(File.Exists(outputPath));
+
+ var expectedDuration = input.Duration * 2;
+ var result = FFProbe.Analyse(outputPath);
+ Assert.AreEqual(expectedDuration.Days, result.Duration.Days);
+ Assert.AreEqual(expectedDuration.Hours, result.Duration.Hours);
+ Assert.AreEqual(expectedDuration.Minutes, result.Duration.Minutes);
+ Assert.AreEqual(expectedDuration.Seconds, result.Duration.Seconds);
+ Assert.AreEqual(input.PrimaryVideoStream!.Height, result.PrimaryVideoStream!.Height);
+ Assert.AreEqual(input.PrimaryVideoStream.Width, result.PrimaryVideoStream.Width);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_Join_Image_Sequence()
+ {
+ var imageSet = new List<ImageInfo>();
+ Directory.EnumerateFiles(TestResources.ImageCollection)
+ .Where(file => file.ToLower().EndsWith(".png"))
+ .ToList()
+ .ForEach(file =>
+ {
+ for (var i = 0; i < 15; i++)
+ {
+ imageSet.Add(new ImageInfo(file));
+ }
+ });
+
+ var outputFile = new TemporaryFile("out.mp4");
+ var success = FFMpeg.JoinImageSequence(outputFile, images: imageSet.ToArray());
+ Assert.IsTrue(success);
+ var result = FFProbe.Analyse(outputFile);
+ Assert.AreEqual(3, result.Duration.Seconds);
+ Assert.AreEqual(imageSet.First().Width, result.PrimaryVideoStream!.Width);
+ Assert.AreEqual(imageSet.First().Height, result.PrimaryVideoStream.Height);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_With_Only_Audio_Should_Extract_Metadata()
+ {
+ var video = FFProbe.Analyse(TestResources.Mp4WithoutVideo);
+ Assert.AreEqual(null, video.PrimaryVideoStream);
+ Assert.AreEqual("aac", video.PrimaryAudioStream!.CodecName);
+ Assert.AreEqual(10, video.Duration.TotalSeconds, 0.5);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_Duration()
+ {
+ var video = FFProbe.Analyse(TestResources.Mp4Video);
+ var outputFile = new TemporaryFile("out.mp4");
+
+ FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToFile(outputFile, false, opt => opt.WithDuration(TimeSpan.FromSeconds(video.Duration.TotalSeconds - 2)))
+ .ProcessSynchronously();
+
+ Assert.IsTrue(File.Exists(outputFile));
+ var outputVideo = FFProbe.Analyse(outputFile);
+
+ Assert.AreEqual(video.Duration.Days, outputVideo.Duration.Days);
+ Assert.AreEqual(video.Duration.Hours, outputVideo.Duration.Hours);
+ Assert.AreEqual(video.Duration.Minutes, outputVideo.Duration.Minutes);
+ Assert.AreEqual(video.Duration.Seconds - 2, outputVideo.Duration.Seconds);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_UpdatesProgress()
+ {
+ var outputFile = new TemporaryFile("out.mp4");
+
+ var percentageDone = 0.0;
+ var timeDone = TimeSpan.Zero;
+ void OnPercentageProgress(double percentage) => percentageDone = percentage;
+ void OnTimeProgress(TimeSpan time) => timeDone = time;
+
+ var analysis = FFProbe.Analyse(TestResources.Mp4Video);
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .OutputToFile(outputFile, false, opt => opt
+ .WithDuration(TimeSpan.FromSeconds(2)))
+ .NotifyOnProgress(OnPercentageProgress, analysis.Duration)
+ .NotifyOnProgress(OnTimeProgress)
+ .ProcessSynchronously();
+
+ Assert.IsTrue(success);
+ Assert.IsTrue(File.Exists(outputFile));
+ Assert.AreNotEqual(0.0, percentageDone);
+ Assert.AreNotEqual(TimeSpan.Zero, timeDone);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_OutputsData()
+ {
+ var outputFile = new TemporaryFile("out.mp4");
+ var dataReceived = false;
+
+ GlobalFFOptions.Configure(opt => opt.Encoding = Encoding.UTF8);
+ var success = FFMpegArguments
+ .FromFileInput(TestResources.Mp4Video)
+ .WithGlobalOptions(options => options
+ .WithVerbosityLevel(VerbosityLevel.Info))
+ .OutputToFile(outputFile, false, opt => opt
+ .WithDuration(TimeSpan.FromSeconds(2)))
+ .NotifyOnOutput((_, _) => dataReceived = true)
+ .ProcessSynchronously();
+
+ Assert.IsTrue(dataReceived);
+ Assert.IsTrue(success);
+ Assert.IsTrue(File.Exists(outputFile));
+ }
+
+ [TestMethod, Timeout(10000)]
+ public void Video_TranscodeInMemory()
+ {
+ using var resStream = new MemoryStream();
+ var reader = new StreamPipeSink(resStream);
+ var writer = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 128, 128));
+
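+ // Pipes generated bitmap frames into ffmpeg and reads the encoded webm back out without touching disk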
+ FFMpegArguments
+ .FromPipeInput(writer)
+ .OutputToPipe(reader, opt => opt
+ .WithVideoCodec("vp9")
+ .ForceFormat("webm"))
+ .ProcessSynchronously();
+
+ resStream.Position = 0;
+ var vi = FFProbe.Analyse(resStream);
+ Assert.AreEqual(vi.PrimaryVideoStream!.Width, 128);
+ Assert.AreEqual(vi.PrimaryVideoStream.Height, 128);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public async Task Video_Cancel_Async()
+ {
+ var outputFile = new TemporaryFile("out.mp4");
+
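+ // testsrc2/sine via the lavfi virtual input generate video and audio indefinitely; -re throttles to real time so the job keeps running until cancelled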
+ var task = FFMpegArguments
+ .FromFileInput("testsrc2=size=320x240[out0]; sine[out1]", false, args => args
+ .WithCustomArgument("-re")
+ .ForceFormat("lavfi"))
+ .OutputToFile(outputFile, false, opt => opt
+ .WithAudioCodec(AudioCodec.Aac)
+ .WithVideoCodec(VideoCodec.LibX264)
+ .WithSpeedPreset(Speed.VeryFast))
+ .CancellableThrough(out var cancel)
+ .ProcessAsynchronously(false);
+
+ await Task.Delay(300);
+ cancel();
+
+ var result = await task;
+
+ Assert.IsFalse(result);
+ }
+
+ [TestMethod, Timeout(10000)]
+ public async Task Video_Cancel_Async_With_Timeout()
+ {
+ var outputFile = new TemporaryFile("out.mp4");
+
+ var task = FFMpegArguments
+ .FromFileInput("testsrc2=size=320x240[out0]; sine[out1]", false, args => args
+ .WithCustomArgument("-re")
+ .ForceFormat("lavfi"))
+ .OutputToFile(outputFile, false, opt => opt
+ .WithAudioCodec(AudioCodec.Aac)
+ .WithVideoCodec(VideoCodec.LibX264)
+ .WithSpeedPreset(Speed.VeryFast))
+ .CancellableThrough(out var cancel, 10000)
+ .ProcessAsynchronously(false);
+
+ await Task.Delay(300);
+ cancel();
+
+ var result = await task;
+
+ var outputInfo = await FFProbe.AnalyseAsync(outputFile);
+
+ Assert.IsTrue(result);
+ Assert.IsNotNull(outputInfo);
+ Assert.AreEqual(320, outputInfo.PrimaryVideoStream!.Width);
+ Assert.AreEqual(240, outputInfo.PrimaryVideoStream.Height);
+ Assert.AreEqual("h264", outputInfo.PrimaryVideoStream.CodecName);
+ Assert.AreEqual("aac", outputInfo.PrimaryAudioStream!.CodecName);
+ }
+ }
+}
diff --git a/FFMpegCore.Test/ffmpeg.config.json b/FFMpegCore.Test/ffmpeg.config.json
new file mode 100644
index 00000000..b9c9a56b
--- /dev/null
+++ b/FFMpegCore.Test/ffmpeg.config.json
@@ -0,0 +1,3 @@
+{
+ "RootDirectory": ""
+}
\ No newline at end of file
diff --git a/FFMpegCore.sln b/FFMpegCore.sln
new file mode 100644
index 00000000..7a279806
--- /dev/null
+++ b/FFMpegCore.sln
@@ -0,0 +1,37 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 16
+VisualStudioVersion = 16.0.31005.135
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore", "FFMpegCore\FFMpegCore.csproj", "{19DE2EC2-9955-4712-8096-C22EF6713E4F}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Test", "FFMpegCore.Test\FFMpegCore.Test.csproj", "{F20C8353-72D9-454B-9F16-3624DBAD2328}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Examples", "FFMpegCore.Examples\FFMpegCore.Examples.csproj", "{3125CF91-FFBD-4E4E-8930-247116AFE772}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {19DE2EC2-9955-4712-8096-C22EF6713E4F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {19DE2EC2-9955-4712-8096-C22EF6713E4F}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {19DE2EC2-9955-4712-8096-C22EF6713E4F}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {19DE2EC2-9955-4712-8096-C22EF6713E4F}.Release|Any CPU.Build.0 = Release|Any CPU
+ {F20C8353-72D9-454B-9F16-3624DBAD2328}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F20C8353-72D9-454B-9F16-3624DBAD2328}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F20C8353-72D9-454B-9F16-3624DBAD2328}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F20C8353-72D9-454B-9F16-3624DBAD2328}.Release|Any CPU.Build.0 = Release|Any CPU
+ {3125CF91-FFBD-4E4E-8930-247116AFE772}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {3125CF91-FFBD-4E4E-8930-247116AFE772}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {3125CF91-FFBD-4E4E-8930-247116AFE772}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {3125CF91-FFBD-4E4E-8930-247116AFE772}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {F1B53337-60E7-49CB-A171-D4AEF6B4D5F0}
+ EndGlobalSection
+EndGlobal
diff --git a/FFMpegCore/Assembly.cs b/FFMpegCore/Assembly.cs
new file mode 100644
index 00000000..01176712
--- /dev/null
+++ b/FFMpegCore/Assembly.cs
@@ -0,0 +1,3 @@
+using System.Runtime.CompilerServices;
+
+[assembly: InternalsVisibleTo("FFMpegCore.Test")]
\ No newline at end of file
diff --git a/FFMpegCore/Extend/BitmapExtensions.cs b/FFMpegCore/Extend/BitmapExtensions.cs
new file mode 100644
index 00000000..e2f55059
--- /dev/null
+++ b/FFMpegCore/Extend/BitmapExtensions.cs
@@ -0,0 +1,23 @@
+using System;
+using System.Drawing;
+using System.IO;
+
+namespace FFMpegCore.Extend
+{
+ public static class BitmapExtensions
+ {
+ public static bool AddAudio(this Image poster, string audio, string output)
+ {
+ var destination = $"{Environment.TickCount}.png";
+ poster.Save(destination);
+ try
+ {
+ return FFMpeg.PosterWithAudio(destination, audio, output);
+ }
+ finally
+ {
+ if (File.Exists(destination)) File.Delete(destination);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/Extend/BitmapVideoFrameWrapper.cs b/FFMpegCore/Extend/BitmapVideoFrameWrapper.cs
new file mode 100644
index 00000000..b7f4c656
--- /dev/null
+++ b/FFMpegCore/Extend/BitmapVideoFrameWrapper.cs
@@ -0,0 +1,89 @@
+using System;
+using System.Drawing;
+using System.Drawing.Imaging;
+using System.IO;
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Threading.Tasks;
+using FFMpegCore.Pipes;
+
+namespace FFMpegCore.Extend
+{
+ public class BitmapVideoFrameWrapper : IVideoFrame, IDisposable
+ {
+ public int Width => Source.Width;
+
+ public int Height => Source.Height;
+
+ public string Format { get; private set; }
+
+ public Bitmap Source { get; private set; }
+
+ public BitmapVideoFrameWrapper(Bitmap bitmap)
+ {
+ Source = bitmap ?? throw new ArgumentNullException(nameof(bitmap));
+ Format = ConvertStreamFormat(bitmap.PixelFormat);
+ }
+
+ public void Serialize(Stream stream)
+ {
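+ // Locks the bitmap and writes its raw, stride-padded pixel rows straight to the output stream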
+ var data = Source.LockBits(new Rectangle(0, 0, Width, Height), ImageLockMode.ReadOnly, Source.PixelFormat);
+
+ try
+ {
+ var buffer = new byte[data.Stride * data.Height];
+ Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
+ stream.Write(buffer, 0, buffer.Length);
+ }
+ finally
+ {
+ Source.UnlockBits(data);
+ }
+ }
+
+ public async Task SerializeAsync(Stream stream, CancellationToken token)
+ {
+ var data = Source.LockBits(new Rectangle(0, 0, Width, Height), ImageLockMode.ReadOnly, Source.PixelFormat);
+
+ try
+ {
+ var buffer = new byte[data.Stride * data.Height];
+ Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
+ await stream.WriteAsync(buffer, 0, buffer.Length, token);
+ }
+ finally
+ {
+ Source.UnlockBits(data);
+ }
+ }
+
+ public void Dispose()
+ {
+ Source.Dispose();
+ }
+
+ private static string ConvertStreamFormat(PixelFormat fmt)
+ {
+ switch (fmt)
+ {
+ case PixelFormat.Format16bppGrayScale:
+ return "gray16le";
+ case PixelFormat.Format16bppRgb565:
+ return "bgr565le";
+ case PixelFormat.Format24bppRgb:
+ return "bgr24";
+ case PixelFormat.Format32bppArgb:
+ return "bgra";
+ case PixelFormat.Format32bppPArgb:
+ // This is not really the same as argb32
+ return "argb";
+ case PixelFormat.Format32bppRgb:
+ return "rgba";
+ case PixelFormat.Format48bppRgb:
+ return "rgb48le";
+ default:
+ throw new NotSupportedException($"Not supported pixel format {fmt}");
+ }
+ }
+ }
+}
diff --git a/FFMpegCore/Extend/UriExtensions.cs b/FFMpegCore/Extend/UriExtensions.cs
new file mode 100644
index 00000000..ebe92c0b
--- /dev/null
+++ b/FFMpegCore/Extend/UriExtensions.cs
@@ -0,0 +1,12 @@
+using System;
+
+namespace FFMpegCore.Extend
+{
+ public static class UriExtensions
+ {
+ public static bool SaveStream(this Uri uri, string output)
+ {
+ return FFMpeg.SaveM3U8Stream(uri, output);
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/AudioBitrateArgument.cs b/FFMpegCore/FFMpeg/Arguments/AudioBitrateArgument.cs
new file mode 100644
index 00000000..9c7e813a
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/AudioBitrateArgument.cs
@@ -0,0 +1,20 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents the audio bitrate parameter
+ /// </summary>
+ public class AudioBitrateArgument : IArgument
+ {
+ public readonly int Bitrate;
+ public AudioBitrateArgument(AudioQuality value) : this((int)value) { }
+ public AudioBitrateArgument(int bitrate)
+ {
+ Bitrate = bitrate;
+ }
+
+
+ public string Text => $"-b:a {Bitrate}k";
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/AudioCodecArgument.cs b/FFMpegCore/FFMpeg/Arguments/AudioCodecArgument.cs
new file mode 100644
index 00000000..273bb023
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/AudioCodecArgument.cs
@@ -0,0 +1,28 @@
+using FFMpegCore.Enums;
+using FFMpegCore.Exceptions;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents parameter of audio codec and its quality
+ /// </summary>
+ public class AudioCodecArgument : IArgument
+ {
+ public readonly string AudioCodec;
+
+ public AudioCodecArgument(Codec audioCodec)
+ {
+ if (audioCodec.Type != CodecType.Audio)
+ throw new FFMpegException(FFMpegExceptionType.Operation, $"Codec \"{audioCodec.Name}\" is not an audio codec");
+
+ AudioCodec = audioCodec.Name;
+ }
+
+ public AudioCodecArgument(string audioCodec)
+ {
+ AudioCodec = audioCodec;
+ }
+
+ public string Text => $"-c:a {AudioCodec.ToString().ToLowerInvariant()}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/AudioSamplingRateArgument.cs b/FFMpegCore/FFMpeg/Arguments/AudioSamplingRateArgument.cs
new file mode 100644
index 00000000..6f1365da
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/AudioSamplingRateArgument.cs
@@ -0,0 +1,16 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Audio sampling rate argument. Defaults to 48000 (Hz)
+ /// </summary>
+ public class AudioSamplingRateArgument : IArgument
+ {
+ public readonly int SamplingRate;
+ public AudioSamplingRateArgument(int samplingRate = 48000)
+ {
+ SamplingRate = samplingRate;
+ }
+
+ public string Text => $"-ar {SamplingRate}";
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/BitStreamFilterArgument.cs b/FFMpegCore/FFMpeg/Arguments/BitStreamFilterArgument.cs
new file mode 100644
index 00000000..e5a4b352
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/BitStreamFilterArgument.cs
@@ -0,0 +1,26 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents parameter of bitstream filter
+ /// </summary>
+ public class BitStreamFilterArgument : IArgument
+ {
+ public readonly Channel Channel;
+ public readonly Filter Filter;
+
+ public BitStreamFilterArgument(Channel channel, Filter filter)
+ {
+ Channel = channel;
+ Filter = filter;
+ }
+
+ public string Text => Channel switch
+ {
+ Channel.Audio => $"-bsf:a {Filter.ToString().ToLowerInvariant()}",
+ Channel.Video => $"-bsf:v {Filter.ToString().ToLowerInvariant()}",
+ _ => string.Empty
+ };
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/ConcatArgument.cs b/FFMpegCore/FFMpeg/Arguments/ConcatArgument.cs
new file mode 100644
index 00000000..9c6ffa24
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/ConcatArgument.cs
@@ -0,0 +1,26 @@
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Arguments
+{
+
+ /// <summary>
+ /// Represents parameter of concat argument
+ /// Used for creating video from multiple images or videos
+ /// </summary>
+ public class ConcatArgument : IInputArgument
+ {
+ public readonly IEnumerable<string> Values;
+ public ConcatArgument(IEnumerable<string> values)
+ {
+ Values = values;
+ }
+
+ public void Pre() { }
+ public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
+ public void Post() { }
+
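+ // Uses ffmpeg's concat protocol, joining the inputs with '|' in a single -i argument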
+ public string Text => $"-i \"concat:{string.Join(@"|", Values)}\"";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/ConstantRateFactorArgument.cs b/FFMpegCore/FFMpeg/Arguments/ConstantRateFactorArgument.cs
new file mode 100644
index 00000000..c02cfa33
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/ConstantRateFactorArgument.cs
@@ -0,0 +1,24 @@
+using System;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Constant Rate Factor (CRF) argument
+ /// </summary>
+ public class ConstantRateFactorArgument : IArgument
+ {
+ public readonly int Crf;
+
+ public ConstantRateFactorArgument(int crf)
+ {
+ if (crf < 0 || crf > 63)
+ {
+ throw new ArgumentException("Argument is outside range (0 - 63)", nameof(crf));
+ }
+
+ Crf = crf;
+ }
+
+ public string Text => $"-crf {Crf}";
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/CopyArgument.cs b/FFMpegCore/FFMpeg/Arguments/CopyArgument.cs
new file mode 100644
index 00000000..91419d58
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/CopyArgument.cs
@@ -0,0 +1,24 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents the copy parameter
+ /// Defines whether a channel (audio, video or both) should be copied to the output file
+ /// </summary>
+ public class CopyArgument : IArgument
+ {
+ public readonly Channel Channel;
+ public CopyArgument(Channel channel = Channel.Both)
+ {
+ Channel = channel;
+ }
+
+ public string Text => Channel switch
+ {
+ Channel.Audio => "-c:a copy",
+ Channel.Video => "-c:v copy",
+ _ => "-c copy"
+ };
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/CustomArgument.cs b/FFMpegCore/FFMpeg/Arguments/CustomArgument.cs
new file mode 100644
index 00000000..8eedb12f
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/CustomArgument.cs
@@ -0,0 +1,14 @@
+namespace FFMpegCore.Arguments
+{
+ public class CustomArgument : IArgument
+ {
+ public readonly string Argument;
+
+ public CustomArgument(string argument)
+ {
+ Argument = argument;
+ }
+
+ public string Text => Argument ?? string.Empty;
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/DemuxConcatArgument.cs b/FFMpegCore/FFMpeg/Arguments/DemuxConcatArgument.cs
new file mode 100644
index 00000000..c672c742
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/DemuxConcatArgument.cs
@@ -0,0 +1,29 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents parameter of concat argument
+ /// Used for creating video from multiple images or videos
+ /// </summary>
+ public class DemuxConcatArgument : IInputArgument
+ {
+ public readonly IEnumerable<string> Values;
+ public DemuxConcatArgument(IEnumerable<string> values)
+ {
+ Values = values.Select(value => $"file '{value}'");
+ }
+ private readonly string _tempFileName = Path.Combine(GlobalFFOptions.Current.TemporaryFilesFolder, $"concat_{Guid.NewGuid()}.txt");
+
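+ // Writes the file list to a temporary text file and feeds it to ffmpeg's concat demuxer; the file is deleted again in Post()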
+ public void Pre() => File.WriteAllLines(_tempFileName, Values);
+ public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
+ public void Post() => File.Delete(_tempFileName);
+
+ public string Text => $"-f concat -safe 0 -i \"{_tempFileName}\"";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/DisableChannelArgument.cs b/FFMpegCore/FFMpeg/Arguments/DisableChannelArgument.cs
new file mode 100644
index 00000000..d6837751
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/DisableChannelArgument.cs
@@ -0,0 +1,27 @@
+using FFMpegCore.Enums;
+using FFMpegCore.Exceptions;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents the disable channel parameter, used to exclude video or audio from the output
+ /// </summary>
+ public class DisableChannelArgument : IArgument
+ {
+ public readonly Channel Channel;
+
+ public DisableChannelArgument(Channel channel)
+ {
+ if (channel == Channel.Both)
+ throw new FFMpegException(FFMpegExceptionType.Conversion, "Cannot disable both channels");
+ Channel = channel;
+ }
+
+ public string Text => Channel switch
+ {
+ Channel.Video => "-vn",
+ Channel.Audio => "-an",
+ _ => string.Empty
+ };
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/DrawTextArgument.cs b/FFMpegCore/FFMpeg/Arguments/DrawTextArgument.cs
new file mode 100644
index 00000000..c148328b
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/DrawTextArgument.cs
@@ -0,0 +1,62 @@
+using System.Collections.Generic;
+using System.Linq;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Drawtext video filter argument
+ /// </summary>
+ public class DrawTextArgument : IVideoFilterArgument
+ {
+ public readonly DrawTextOptions Options;
+
+ public DrawTextArgument(DrawTextOptions options)
+ {
+ Options = options;
+ }
+
+ public string Key { get; } = "drawtext";
+ public string Value => Options.TextInternal;
+ }
+
+ public class DrawTextOptions
+ {
+ public readonly string Text;
+ public readonly string Font;
+ public readonly List<(string key, string value)> Parameters;
+
+ public static DrawTextOptions Create(string text, string font)
+ {
+ return new DrawTextOptions(text, font, new List<(string, string)>());
+ }
+ public static DrawTextOptions Create(string text, string font, params (string key, string value)[] parameters)
+ {
+ return new DrawTextOptions(text, font, parameters);
+ }
+
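+ // Builds the drawtext filter options as colon-separated key=value pairs, quoting values that contain spaces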
+ internal string TextInternal => string.Join(":", new[] {("text", Text), ("fontfile", Font)}.Concat(Parameters).Select(FormatArgumentPair));
+
+ private static string FormatArgumentPair((string key, string value) pair)
+ {
+ return $"{pair.key}={EncloseIfContainsSpace(pair.value)}";
+ }
+
+ private static string EncloseIfContainsSpace(string input)
+ {
+ return input.Contains(" ") ? $"'{input}'" : input;
+ }
+
+ private DrawTextOptions(string text, string font, IEnumerable<(string, string)> parameters)
+ {
+ Text = text;
+ Font = font;
+ Parameters = parameters.ToList();
+ }
+
+ public DrawTextOptions WithParameter(string key, string value)
+ {
+ Parameters.Add((key, value));
+ return this;
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/DurationArgument.cs b/FFMpegCore/FFMpeg/Arguments/DurationArgument.cs
new file mode 100644
index 00000000..e47b966a
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/DurationArgument.cs
@@ -0,0 +1,18 @@
+using System;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents duration parameter
+ /// </summary>
+ public class DurationArgument : IArgument
+ {
+ public readonly TimeSpan? Duration;
+ public DurationArgument(TimeSpan? duration)
+ {
+ Duration = duration;
+ }
+
+ public string Text => !Duration.HasValue ? string.Empty : $"-t {Duration.Value}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/FaststartArgument.cs b/FFMpegCore/FFMpeg/Arguments/FaststartArgument.cs
new file mode 100644
index 00000000..54cdd6fa
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/FaststartArgument.cs
@@ -0,0 +1,10 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Faststart argument - for moving moov atom to the start of file
+ /// </summary>
+ public class FaststartArgument : IArgument
+ {
+ public string Text => "-movflags faststart";
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/ForceFormatArgument.cs b/FFMpegCore/FFMpeg/Arguments/ForceFormatArgument.cs
new file mode 100644
index 00000000..9524698f
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/ForceFormatArgument.cs
@@ -0,0 +1,23 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents force format parameter
+ /// </summary>
+ public class ForceFormatArgument : IArgument
+ {
+ private readonly string _format;
+ public ForceFormatArgument(string format)
+ {
+ _format = format;
+ }
+
+ public ForceFormatArgument(ContainerFormat format)
+ {
+ _format = format.Name;
+ }
+
+ public string Text => $"-f {_format}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/ForcePixelFormat.cs b/FFMpegCore/FFMpeg/Arguments/ForcePixelFormat.cs
new file mode 100644
index 00000000..8402552e
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/ForcePixelFormat.cs
@@ -0,0 +1,17 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ public class ForcePixelFormat : IArgument
+ {
+ public string PixelFormat { get; }
+ public string Text => $"-pix_fmt {PixelFormat}";
+
+ public ForcePixelFormat(string format)
+ {
+ PixelFormat = format;
+ }
+
+ public ForcePixelFormat(PixelFormat format) : this(format.Name) { }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/FrameOutputCountArgument.cs b/FFMpegCore/FFMpeg/Arguments/FrameOutputCountArgument.cs
new file mode 100644
index 00000000..08bc56b5
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/FrameOutputCountArgument.cs
@@ -0,0 +1,16 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents frame output count parameter
+ /// </summary>
+ public class FrameOutputCountArgument : IArgument
+ {
+ public readonly int Frames;
+ public FrameOutputCountArgument(int frames)
+ {
+ Frames = frames;
+ }
+
+ public string Text => $"-vframes {Frames}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/FrameRateArgument.cs b/FFMpegCore/FFMpeg/Arguments/FrameRateArgument.cs
new file mode 100644
index 00000000..7c921af3
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/FrameRateArgument.cs
@@ -0,0 +1,17 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents frame rate parameter
+ /// </summary>
+ public class FrameRateArgument : IArgument
+ {
+ public readonly double Framerate;
+
+ public FrameRateArgument(double framerate)
+ {
+ Framerate = framerate;
+ }
+
+ public string Text => $"-r {Framerate.ToString(System.Globalization.CultureInfo.InvariantCulture)}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/HardwareAccelerationArgument.cs b/FFMpegCore/FFMpeg/Arguments/HardwareAccelerationArgument.cs
new file mode 100644
index 00000000..da4b9ee6
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/HardwareAccelerationArgument.cs
@@ -0,0 +1,18 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ public class HardwareAccelerationArgument : IArgument
+ {
+ public HardwareAccelerationDevice HardwareAccelerationDevice { get; }
+
+ public HardwareAccelerationArgument(HardwareAccelerationDevice hardwareAccelerationDevice)
+ {
+ HardwareAccelerationDevice = hardwareAccelerationDevice;
+ }
+
+ public string Text => HardwareAccelerationDevice != HardwareAccelerationDevice.Auto
+ ? $"-hwaccel {HardwareAccelerationDevice.ToString().ToLower()}"
+ : "-hwaccel";
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/IArgument.cs b/FFMpegCore/FFMpeg/Arguments/IArgument.cs
new file mode 100644
index 00000000..2a6c11aa
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/IArgument.cs
@@ -0,0 +1,10 @@
+namespace FFMpegCore.Arguments
+{
+ public interface IArgument
+ {
+ /// <summary>
+ /// The textual representation of the argument
+ /// </summary>
+ string Text { get; }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/IInputArgument.cs b/FFMpegCore/FFMpeg/Arguments/IInputArgument.cs
new file mode 100644
index 00000000..81a1cbe6
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/IInputArgument.cs
@@ -0,0 +1,6 @@
+namespace FFMpegCore.Arguments
+{
+ public interface IInputArgument : IInputOutputArgument
+ {
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/IInputOutputArgument.cs b/FFMpegCore/FFMpeg/Arguments/IInputOutputArgument.cs
new file mode 100644
index 00000000..99def827
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/IInputOutputArgument.cs
@@ -0,0 +1,12 @@
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Arguments
+{
+ public interface IInputOutputArgument : IArgument
+ {
+ void Pre();
+ Task During(CancellationToken cancellationToken = default);
+ void Post();
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/IOutputArgument.cs b/FFMpegCore/FFMpeg/Arguments/IOutputArgument.cs
new file mode 100644
index 00000000..09ccc83c
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/IOutputArgument.cs
@@ -0,0 +1,6 @@
+namespace FFMpegCore.Arguments
+{
+ public interface IOutputArgument : IInputOutputArgument
+ {
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/InputArgument.cs b/FFMpegCore/FFMpeg/Arguments/InputArgument.cs
new file mode 100644
index 00000000..68c34b46
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/InputArgument.cs
@@ -0,0 +1,34 @@
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents input parameter
+ /// </summary>
+ public class InputArgument : IInputArgument
+ {
+ public readonly bool VerifyExists;
+ public readonly string FilePath;
+
+ public InputArgument(bool verifyExists, string filePaths)
+ {
+ VerifyExists = verifyExists;
+ FilePath = filePaths;
+ }
+
+ public InputArgument(string path, bool verifyExists) : this(verifyExists, path) { }
+
+ public void Pre()
+ {
+ if (VerifyExists && !File.Exists(FilePath))
+ throw new FileNotFoundException("Input file not found", FilePath);
+ }
+
+ public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
+ public void Post() { }
+
+ public string Text => $"-i \"{FilePath}\"";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/InputDeviceArgument.cs b/FFMpegCore/FFMpeg/Arguments/InputDeviceArgument.cs
new file mode 100644
index 00000000..f276bbb3
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/InputDeviceArgument.cs
@@ -0,0 +1,26 @@
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents an input device parameter
+ /// </summary>
+ public class InputDeviceArgument : IInputArgument
+ {
+ private readonly string Device;
+
+ public InputDeviceArgument(string device)
+ {
+ Device = device;
+ }
+
+ public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
+
+ public void Pre() { }
+
+ public void Post() { }
+
+ public string Text => $"-i {Device}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/InputPipeArgument.cs b/FFMpegCore/FFMpeg/Arguments/InputPipeArgument.cs
new file mode 100644
index 00000000..479fa905
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/InputPipeArgument.cs
@@ -0,0 +1,30 @@
+using System.IO.Pipes;
+using System.Threading;
+using System.Threading.Tasks;
+using FFMpegCore.Pipes;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents input parameter for a named pipe
+ /// </summary>
+ public class InputPipeArgument : PipeArgument, IInputArgument
+ {
+ public readonly IPipeSource Writer;
+
+ public InputPipeArgument(IPipeSource writer) : base(PipeDirection.Out)
+ {
+ Writer = writer;
+ }
+
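+ // ffmpeg opens the named pipe path as if it were a regular input file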
+ public override string Text => $"-y {Writer.GetStreamArguments()} -i \"{PipePath}\"";
+
+ protected override async Task ProcessDataAsync(CancellationToken token)
+ {
+ await Pipe.WaitForConnectionAsync(token).ConfigureAwait(false);
+ if (!Pipe.IsConnected)
+ throw new TaskCanceledException();
+ await Writer.WriteAsync(Pipe, token).ConfigureAwait(false);
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/LoopArgument.cs b/FFMpegCore/FFMpeg/Arguments/LoopArgument.cs
new file mode 100644
index 00000000..26adc3eb
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/LoopArgument.cs
@@ -0,0 +1,16 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents loop parameter
+ /// </summary>
+ public class LoopArgument : IArgument
+ {
+ public readonly int Times;
+ public LoopArgument(int times)
+ {
+ Times = times;
+ }
+
+ public string Text => $"-loop {Times}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/OutputArgument.cs b/FFMpegCore/FFMpeg/Arguments/OutputArgument.cs
new file mode 100644
index 00000000..c2aad382
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/OutputArgument.cs
@@ -0,0 +1,39 @@
+using System;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using FFMpegCore.Exceptions;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents output parameter
+ /// </summary>
+ public class OutputArgument : IOutputArgument
+ {
+ public readonly string Path;
+ public readonly bool Overwrite;
+
+ public OutputArgument(string path, bool overwrite = true)
+ {
+ Path = path;
+ Overwrite = overwrite;
+ }
+
+ public void Pre()
+ {
+ if (!Overwrite && File.Exists(Path))
+ throw new FFMpegException(FFMpegExceptionType.File, "Output file already exists and overwrite is disabled");
+ }
+ public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
+ public void Post()
+ {
+ }
+
+ public OutputArgument(FileInfo value) : this(value.FullName) { }
+
+ public OutputArgument(Uri value) : this(value.AbsolutePath) { }
+
+ public string Text => $"\"{Path}\"{(Overwrite ? " -y" : string.Empty)}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/OutputPipeArgument.cs b/FFMpegCore/FFMpeg/Arguments/OutputPipeArgument.cs
new file mode 100644
index 00000000..f089a1e4
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/OutputPipeArgument.cs
@@ -0,0 +1,27 @@
+using System.IO.Pipes;
+using System.Threading;
+using System.Threading.Tasks;
+using FFMpegCore.Pipes;
+
+namespace FFMpegCore.Arguments
+{
+ public class OutputPipeArgument : PipeArgument, IOutputArgument
+ {
+ public readonly IPipeSink Reader;
+
+ public OutputPipeArgument(IPipeSink reader) : base(PipeDirection.In)
+ {
+ Reader = reader;
+ }
+
+ public override string Text => $"\"{PipePath}\" -y";
+
+ protected override async Task ProcessDataAsync(CancellationToken token)
+ {
+ await Pipe.WaitForConnectionAsync(token).ConfigureAwait(false);
+ if (!Pipe.IsConnected)
+ throw new TaskCanceledException();
+ await Reader.ReadAsync(Pipe, token).ConfigureAwait(false);
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/OutputUrlArgument.cs b/FFMpegCore/FFMpeg/Arguments/OutputUrlArgument.cs
new file mode 100644
index 00000000..15cbef90
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/OutputUrlArgument.cs
@@ -0,0 +1,27 @@
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents outputting to url using supported protocols
+ /// See http://ffmpeg.org/ffmpeg-protocols.html
+ /// </summary>
+ public class OutputUrlArgument : IOutputArgument
+ {
+ public readonly string Url;
+
+ public OutputUrlArgument(string url)
+ {
+ Url = url;
+ }
+
+ public void Post() { }
+
+ public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
+
+ public void Pre() { }
+
+ public string Text => Url;
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/OverwriteArgument.cs b/FFMpegCore/FFMpeg/Arguments/OverwriteArgument.cs
new file mode 100644
index 00000000..3a633af6
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/OverwriteArgument.cs
@@ -0,0 +1,11 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents overwrite parameter
+ /// Defines whether the output file should be overwritten if it already exists
+ /// </summary>
+ public class OverwriteArgument : IArgument
+ {
+ public string Text => "-y";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/PipeArgument.cs b/FFMpegCore/FFMpeg/Arguments/PipeArgument.cs
new file mode 100644
index 00000000..fcb944ad
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/PipeArgument.cs
@@ -0,0 +1,59 @@
+using System;
+using System.Diagnostics;
+using System.IO.Pipes;
+using System.Threading;
+using System.Threading.Tasks;
+using FFMpegCore.Pipes;
+
+namespace FFMpegCore.Arguments
+{
+ public abstract class PipeArgument
+ {
+ private string PipeName { get; }
+ public string PipePath => PipeHelpers.GetPipePath(PipeName);
+
+ protected NamedPipeServerStream Pipe { get; private set; } = null!;
+ private readonly PipeDirection _direction;
+
+ protected PipeArgument(PipeDirection direction)
+ {
+ PipeName = PipeHelpers.GetUnqiuePipeName();
+ _direction = direction;
+ }
+
+ public void Pre()
+ {
+ if (Pipe != null)
+ throw new InvalidOperationException("Pipe has already been opened");
+
+ Pipe = new NamedPipeServerStream(PipeName, _direction, 1, PipeTransmissionMode.Byte, PipeOptions.Asynchronous);
+ }
+
+ public void Post()
+ {
+ Debug.WriteLine($"Disposing NamedPipeServerStream on {GetType().Name}");
+ Pipe?.Dispose();
+ Pipe = null!;
+ }
+
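+ // Runs the pipe data transfer for the lifetime of the ffmpeg process; cancellation ends the transfer before the pipe is disconnected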
+ public async Task During(CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await ProcessDataAsync(cancellationToken);
+ }
+ catch (TaskCanceledException)
+ {
+ Debug.WriteLine($"ProcessDataAsync on {GetType().Name} cancelled");
+ }
+ finally
+ {
+ Debug.WriteLine($"Disconnecting NamedPipeServerStream on {GetType().Name}");
+ Pipe?.Disconnect();
+ }
+ }
+
+ protected abstract Task ProcessDataAsync(CancellationToken token);
+ public abstract string Text { get; }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/RemoveMetadataArgument.cs b/FFMpegCore/FFMpeg/Arguments/RemoveMetadataArgument.cs
new file mode 100644
index 00000000..29cdac62
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/RemoveMetadataArgument.cs
@@ -0,0 +1,10 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Remove metadata argument
+ /// </summary>
+ public class RemoveMetadataArgument : IArgument
+ {
+ public string Text => "-map_metadata -1";
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/ScaleArgument.cs b/FFMpegCore/FFMpeg/Arguments/ScaleArgument.cs
new file mode 100644
index 00000000..6ed2b316
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/ScaleArgument.cs
@@ -0,0 +1,27 @@
+using System.Drawing;
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents scale parameter
+ /// </summary>
+ public class ScaleArgument : IVideoFilterArgument
+ {
+ public readonly Size? Size;
+ public ScaleArgument(Size? size)
+ {
+ Size = size;
+ }
+
+ public ScaleArgument(int width, int height) : this(new Size(width, height)) { }
+
+ public ScaleArgument(VideoSize videosize)
+ {
+ Size = videosize == VideoSize.Original ? null : (Size?)new Size(-1, (int)videosize);
+ }
+
+ public string Key { get; } = "scale";
+ public string Value => Size == null ? string.Empty : $"{Size.Value.Width}:{Size.Value.Height}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs b/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs
new file mode 100644
index 00000000..1b58890e
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs
@@ -0,0 +1,35 @@
+using System;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents seek parameter
+ /// </summary>
+ public class SeekArgument : IArgument
+ {
+ public readonly TimeSpan? SeekTo;
+
+ public SeekArgument(TimeSpan? seekTo)
+ {
+ SeekTo = seekTo;
+ }
+
+ public string Text {
+ get {
+ if(SeekTo.HasValue)
+ {
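+ // Fold whole days into the hour component so the -ss value stays in HH:mm:ss.fff form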
+ int hours = SeekTo.Value.Hours;
+ if(SeekTo.Value.Days > 0)
+ {
+ hours += SeekTo.Value.Days * 24;
+ }
+ return $"-ss {hours.ToString("00")}:{SeekTo.Value.Minutes.ToString("00")}:{SeekTo.Value.Seconds.ToString("00")}.{SeekTo.Value.Milliseconds.ToString("000")}";
+ }
+ else
+ {
+ return string.Empty;
+ }
+ }
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/SetMirroringArgument.cs b/FFMpegCore/FFMpeg/Arguments/SetMirroringArgument.cs
new file mode 100644
index 00000000..fff98f3d
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/SetMirroringArgument.cs
@@ -0,0 +1,30 @@
+using FFMpegCore.Enums;
+using System;
+
+namespace FFMpegCore.Arguments
+{
+ public class SetMirroringArgument : IVideoFilterArgument
+ {
+ public SetMirroringArgument(Mirroring mirroring)
+ {
+ Mirroring = mirroring;
+ }
+
+ public Mirroring Mirroring { get; set; }
+
+ public string Key => string.Empty;
+
+ public string Value
+ {
+ get
+ {
+ return Mirroring switch
+ {
+ Mirroring.Horizontal => "hflip",
+ Mirroring.Vertical => "vflip",
+ _ => throw new ArgumentOutOfRangeException(nameof(Mirroring))
+ };
+ }
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/ShortestArgument.cs b/FFMpegCore/FFMpeg/Arguments/ShortestArgument.cs
new file mode 100644
index 00000000..d85813e2
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/ShortestArgument.cs
@@ -0,0 +1,17 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents shortest parameter
+ /// </summary>
+ public class ShortestArgument : IArgument
+ {
+ public readonly bool Shortest;
+
+ public ShortestArgument(bool shortest)
+ {
+ Shortest = shortest;
+ }
+
+ public string Text => Shortest ? "-shortest" : string.Empty;
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/SizeArgument.cs b/FFMpegCore/FFMpeg/Arguments/SizeArgument.cs
new file mode 100644
index 00000000..924c0a0a
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/SizeArgument.cs
@@ -0,0 +1,20 @@
+using System.Drawing;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents size parameter
+ /// </summary>
+ public class SizeArgument : IArgument
+ {
+ public readonly Size? Size;
+ public SizeArgument(Size? size)
+ {
+ Size = size;
+ }
+
+ public SizeArgument(int width, int height) : this(new Size(width, height)) { }
+
+ public string Text => Size == null ? string.Empty : $"-s {Size.Value.Width}x{Size.Value.Height}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/SpeedPresetArgument.cs b/FFMpegCore/FFMpeg/Arguments/SpeedPresetArgument.cs
new file mode 100644
index 00000000..6046c3c0
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/SpeedPresetArgument.cs
@@ -0,0 +1,19 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents speed preset parameter
+ /// </summary>
+ public class SpeedPresetArgument : IArgument
+ {
+ public readonly Speed Speed;
+
+ public SpeedPresetArgument(Speed speed)
+ {
+ Speed = speed;
+ }
+
+ public string Text => $"-preset {Speed.ToString().ToLowerInvariant()}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/StartNumberArgument.cs b/FFMpegCore/FFMpeg/Arguments/StartNumberArgument.cs
new file mode 100644
index 00000000..f7c09da8
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/StartNumberArgument.cs
@@ -0,0 +1,17 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents start number parameter
+ /// </summary>
+ public class StartNumberArgument : IArgument
+ {
+ public readonly int StartNumber;
+
+ public StartNumberArgument(int startNumber)
+ {
+ StartNumber = startNumber;
+ }
+
+ public string Text => $"-start_number {StartNumber}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/ThreadsArgument.cs b/FFMpegCore/FFMpeg/Arguments/ThreadsArgument.cs
new file mode 100644
index 00000000..6fd94e64
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/ThreadsArgument.cs
@@ -0,0 +1,21 @@
+using System;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents threads parameter
+ /// Number of threads used for video encoding
+ /// </summary>
+ public class ThreadsArgument : IArgument
+ {
+ public readonly int Threads;
+ public ThreadsArgument(int threads)
+ {
+ Threads = threads;
+ }
+
+ public ThreadsArgument(bool isMultiThreaded) : this(isMultiThreaded ? Environment.ProcessorCount : 1) { }
+
+ public string Text => $"-threads {Threads}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/TransposeArgument.cs b/FFMpegCore/FFMpeg/Arguments/TransposeArgument.cs
new file mode 100644
index 00000000..bd15c47e
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/TransposeArgument.cs
@@ -0,0 +1,23 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Transpose argument.
+ /// 0 = 90CounterClockwise and Vertical Flip (default)
+ /// 1 = 90Clockwise
+ /// 2 = 90CounterClockwise
+ /// 3 = 90Clockwise and Vertical Flip
+ /// </summary>
+ public class TransposeArgument : IVideoFilterArgument
+ {
+ public readonly Transposition Transposition;
+ public TransposeArgument(Transposition transposition)
+ {
+ Transposition = transposition;
+ }
+
+ public string Key { get; } = "transpose";
+ public string Value => ((int)Transposition).ToString();
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/VariableBitRateArgument.cs b/FFMpegCore/FFMpeg/Arguments/VariableBitRateArgument.cs
new file mode 100644
index 00000000..b656ec4a
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/VariableBitRateArgument.cs
@@ -0,0 +1,24 @@
+using System;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Variable bitrate (VBR) argument
+ /// </summary>
+ public class VariableBitRateArgument : IArgument
+ {
+ public readonly int Vbr;
+
+ public VariableBitRateArgument(int vbr)
+ {
+ if (vbr < 0 || vbr > 5)
+ {
+ throw new ArgumentException("Argument is outside range (0 - 5)", nameof(vbr));
+ }
+
+ Vbr = vbr;
+ }
+
+ public string Text => $"-vbr {Vbr}";
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/VerbosityLevelArgument.cs b/FFMpegCore/FFMpeg/Arguments/VerbosityLevelArgument.cs
new file mode 100644
index 00000000..f128aeb3
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/VerbosityLevelArgument.cs
@@ -0,0 +1,25 @@
+namespace FFMpegCore.Arguments
+{
+ public class VerbosityLevelArgument : IArgument
+ {
+ private readonly VerbosityLevel _verbosityLevel;
+
+ public VerbosityLevelArgument(VerbosityLevel verbosityLevel)
+ {
+ _verbosityLevel = verbosityLevel;
+ }
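+ // Levels below Info also hide the ffmpeg banner to keep the output terse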
+ public string Text => $"{((int)_verbosityLevel < 32 ? "-hide_banner " : "")}-loglevel {_verbosityLevel.ToString().ToLowerInvariant()}";
+ }
+
+ public enum VerbosityLevel
+ {
+ Quiet = -8,
+ Fatal = 8,
+ Error = 16,
+ Warning = 24,
+ Info = 32,
+ Verbose = 40,
+ Debug = 48,
+ Trace = 56
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/VideoBitrateArgument.cs b/FFMpegCore/FFMpeg/Arguments/VideoBitrateArgument.cs
new file mode 100644
index 00000000..ea5e641d
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/VideoBitrateArgument.cs
@@ -0,0 +1,17 @@
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents video bitrate parameter
+ /// </summary>
+ public class VideoBitrateArgument : IArgument
+ {
+ public readonly int Bitrate;
+
+ public VideoBitrateArgument(int bitrate)
+ {
+ Bitrate = bitrate;
+ }
+
+ public string Text => $"-b:v {Bitrate}k";
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Arguments/VideoCodecArgument.cs b/FFMpegCore/FFMpeg/Arguments/VideoCodecArgument.cs
new file mode 100644
index 00000000..93868225
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/VideoCodecArgument.cs
@@ -0,0 +1,28 @@
+using FFMpegCore.Enums;
+using FFMpegCore.Exceptions;
+
+namespace FFMpegCore.Arguments
+{
+ /// <summary>
+ /// Represents video codec parameter
+ /// </summary>
+ public class VideoCodecArgument : IArgument
+ {
+ public readonly string Codec;
+
+ public VideoCodecArgument(string codec)
+ {
+ Codec = codec;
+ }
+
+ public VideoCodecArgument(Codec value)
+ {
+ if (value.Type != CodecType.Video)
+ throw new FFMpegException(FFMpegExceptionType.Operation, $"Codec \"{value.Name}\" is not a video codec");
+
+ Codec = value.Name;
+ }
+
+ public string Text => $"-c:v {Codec}";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Arguments/VideoFiltersArgument.cs b/FFMpegCore/FFMpeg/Arguments/VideoFiltersArgument.cs
new file mode 100644
index 00000000..fa4ae1ee
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Arguments/VideoFiltersArgument.cs
@@ -0,0 +1,60 @@
+using System.Collections.Generic;
+using System.Drawing;
+using System.Linq;
+using FFMpegCore.Enums;
+using FFMpegCore.Exceptions;
+
+namespace FFMpegCore.Arguments
+{
+ public class VideoFiltersArgument : IArgument
+ {
+ public readonly VideoFilterOptions Options;
+
+ public VideoFiltersArgument(VideoFilterOptions options)
+ {
+ Options = options;
+ }
+
+ public string Text => GetText();
+
+ private string GetText()
+ {
+ if (!Options.Arguments.Any())
+ throw new FFMpegArgumentException("No video-filter arguments provided");
+
+ var arguments = Options.Arguments
+ .Where(arg => !string.IsNullOrEmpty(arg.Value))
+ .Select(arg =>
+ {
+ var escapedValue = arg.Value.Replace(",", "\\,");
+ return string.IsNullOrEmpty(arg.Key) ? escapedValue : $"{arg.Key}={escapedValue}";
+ });
+
+ return $"-vf \"{string.Join(", ", arguments)}\"";
+ }
+ }
+
+ public interface IVideoFilterArgument
+ {
+ public string Key { get; }
+ public string Value { get; }
+ }
+
+ public class VideoFilterOptions
+ {
+ public List<IVideoFilterArgument> Arguments { get; } = new List<IVideoFilterArgument>();
+
+ public VideoFilterOptions Scale(VideoSize videoSize) => WithArgument(new ScaleArgument(videoSize));
+ public VideoFilterOptions Scale(int width, int height) => WithArgument(new ScaleArgument(width, height));
+ public VideoFilterOptions Scale(Size size) => WithArgument(new ScaleArgument(size));
+ public VideoFilterOptions Transpose(Transposition transposition) => WithArgument(new TransposeArgument(transposition));
+ public VideoFilterOptions Mirror(Mirroring mirroring) => WithArgument(new SetMirroringArgument(mirroring));
+ public VideoFilterOptions DrawText(DrawTextOptions drawTextOptions) => WithArgument(new DrawTextArgument(drawTextOptions));
+
+ private VideoFilterOptions WithArgument(IVideoFilterArgument argument)
+ {
+ Arguments.Add(argument);
+ return this;
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Enums/AudioQuality.cs b/FFMpegCore/FFMpeg/Enums/AudioQuality.cs
new file mode 100644
index 00000000..60ba0eb0
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/AudioQuality.cs
@@ -0,0 +1,12 @@
+namespace FFMpegCore.Enums
+{
+ public enum AudioQuality
+ {
+ Ultra = 384,
+ VeryHigh = 256,
+ Good = 192,
+ Normal = 128,
+ BelowNormal = 96,
+ Low = 64
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Enums/Codec.cs b/FFMpegCore/FFMpeg/Enums/Codec.cs
new file mode 100644
index 00000000..8ac84565
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/Codec.cs
@@ -0,0 +1,152 @@
+using FFMpegCore.Exceptions;
+using System;
+using System.Text.RegularExpressions;
+
+namespace FFMpegCore.Enums
+{
+ public enum FeatureStatus
+ {
+ Unknown,
+ NotSupported,
+ Supported,
+ }
+
+ public class Codec
+ {
+ private static readonly Regex _codecsFormatRegex = new Regex(@"([D\.])([E\.])([VASD\.])([I\.])([L\.])([S\.])\s+([a-z0-9_-]+)\s+(.+)");
+ private static readonly Regex _decodersEncodersFormatRegex = new Regex(@"([VASD\.])([F\.])([S\.])([X\.])([B\.])([D\.])\s+([a-z0-9_-]+)\s+(.+)");
+
+ public class FeatureLevel
+ {
+ public bool IsExperimental { get; internal set; }
+ public FeatureStatus SupportsFrameLevelMultithreading { get; internal set; }
+ public FeatureStatus SupportsSliceLevelMultithreading { get; internal set; }
+ public FeatureStatus SupportsDrawHorizBand { get; internal set; }
+ public FeatureStatus SupportsDirectRendering { get; internal set; }
+
+ internal void Merge(FeatureLevel other)
+ {
+ IsExperimental |= other.IsExperimental;
+ SupportsFrameLevelMultithreading = (FeatureStatus)Math.Max((int)SupportsFrameLevelMultithreading, (int)other.SupportsFrameLevelMultithreading);
+ SupportsSliceLevelMultithreading = (FeatureStatus)Math.Max((int)SupportsSliceLevelMultithreading, (int)other.SupportsSliceLevelMultithreading);
+ SupportsDrawHorizBand = (FeatureStatus)Math.Max((int)SupportsDrawHorizBand, (int)other.SupportsDrawHorizBand);
+ SupportsDirectRendering = (FeatureStatus)Math.Max((int)SupportsDirectRendering, (int)other.SupportsDirectRendering);
+ }
+ }
+
+ public string Name { get; private set; }
+ public CodecType Type { get; private set; }
+ public bool DecodingSupported { get; private set; }
+ public bool EncodingSupported { get; private set; }
+ public bool IsIntraFrameOnly { get; private set; }
+ public bool IsLossy { get; private set; }
+ public bool IsLossless { get; private set; }
+ public string Description { get; private set; } = null!;
+
+ public FeatureLevel EncoderFeatureLevel { get; private set; }
+ public FeatureLevel DecoderFeatureLevel { get; private set; }
+
+ internal Codec(string name, CodecType type)
+ {
+ EncoderFeatureLevel = new FeatureLevel();
+ DecoderFeatureLevel = new FeatureLevel();
+ Name = name;
+ Type = type;
+ }
+
+ internal static bool TryParseFromCodecs(string line, out Codec codec)
+ {
+ var match = _codecsFormatRegex.Match(line);
+ if (!match.Success)
+ {
+ codec = null!;
+ return false;
+ }
+
+ var name = match.Groups[7].Value;
+ var type = match.Groups[3].Value switch
+ {
+ "V" => CodecType.Video,
+ "A" => CodecType.Audio,
+ "D" => CodecType.Data,
+ "S" => CodecType.Subtitle,
+ _ => CodecType.Unknown
+ };
+
+ if(type == CodecType.Unknown)
+ {
+ codec = null!;
+ return false;
+ }
+
+ codec = new Codec(name, type);
+
+ codec.DecodingSupported = match.Groups[1].Value != ".";
+ codec.EncodingSupported = match.Groups[2].Value != ".";
+ codec.IsIntraFrameOnly = match.Groups[4].Value != ".";
+ codec.IsLossy = match.Groups[5].Value != ".";
+ codec.IsLossless = match.Groups[6].Value != ".";
+ codec.Description = match.Groups[8].Value;
+
+ return true;
+ }
+ internal static bool TryParseFromEncodersDecoders(string line, out Codec codec, bool isEncoder)
+ {
+ var match = _decodersEncodersFormatRegex.Match(line);
+ if (!match.Success)
+ {
+ codec = null!;
+ return false;
+ }
+
+ var name = match.Groups[7].Value;
+ var type = match.Groups[1].Value switch
+ {
+ "V" => CodecType.Video,
+ "A" => CodecType.Audio,
+ "D" => CodecType.Data,
+ "S" => CodecType.Subtitle,
+ _ => CodecType.Unknown
+ };
+
+ if (type == CodecType.Unknown)
+ {
+ codec = null!;
+ return false;
+ }
+
+ codec = new Codec(name, type);
+
+ var featureLevel = isEncoder ? codec.EncoderFeatureLevel : codec.DecoderFeatureLevel;
+
+ codec.DecodingSupported = !isEncoder;
+ codec.EncodingSupported = isEncoder;
+ featureLevel.SupportsFrameLevelMultithreading = match.Groups[2].Value != "." ? FeatureStatus.Supported : FeatureStatus.NotSupported;
+ featureLevel.SupportsSliceLevelMultithreading = match.Groups[3].Value != "." ? FeatureStatus.Supported : FeatureStatus.NotSupported;
+ featureLevel.IsExperimental = match.Groups[4].Value != ".";
+ featureLevel.SupportsDrawHorizBand = match.Groups[5].Value != "." ? FeatureStatus.Supported : FeatureStatus.NotSupported;
+ featureLevel.SupportsDirectRendering = match.Groups[6].Value != "." ? FeatureStatus.Supported : FeatureStatus.NotSupported;
+ codec.Description = match.Groups[8].Value;
+
+ return true;
+ }
+ internal void Merge(Codec other)
+ {
+ if (Name != other.Name)
+ throw new FFMpegException(FFMpegExceptionType.Operation, "Cannot merge different codecs");
+
+ Type |= other.Type;
+ DecodingSupported |= other.DecodingSupported;
+ EncodingSupported |= other.EncodingSupported;
+ IsIntraFrameOnly |= other.IsIntraFrameOnly;
+ IsLossy |= other.IsLossy;
+ IsLossless |= other.IsLossless;
+
+ EncoderFeatureLevel.Merge(other.EncoderFeatureLevel);
+ DecoderFeatureLevel.Merge(other.DecoderFeatureLevel);
+
+ if (Description != other.Description)
+ Description += "\r\n" + other.Description;
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Enums/ContainerFormat.cs b/FFMpegCore/FFMpeg/Enums/ContainerFormat.cs
new file mode 100644
index 00000000..2da1572a
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/ContainerFormat.cs
@@ -0,0 +1,47 @@
+using System.Text.RegularExpressions;
+
+namespace FFMpegCore.Enums
+{
+ public class ContainerFormat
+ {
+ private static readonly Regex FormatRegex = new Regex(@"([D ])([E ])\s+([a-z0-9_]+)\s+(.+)");
+
+ public string Name { get; private set; }
+ public bool DemuxingSupported { get; private set; }
+ public bool MuxingSupported { get; private set; }
+ public string Description { get; private set; } = null!;
+
+ public string Extension
+ {
+ get
+ {
+ if (GlobalFFOptions.Current.ExtensionOverrides.ContainsKey(Name))
+ return GlobalFFOptions.Current.ExtensionOverrides[Name];
+ return "." + Name;
+ }
+ }
+
+ internal ContainerFormat(string name)
+ {
+ Name = name;
+ }
+
+ internal static bool TryParse(string line, out ContainerFormat fmt)
+ {
+ var match = FormatRegex.Match(line);
+ if (!match.Success)
+ {
+ fmt = null!;
+ return false;
+ }
+
+ fmt = new ContainerFormat(match.Groups[3].Value)
+ {
+ DemuxingSupported = match.Groups[1].Value != " ",
+ MuxingSupported = match.Groups[2].Value != " ",
+ Description = match.Groups[4].Value
+ };
+ return true;
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Enums/Enums.cs b/FFMpegCore/FFMpeg/Enums/Enums.cs
new file mode 100644
index 00000000..31a5f1ed
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/Enums.cs
@@ -0,0 +1,54 @@
+namespace FFMpegCore.Enums
+{
+ public enum CodecType
+ {
+ Unknown = 0,
+ Video = 1 << 1,
+ Audio = 1 << 2,
+ Subtitle = 1 << 3,
+ Data = 1 << 4,
+ }
+
+ public static class VideoCodec
+ {
+ public static Codec LibX264 => FFMpeg.GetCodec("libx264");
+ public static Codec LibVpx => FFMpeg.GetCodec("libvpx");
+ public static Codec LibTheora => FFMpeg.GetCodec("libtheora");
+ public static Codec Png => FFMpeg.GetCodec("png");
+ public static Codec MpegTs => FFMpeg.GetCodec("mpegts");
+ }
+
+ public static class AudioCodec
+ {
+ public static Codec Aac => FFMpeg.GetCodec("aac");
+ public static Codec LibVorbis => FFMpeg.GetCodec("libvorbis");
+ public static Codec LibFdk_Aac => FFMpeg.GetCodec("libfdk_aac");
+ public static Codec Ac3 => FFMpeg.GetCodec("ac3");
+ public static Codec Eac3 => FFMpeg.GetCodec("eac3");
+ public static Codec LibMp3Lame => FFMpeg.GetCodec("libmp3lame");
+ }
+
+ public static class VideoType
+ {
+ public static ContainerFormat MpegTs => FFMpeg.GetContainerFormat("mpegts");
+ public static ContainerFormat Ts => FFMpeg.GetContainerFormat("mpegts");
+ public static ContainerFormat Mp4 => FFMpeg.GetContainerFormat("mp4");
+ public static ContainerFormat Mov => FFMpeg.GetContainerFormat("mov");
+ public static ContainerFormat Avi => FFMpeg.GetContainerFormat("avi");
+ public static ContainerFormat Ogv => FFMpeg.GetContainerFormat("ogv");
+ public static ContainerFormat WebM => FFMpeg.GetContainerFormat("webm");
+ }
+
+ public enum Filter
+ {
+ H264_Mp4ToAnnexB,
+ Aac_AdtstoAsc
+ }
+
+ public enum Channel
+ {
+ Audio,
+ Video,
+ Both
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Enums/FileExtension.cs b/FFMpegCore/FFMpeg/Enums/FileExtension.cs
new file mode 100644
index 00000000..d45faf6b
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/FileExtension.cs
@@ -0,0 +1,26 @@
+using System;
+
+namespace FFMpegCore.Enums
+{
+ public static class FileExtension
+ {
+ public static string Extension(this Codec type)
+ {
+ return type.Name switch
+ {
+ "libx264" => Mp4,
+ "libxvpx" => WebM,
+ "libxtheora" => Ogv,
+ "mpegts" => Ts,
+ "png" => Png,
+ _ => throw new Exception("The extension for this video type is not defined.")
+ };
+ }
+ public static readonly string Mp4 = VideoType.Mp4.Extension;
+ public static readonly string Ts = VideoType.MpegTs.Extension;
+ public static readonly string Ogv = VideoType.Ogv.Extension;
+ public static readonly string WebM = VideoType.WebM.Extension;
+ public static readonly string Png = ".png";
+ public static readonly string Mp3 = ".mp3";
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Enums/HardwareAccelerationDevice.cs b/FFMpegCore/FFMpeg/Enums/HardwareAccelerationDevice.cs
new file mode 100644
index 00000000..1d92f53d
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/HardwareAccelerationDevice.cs
@@ -0,0 +1,14 @@
+namespace FFMpegCore.Enums
+{
+ public enum HardwareAccelerationDevice
+ {
+ Auto,
+ D3D11VA,
+ DXVA2,
+ QSV,
+ CUVID,
+ VDPAU,
+ VAAPI,
+ LibMFX
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Enums/Mirroring.cs b/FFMpegCore/FFMpeg/Enums/Mirroring.cs
new file mode 100644
index 00000000..57681637
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/Mirroring.cs
@@ -0,0 +1,8 @@
+namespace FFMpegCore.Enums
+{
+ public enum Mirroring
+ {
+ Vertical,
+ Horizontal
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Enums/PixelFormat.cs b/FFMpegCore/FFMpeg/Enums/PixelFormat.cs
new file mode 100644
index 00000000..9808e437
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/PixelFormat.cs
@@ -0,0 +1,53 @@
+using System.Text.RegularExpressions;
+
+namespace FFMpegCore.Enums
+{
+ public class PixelFormat
+ {
+ private static readonly Regex _formatRegex = new Regex(@"([I\.])([O\.])([H\.])([P\.])([B\.])\s+(\S+)\s+([0-9]+)\s+([0-9]+)");
+
+ public bool InputConversionSupported { get; private set; }
+ public bool OutputConversionSupported { get; private set; }
+ public bool HardwareAccelerationSupported { get; private set; }
+ public bool IsPaletted { get; private set; }
+ public bool IsBitstream { get; private set; }
+ public string Name { get; private set; }
+ public int Components { get; private set; }
+ public int BitsPerPixel { get; private set; }
+
+ public bool CanConvertTo(PixelFormat other)
+ {
+ return InputConversionSupported && other.OutputConversionSupported;
+ }
+
+ internal PixelFormat(string name)
+ {
+ Name = name;
+ }
+
+ internal static bool TryParse(string line, out PixelFormat fmt)
+ {
+ var match = _formatRegex.Match(line);
+ if (!match.Success)
+ {
+ fmt = null!;
+ return false;
+ }
+
+ fmt = new PixelFormat(match.Groups[6].Value);
+ fmt.InputConversionSupported = match.Groups[1].Value != ".";
+ fmt.OutputConversionSupported = match.Groups[2].Value != ".";
+ fmt.HardwareAccelerationSupported = match.Groups[3].Value != ".";
+ fmt.IsPaletted = match.Groups[4].Value != ".";
+ fmt.IsBitstream = match.Groups[5].Value != ".";
+ if (!int.TryParse(match.Groups[7].Value, out var nbComponents))
+ return false;
+ fmt.Components = nbComponents;
+ if (!int.TryParse(match.Groups[8].Value, out var bpp))
+ return false;
+ fmt.BitsPerPixel = bpp;
+
+ return true;
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Enums/Speed.cs b/FFMpegCore/FFMpeg/Enums/Speed.cs
new file mode 100644
index 00000000..52272f0b
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/Speed.cs
@@ -0,0 +1,15 @@
+namespace FFMpegCore.Enums
+{
+ public enum Speed
+ {
+ VerySlow,
+ Slower,
+ Slow,
+ Medium,
+ Fast,
+ Faster,
+ VeryFast,
+ SuperFast,
+ UltraFast
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Enums/Transposition.cs b/FFMpegCore/FFMpeg/Enums/Transposition.cs
new file mode 100644
index 00000000..bacfccc2
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/Transposition.cs
@@ -0,0 +1,10 @@
+namespace FFMpegCore.Enums
+{
+ public enum Transposition
+ {
+ CounterClockwise90VerticalFlip = 0,
+ Clockwise90 = 1,
+ CounterClockwise90 = 2,
+ Clockwise90VerticalFlip = 3
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Enums/VideoSize.cs b/FFMpegCore/FFMpeg/Enums/VideoSize.cs
new file mode 100644
index 00000000..d774b95b
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Enums/VideoSize.cs
@@ -0,0 +1,11 @@
+namespace FFMpegCore.Enums
+{
+ public enum VideoSize
+ {
+ FullHd = 1080,
+ Hd = 720,
+ Ed = 480,
+ Ld = 360,
+ Original = -1
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Exceptions/FFMpegException.cs b/FFMpegCore/FFMpeg/Exceptions/FFMpegException.cs
new file mode 100644
index 00000000..485cf204
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Exceptions/FFMpegException.cs
@@ -0,0 +1,60 @@
+using System;
+
+namespace FFMpegCore.Exceptions
+{
+ public enum FFMpegExceptionType
+ {
+ Conversion,
+ File,
+ Operation,
+ Process
+ }
+
+ public class FFMpegException : Exception
+ {
+ public FFMpegException(FFMpegExceptionType type, string message, Exception? innerException = null, string ffMpegErrorOutput = "")
+ : base(message, innerException)
+ {
+ FFMpegErrorOutput = ffMpegErrorOutput;
+ Type = type;
+ }
+ public FFMpegException(FFMpegExceptionType type, string message, string ffMpegErrorOutput = "")
+ : base(message)
+ {
+ FFMpegErrorOutput = ffMpegErrorOutput;
+ Type = type;
+ }
+ public FFMpegException(FFMpegExceptionType type, string message)
+ : base(message)
+ {
+ FFMpegErrorOutput = string.Empty;
+ Type = type;
+ }
+
+ public FFMpegExceptionType Type { get; }
+ public string FFMpegErrorOutput { get; }
+ }
+ public class FFOptionsException : Exception
+ {
+ public FFOptionsException(string message, Exception? innerException = null)
+ : base(message, innerException)
+ {
+ }
+ }
+
+ public class FFMpegArgumentException : Exception
+ {
+ public FFMpegArgumentException(string? message = null, Exception? innerException = null)
+ : base(message, innerException)
+ {
+ }
+ }
+
+ public class FFMpegStreamFormatException : FFMpegException
+ {
+ public FFMpegStreamFormatException(FFMpegExceptionType type, string message, Exception? innerException = null)
+ : base(type, message, innerException)
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/FFMpeg.cs b/FFMpegCore/FFMpeg/FFMpeg.cs
new file mode 100644
index 00000000..42c344b3
--- /dev/null
+++ b/FFMpegCore/FFMpeg/FFMpeg.cs
@@ -0,0 +1,578 @@
+using FFMpegCore.Enums;
+using FFMpegCore.Exceptions;
+using FFMpegCore.Helpers;
+using FFMpegCore.Pipes;
+using System;
+using System.Collections.Generic;
+using System.Drawing;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+
+namespace FFMpegCore
+{
+ public static class FFMpeg
+ {
+ /// <summary>
+ /// Saves a 'png' thumbnail from the input video to disk
+ /// </summary>
+ /// <param name="input">Source video file path</param>
+ /// <param name="output">Output thumbnail file path</param>
+ /// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
+ /// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
+ /// <returns>Success state.</returns>
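+ /// <example>
+ /// A minimal usage sketch, assuming the usual System, System.Drawing and FFMpegCore usings; the file paths and values are placeholders:
+ /// <code>
+ /// var success = FFMpeg.Snapshot("video.mp4", "thumbnail.png", new Size(640, 0), TimeSpan.FromSeconds(10));
+ /// </code>
+ /// </example>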
+ public static bool Snapshot(string input, string output, Size? size = null, TimeSpan? captureTime = null)
+ {
+ if (Path.GetExtension(output) != FileExtension.Png)
+ output = Path.GetFileNameWithoutExtension(output) + FileExtension.Png;
+
+ var source = FFProbe.Analyse(input);
+ var (arguments, outputOptions) = BuildSnapshotArguments(input, source, size, captureTime);
+
+ return arguments
+ .OutputToFile(output, true, outputOptions)
+ .ProcessSynchronously();
+ }
+ /// <summary>
+ /// Saves a 'png' thumbnail from the input video to disk asynchronously
+ /// </summary>
+ /// <param name="input">Source video file path</param>
+ /// <param name="output">Output thumbnail file path</param>
+ /// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
+ /// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
+ /// <returns>Success state.</returns>
+ public static async Task<bool> SnapshotAsync(string input, string output, Size? size = null, TimeSpan? captureTime = null)
+ {
+ if (Path.GetExtension(output) != FileExtension.Png)
+ output = Path.GetFileNameWithoutExtension(output) + FileExtension.Png;
+
+ var source = await FFProbe.AnalyseAsync(input);
+ var (arguments, outputOptions) = BuildSnapshotArguments(input, source, size, captureTime);
+
+ return await arguments
+ .OutputToFile(output, true, outputOptions)
+ .ProcessAsynchronously();
+ }
+
+ /// <summary>
+ /// Saves a 'png' thumbnail to an in-memory bitmap
+ /// </summary>
+ /// <param name="input">Source video file.</param>
+ /// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
+ /// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
+ /// <returns>Bitmap with the requested snapshot.</returns>
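+ /// <example>
+ /// A minimal usage sketch, assuming System.Drawing and FFMpegCore usings; the paths are placeholders:
+ /// <code>
+ /// using var thumbnail = FFMpeg.Snapshot("video.mp4", new Size(0, 360), TimeSpan.FromSeconds(5));
+ /// thumbnail.Save("thumbnail.png");
+ /// </code>
+ /// </example>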
+ public static Bitmap Snapshot(string input, Size? size = null, TimeSpan? captureTime = null)
+ {
+ var source = FFProbe.Analyse(input);
+ var (arguments, outputOptions) = BuildSnapshotArguments(input, source, size, captureTime);
+ using var ms = new MemoryStream();
+
+ arguments
+ .OutputToPipe(new StreamPipeSink(ms), options => outputOptions(options
+ .ForceFormat("rawvideo")))
+ .ProcessSynchronously();
+
+ ms.Position = 0;
+ using var bitmap = new Bitmap(ms);
+ return bitmap.Clone(new Rectangle(0, 0, bitmap.Width, bitmap.Height), bitmap.PixelFormat);
+ }
+ /// <summary>
+ /// Saves a 'png' thumbnail to an in-memory bitmap asynchronously
+ /// </summary>
+ /// <param name="input">Source video file.</param>
+ /// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
+ /// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
+ /// <returns>Bitmap with the requested snapshot.</returns>
+ public static async Task<Bitmap> SnapshotAsync(string input, Size? size = null, TimeSpan? captureTime = null)
+ {
+ var source = await FFProbe.AnalyseAsync(input);
+ var (arguments, outputOptions) = BuildSnapshotArguments(input, source, size, captureTime);
+ using var ms = new MemoryStream();
+
+ await arguments
+ .OutputToPipe(new StreamPipeSink(ms), options => outputOptions(options
+ .ForceFormat("rawvideo")))
+ .ProcessAsynchronously();
+
+ ms.Position = 0;
+ return new Bitmap(ms);
+ }
+
+ private static (FFMpegArguments, Action<FFMpegArgumentOptions> outputOptions) BuildSnapshotArguments(string input, IMediaAnalysis source, Size? size = null, TimeSpan? captureTime = null)
+ {
+ captureTime ??= TimeSpan.FromSeconds(source.Duration.TotalSeconds / 3);
+ size = PrepareSnapshotSize(source, size);
+
+ return (FFMpegArguments
+ .FromFileInput(input, false, options => options
+ .Seek(captureTime)),
+ options => options
+ .WithVideoCodec(VideoCodec.Png)
+ .WithFrameOutputCount(1)
+ .Resize(size));
+ }
+
+ private static Size? PrepareSnapshotSize(IMediaAnalysis source, Size? wantedSize)
+ {
+ if (wantedSize == null || (wantedSize.Value.Height <= 0 && wantedSize.Value.Width <= 0) || source.PrimaryVideoStream == null)
+ return null;
+
+ var currentSize = new Size(source.PrimaryVideoStream.Width, source.PrimaryVideoStream.Height);
+ if (source.PrimaryVideoStream.Rotation == 90 || source.PrimaryVideoStream.Rotation == 180)
+ currentSize = new Size(source.PrimaryVideoStream.Height, source.PrimaryVideoStream.Width);
+
+ if (wantedSize.Value.Width != currentSize.Width || wantedSize.Value.Height != currentSize.Height)
+ {
+ if (wantedSize.Value.Width <= 0 && wantedSize.Value.Height > 0)
+ {
+ var ratio = (double)wantedSize.Value.Height / currentSize.Height;
+ return new Size((int)(currentSize.Width * ratio), (int)(currentSize.Height * ratio));
+ }
+ if (wantedSize.Value.Height <= 0 && wantedSize.Value.Width > 0)
+ {
+ var ratio = (double)wantedSize.Value.Width / currentSize.Width;
+ return new Size((int)(currentSize.Width * ratio), (int)(currentSize.Height * ratio));
+ }
+ return wantedSize;
+ }
+
+ return null;
+ }
+
+ /// <summary>
+ /// Converts a video to a different format.
+ /// </summary>
+ /// <param name="input">Input video source.</param>
+ /// <param name="output">Output file path.</param>
+ /// <param name="format">Target container format.</param>
+ /// <param name="speed">Conversion target speed/quality (faster speed = lower quality).</param>
+ /// <param name="size">Video size.</param>
+ /// <param name="audioQuality">Conversion target audio quality.</param>
+ /// <param name="multithreaded">Whether encoding is multithreaded.</param>
+ /// <returns>Success state.</returns>
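+ /// <example>
+ /// A minimal usage sketch, assuming FFMpegCore and FFMpegCore.Enums usings; the file paths are placeholders:
+ /// <code>
+ /// var success = FFMpeg.Convert("input.avi", "output.mp4", VideoType.Mp4, Speed.Fast, VideoSize.Hd, AudioQuality.Good, multithreaded: true);
+ /// </code>
+ /// </example>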
+ public static bool Convert(
+ string input,
+ string output,
+ ContainerFormat format,
+ Speed speed = Speed.SuperFast,
+ VideoSize size = VideoSize.Original,
+ AudioQuality audioQuality = AudioQuality.Normal,
+ bool multithreaded = false)
+ {
+ FFMpegHelper.ExtensionExceptionCheck(output, format.Extension);
+ var source = FFProbe.Analyse(input);
+ FFMpegHelper.ConversionSizeExceptionCheck(source);
+
+ var scale = VideoSize.Original == size ? 1 : (double)source.PrimaryVideoStream!.Height / (int)size;
+ var outputSize = new Size((int)(source.PrimaryVideoStream!.Width / scale), (int)(source.PrimaryVideoStream.Height / scale));
+
+ if (outputSize.Width % 2 != 0)
+ outputSize.Width += 1;
+
+ return format.Name switch
+ {
+ "mp4" => FFMpegArguments
+ .FromFileInput(input)
+ .OutputToFile(output, true, options => options
+ .UsingMultithreading(multithreaded)
+ .WithVideoCodec(VideoCodec.LibX264)
+ .WithVideoBitrate(2400)
+ .WithVideoFilters(filterOptions => filterOptions
+ .Scale(outputSize))
+ .WithSpeedPreset(speed)
+ .WithAudioCodec(AudioCodec.Aac)
+ .WithAudioBitrate(audioQuality))
+ .ProcessSynchronously(),
+ "ogv" => FFMpegArguments
+ .FromFileInput(input)
+ .OutputToFile(output, true, options => options
+ .UsingMultithreading(multithreaded)
+ .WithVideoCodec(VideoCodec.LibTheora)
+ .WithVideoBitrate(2400)
+ .WithVideoFilters(filterOptions => filterOptions
+ .Scale(outputSize))
+ .WithSpeedPreset(speed)
+ .WithAudioCodec(AudioCodec.LibVorbis)
+ .WithAudioBitrate(audioQuality))
+ .ProcessSynchronously(),
+ "mpegts" => FFMpegArguments
+ .FromFileInput(input)
+ .OutputToFile(output, true, options => options
+ .CopyChannel()
+ .WithBitStreamFilter(Channel.Video, Filter.H264_Mp4ToAnnexB)
+ .ForceFormat(VideoType.Ts))
+ .ProcessSynchronously(),
+ "webm" => FFMpegArguments
+ .FromFileInput(input)
+ .OutputToFile(output, true, options => options
+ .UsingMultithreading(multithreaded)
+ .WithVideoCodec(VideoCodec.LibVpx)
+ .WithVideoBitrate(2400)
+ .WithVideoFilters(filterOptions => filterOptions
+ .Scale(outputSize))
+ .WithSpeedPreset(speed)
+ .WithAudioCodec(AudioCodec.LibVorbis)
+ .WithAudioBitrate(audioQuality))
+ .ProcessSynchronously(),
+ _ => throw new ArgumentOutOfRangeException(nameof(format))
+ };
+ }
+
+ /// <summary>
+ /// Adds a poster image to an audio file.
+ /// </summary>
+ /// <param name="image">Source image file.</param>
+ /// <param name="audio">Source audio file.</param>
+ /// <param name="output">Output video file.</param>
+ /// <returns>Success state.</returns>
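+ /// <example>
+ /// A minimal usage sketch; the output must use the '.mp4' extension, and all paths are placeholders:
+ /// <code>
+ /// var success = FFMpeg.PosterWithAudio("cover.png", "track.mp3", "video.mp4");
+ /// </code>
+ /// </example>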
+ public static bool PosterWithAudio(string image, string audio, string output)
+ {
+ FFMpegHelper.ExtensionExceptionCheck(output, FileExtension.Mp4);
+ FFMpegHelper.ConversionSizeExceptionCheck(Image.FromFile(image));
+
+ return FFMpegArguments
+ .FromFileInput(image)
+ .AddFileInput(audio)
+ .OutputToFile(output, true, options => options
+ .Loop(1)
+ .WithVideoCodec(VideoCodec.LibX264)
+ .WithConstantRateFactor(21)
+ .WithAudioBitrate(AudioQuality.Normal)
+ .UsingShortest())
+ .ProcessSynchronously();
+ }
+
+ /// <summary>
+ /// Joins a list of video files.
+ /// </summary>
+ /// <param name="output">Output video file.</param>
+ /// <param name="videos">List of videos that need to be joined together.</param>
+ /// <returns>Success state.</returns>
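+ /// <example>
+ /// A minimal usage sketch; the file paths are placeholders:
+ /// <code>
+ /// var success = FFMpeg.Join("joined.mp4", "part1.mp4", "part2.mp4", "part3.mp4");
+ /// </code>
+ /// </example>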
+ public static bool Join(string output, params string[] videos)
+ {
+ var temporaryVideoParts = videos.Select(videoPath =>
+ {
+ var video = FFProbe.Analyse(videoPath);
+ FFMpegHelper.ConversionSizeExceptionCheck(video);
+ var destinationPath = Path.Combine(GlobalFFOptions.Current.TemporaryFilesFolder, $"{Path.GetFileNameWithoutExtension(videoPath)}{FileExtension.Ts}");
+ Directory.CreateDirectory(GlobalFFOptions.Current.TemporaryFilesFolder);
+ Convert(videoPath, destinationPath, VideoType.Ts);
+ return destinationPath;
+ }).ToArray();
+
+ try
+ {
+ return FFMpegArguments
+ .FromConcatInput(temporaryVideoParts)
+ .OutputToFile(output, true, options => options
+ .CopyChannel()
+ .WithBitStreamFilter(Channel.Audio, Filter.Aac_AdtstoAsc))
+ .ProcessSynchronously();
+ }
+ finally
+ {
+ Cleanup(temporaryVideoParts);
+ }
+ }
+
+ /// <summary>
+ /// Converts an image sequence to a video.
+ /// </summary>
+ /// <param name="output">Output video file.</param>
+ /// <param name="frameRate">Frames per second.</param>
+ /// <param name="images">Image sequence collection.</param>
+ /// <returns>Success state.</returns>
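+ /// <example>
+ /// A minimal usage sketch, assuming the ImageInfo instances wrap existing image files on disk; paths are placeholders:
+ /// <code>
+ /// var success = FFMpeg.JoinImageSequence("slideshow.mp4", frameRate: 1,
+ ///     new ImageInfo("image1.png"), new ImageInfo("image2.png"), new ImageInfo("image3.png"));
+ /// </code>
+ /// </example>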
+ public static bool JoinImageSequence(string output, double frameRate = 30, params ImageInfo[] images)
+ {
+ var tempFolderName = Path.Combine(GlobalFFOptions.Current.TemporaryFilesFolder, Guid.NewGuid().ToString());
+ var temporaryImageFiles = images.Select((image, index) =>
+ {
+ FFMpegHelper.ConversionSizeExceptionCheck(Image.FromFile(image.FullName));
+ var destinationPath = Path.Combine(tempFolderName, $"{index.ToString().PadLeft(9, '0')}{image.Extension}");
+ Directory.CreateDirectory(tempFolderName);
+ File.Copy(image.FullName, destinationPath);
+ return destinationPath;
+ }).ToArray();
+
+ var firstImage = images.First();
+ try
+ {
+ return FFMpegArguments
+ .FromFileInput(Path.Combine(tempFolderName, "%09d.png"), false)
+ .OutputToFile(output, true, options => options
+ .Resize(firstImage.Width, firstImage.Height)
+ .WithFramerate(frameRate))
+ .ProcessSynchronously();
+ }
+ finally
+ {
+ Cleanup(temporaryImageFiles);
+ Directory.Delete(tempFolderName);
+ }
+ }
+
+ /// <summary>
+ /// Records M3U8 streams to the specified output.
+ /// </summary>
+ /// <param name="uri">URI pointing towards the stream.</param>
+ /// <param name="output">Output file.</param>
+ /// <returns>Success state.</returns>
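+ /// <example>
+ /// A minimal usage sketch; the stream URL and output path are placeholders, and the output must use the '.mp4' extension:
+ /// <code>
+ /// var success = FFMpeg.SaveM3U8Stream(new Uri("https://example.com/live/stream.m3u8"), "recording.mp4");
+ /// </code>
+ /// </example>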
+ public static bool SaveM3U8Stream(Uri uri, string output)
+ {
+ FFMpegHelper.ExtensionExceptionCheck(output, FileExtension.Mp4);
+
+ if (uri.Scheme != "http" && uri.Scheme != "https")
+ throw new ArgumentException($"Uri: {uri.AbsoluteUri}, does not point to a valid http(s) stream.");
+
+ return FFMpegArguments
+ .FromUrlInput(uri)
+ .OutputToFile(output)
+ .ProcessSynchronously();
+ }
+
+ /// <summary>
+ /// Strips a video file of audio.
+ /// </summary>
+ /// <param name="input">Input video file.</param>
+ /// <param name="output">Output video file.</param>
+ /// <returns>Success state.</returns>
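+ /// <example>
+ /// A minimal usage sketch; the file paths are placeholders:
+ /// <code>
+ /// var success = FFMpeg.Mute("video.mp4", "video-muted.mp4");
+ /// </code>
+ /// </example>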
+ public static bool Mute(string input, string output)
+ {
+ var source = FFProbe.Analyse(input);
+ FFMpegHelper.ConversionSizeExceptionCheck(source);
+ // FFMpegHelper.ExtensionExceptionCheck(output, source.Extension);
+
+ return FFMpegArguments
+ .FromFileInput(input)
+ .OutputToFile(output, true, options => options
+ .CopyChannel(Channel.Video)
+ .DisableChannel(Channel.Audio))
+ .ProcessSynchronously();
+ }
+
+ /// <summary>
+ /// Saves audio from a specific video file to disk.
+ /// </summary>
+ /// <param name="input">Source video file.</param>
+ /// <param name="output">Output audio file.</param>
+ /// <returns>Success state.</returns>
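+ /// <example>
+ /// A minimal usage sketch; the output must use the '.mp3' extension, and the paths are placeholders:
+ /// <code>
+ /// var success = FFMpeg.ExtractAudio("video.mp4", "audio.mp3");
+ /// </code>
+ /// </example>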
+ public static bool ExtractAudio(string input, string output)
+ {
+ FFMpegHelper.ExtensionExceptionCheck(output, FileExtension.Mp3);
+
+ return FFMpegArguments
+ .FromFileInput(input)
+ .OutputToFile(output, true, options => options
+ .DisableChannel(Channel.Video))
+ .ProcessSynchronously();
+ }
+
+ /// <summary>
+ /// Adds audio to a video file.
+ /// </summary>
+ /// <param name="input">Source video file.</param>
+ /// <param name="inputAudio">Source audio file.</param>
+ /// <param name="output">Output video file.</param>
+ /// <param name="stopAtShortest">Indicates if the encoding should stop at the shortest input file.</param>
+ /// <returns>Success state.</returns>
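+ /// <example>
+ /// A minimal usage sketch; the file paths are placeholders:
+ /// <code>
+ /// var success = FFMpeg.ReplaceAudio("video.mp4", "soundtrack.mp3", "video-with-audio.mp4", stopAtShortest: true);
+ /// </code>
+ /// </example>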
+ public static bool ReplaceAudio(string input, string inputAudio, string output, bool stopAtShortest = false)
+ {
+ var source = FFProbe.Analyse(input);
+ FFMpegHelper.ConversionSizeExceptionCheck(source);
+ // FFMpegHelper.ExtensionExceptionCheck(output, source.Format.);
+
+ return FFMpegArguments
+ .FromFileInput(input)
+ .AddFileInput(inputAudio)
+ .OutputToFile(output, true, options => options
+ .CopyChannel()
+ .WithAudioCodec(AudioCodec.Aac)
+ .WithAudioBitrate(AudioQuality.Good)
+ .UsingShortest(stopAtShortest))
+ .ProcessSynchronously();
+ }
+
+ #region PixelFormats
+ internal static IReadOnlyList<PixelFormat> GetPixelFormatsInternal()
+ {
+ FFMpegHelper.RootExceptionCheck();
+
+ var list = new List<PixelFormat>();
+ using var instance = new Instances.Instance(GlobalFFOptions.GetFFMpegBinaryPath(), "-pix_fmts");
+ instance.DataReceived += (e, args) =>
+ {
+ if (PixelFormat.TryParse(args.Data, out var format))
+ list.Add(format);
+ };
+
+ var exitCode = instance.BlockUntilFinished();
+ if (exitCode != 0) throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\r\n", instance.OutputData));
+
+ return list.AsReadOnly();
+ }
+
+ public static IReadOnlyList<PixelFormat> GetPixelFormats()
+ {
+ if (!GlobalFFOptions.Current.UseCache)
+ return GetPixelFormatsInternal();
+ return FFMpegCache.PixelFormats.Values.ToList().AsReadOnly();
+ }
+
+ public static bool TryGetPixelFormat(string name, out PixelFormat fmt)
+ {
+ if (!GlobalFFOptions.Current.UseCache)
+ {
+ fmt = GetPixelFormatsInternal().FirstOrDefault(x => x.Name == name.ToLowerInvariant().Trim());
+ return fmt != null;
+ }
+ else
+ return FFMpegCache.PixelFormats.TryGetValue(name, out fmt);
+ }
+
+ public static PixelFormat GetPixelFormat(string name)
+ {
+ if (TryGetPixelFormat(name, out var fmt))
+ return fmt;
+ throw new FFMpegException(FFMpegExceptionType.Operation, $"Pixel format \"{name}\" not supported");
+ }
+ #endregion
+
+ #region Codecs
+
+ private static void ParsePartOfCodecs(Dictionary<string, Codec> codecs, string arguments, Func<string, Codec?> parser)
+ {
+ FFMpegHelper.RootExceptionCheck();
+
+ using var instance = new Instances.Instance(GlobalFFOptions.GetFFMpegBinaryPath(), arguments);
+ instance.DataReceived += (e, args) =>
+ {
+ var codec = parser(args.Data);
+ if(codec != null)
+ if (codecs.TryGetValue(codec.Name, out var parentCodec))
+ parentCodec.Merge(codec);
+ else
+ codecs.Add(codec.Name, codec);
+ };
+
+ var exitCode = instance.BlockUntilFinished();
+ if (exitCode != 0) throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\r\n", instance.OutputData));
+ }
+
+ internal static Dictionary<string, Codec> GetCodecsInternal()
+ {
+ var res = new Dictionary<string, Codec>();
+ ParsePartOfCodecs(res, "-codecs", (s) =>
+ {
+ if (Codec.TryParseFromCodecs(s, out var codec))
+ return codec;
+ return null;
+ });
+ ParsePartOfCodecs(res, "-encoders", (s) =>
+ {
+ if (Codec.TryParseFromEncodersDecoders(s, out var codec, true))
+ return codec;
+ return null;
+ });
+ ParsePartOfCodecs(res, "-decoders", (s) =>
+ {
+ if (Codec.TryParseFromEncodersDecoders(s, out var codec, false))
+ return codec;
+ return null;
+ });
+
+ return res;
+ }
+
+ public static IReadOnlyList<Codec> GetCodecs()
+ {
+ if (!GlobalFFOptions.Current.UseCache)
+ return GetCodecsInternal().Values.ToList().AsReadOnly();
+ return FFMpegCache.Codecs.Values.ToList().AsReadOnly();
+ }
+
+ public static IReadOnlyList<Codec> GetCodecs(CodecType type)
+ {
+ if (!GlobalFFOptions.Current.UseCache)
+ return GetCodecsInternal().Values.Where(x => x.Type == type).ToList().AsReadOnly();
+ return FFMpegCache.Codecs.Values.Where(x=>x.Type == type).ToList().AsReadOnly();
+ }
+
+ public static IReadOnlyList<Codec> GetVideoCodecs() => GetCodecs(CodecType.Video);
+ public static IReadOnlyList<Codec> GetAudioCodecs() => GetCodecs(CodecType.Audio);
+ public static IReadOnlyList<Codec> GetSubtitleCodecs() => GetCodecs(CodecType.Subtitle);
+ public static IReadOnlyList<Codec> GetDataCodecs() => GetCodecs(CodecType.Data);
+
+ public static bool TryGetCodec(string name, out Codec codec)
+ {
+ if (!GlobalFFOptions.Current.UseCache)
+ {
+ codec = GetCodecsInternal().Values.FirstOrDefault(x => x.Name == name.ToLowerInvariant().Trim());
+ return codec != null;
+ }
+ else
+ return FFMpegCache.Codecs.TryGetValue(name, out codec);
+ }
+
+ public static Codec GetCodec(string name)
+ {
+ if (TryGetCodec(name, out var codec) && codec != null)
+ return codec;
+ throw new FFMpegException(FFMpegExceptionType.Operation, $"Codec \"{name}\" not supported");
+ }
+ #endregion
+
+ #region ContainerFormats
+ internal static IReadOnlyList<ContainerFormat> GetContainersFormatsInternal()
+ {
+ FFMpegHelper.RootExceptionCheck();
+
+ var list = new List<ContainerFormat>();
+ using var instance = new Instances.Instance(GlobalFFOptions.GetFFMpegBinaryPath(), "-formats");
+ instance.DataReceived += (e, args) =>
+ {
+ if (ContainerFormat.TryParse(args.Data, out var fmt))
+ list.Add(fmt);
+ };
+
+ var exitCode = instance.BlockUntilFinished();
+ if (exitCode != 0) throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\r\n", instance.OutputData));
+
+ return list.AsReadOnly();
+ }
+
+ public static IReadOnlyList<ContainerFormat> GetContainerFormats()
+ {
+ if (!GlobalFFOptions.Current.UseCache)
+ return GetContainersFormatsInternal();
+ return FFMpegCache.ContainerFormats.Values.ToList().AsReadOnly();
+ }
+
+ public static bool TryGetContainerFormat(string name, out ContainerFormat fmt)
+ {
+ if (!GlobalFFOptions.Current.UseCache)
+ {
+ fmt = GetContainersFormatsInternal().FirstOrDefault(x => x.Name == name.ToLowerInvariant().Trim());
+ return fmt != null;
+ }
+ else
+ return FFMpegCache.ContainerFormats.TryGetValue(name, out fmt);
+ }
+
+ public static ContainerFormat GetContainerFormat(string name)
+ {
+ if (TryGetContainerFormat(name, out var fmt))
+ return fmt;
+ throw new FFMpegException(FFMpegExceptionType.Operation, $"Container format \"{name}\" not supported");
+ }
+ #endregion
+
+ private static void Cleanup(IEnumerable<string> pathList)
+ {
+ foreach (var path in pathList)
+ {
+ if (File.Exists(path))
+ File.Delete(path);
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs b/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs
new file mode 100644
index 00000000..be342c40
--- /dev/null
+++ b/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs
@@ -0,0 +1,67 @@
+using System;
+using System.Drawing;
+using FFMpegCore.Arguments;
+using FFMpegCore.Enums;
+
+namespace FFMpegCore
+{
+ public class FFMpegArgumentOptions : FFMpegArgumentsBase
+ {
+ internal FFMpegArgumentOptions() { }
+
+ public FFMpegArgumentOptions WithAudioCodec(Codec audioCodec) => WithArgument(new AudioCodecArgument(audioCodec));
+ public FFMpegArgumentOptions WithAudioCodec(string audioCodec) => WithArgument(new AudioCodecArgument(audioCodec));
+ public FFMpegArgumentOptions WithAudioBitrate(AudioQuality audioQuality) => WithArgument(new AudioBitrateArgument(audioQuality));
+ public FFMpegArgumentOptions WithAudioBitrate(int bitrate) => WithArgument(new AudioBitrateArgument(bitrate));
+ public FFMpegArgumentOptions WithAudioSamplingRate(int samplingRate = 48000) => WithArgument(new AudioSamplingRateArgument(samplingRate));
+ public FFMpegArgumentOptions WithVariableBitrate(int vbr) => WithArgument(new VariableBitRateArgument(vbr));
+ public FFMpegArgumentOptions Resize(int width, int height) => WithArgument(new SizeArgument(width, height));
+ public FFMpegArgumentOptions Resize(Size? size) => WithArgument(new SizeArgument(size));
+
+
+
+ public FFMpegArgumentOptions WithBitStreamFilter(Channel channel, Filter filter) => WithArgument(new BitStreamFilterArgument(channel, filter));
+ public FFMpegArgumentOptions WithConstantRateFactor(int crf) => WithArgument(new ConstantRateFactorArgument(crf));
+ public FFMpegArgumentOptions CopyChannel(Channel channel = Channel.Both) => WithArgument(new CopyArgument(channel));
+ public FFMpegArgumentOptions DisableChannel(Channel channel) => WithArgument(new DisableChannelArgument(channel));
+ public FFMpegArgumentOptions WithDuration(TimeSpan? duration) => WithArgument(new DurationArgument(duration));
+ public FFMpegArgumentOptions WithFastStart() => WithArgument(new FaststartArgument());
+ public FFMpegArgumentOptions WithFrameOutputCount(int frames) => WithArgument(new FrameOutputCountArgument(frames));
+ public FFMpegArgumentOptions WithHardwareAcceleration(HardwareAccelerationDevice hardwareAccelerationDevice = HardwareAccelerationDevice.Auto) => WithArgument(new HardwareAccelerationArgument(hardwareAccelerationDevice));
+
+ public FFMpegArgumentOptions UsingShortest(bool shortest = true) => WithArgument(new ShortestArgument(shortest));
+ public FFMpegArgumentOptions UsingMultithreading(bool multithread) => WithArgument(new ThreadsArgument(multithread));
+ public FFMpegArgumentOptions UsingThreads(int threads) => WithArgument(new ThreadsArgument(threads));
+
+ public FFMpegArgumentOptions WithVideoCodec(Codec videoCodec) => WithArgument(new VideoCodecArgument(videoCodec));
+ public FFMpegArgumentOptions WithVideoCodec(string videoCodec) => WithArgument(new VideoCodecArgument(videoCodec));
+ public FFMpegArgumentOptions WithVideoBitrate(int bitrate) => WithArgument(new VideoBitrateArgument(bitrate));
+ public FFMpegArgumentOptions WithVideoFilters(Action<VideoFilterOptions> videoFilterOptions)
+ {
+ var videoFilterOptionsObj = new VideoFilterOptions();
+ videoFilterOptions(videoFilterOptionsObj);
+ return WithArgument(new VideoFiltersArgument(videoFilterOptionsObj));
+ }
+
+ public FFMpegArgumentOptions WithFramerate(double framerate) => WithArgument(new FrameRateArgument(framerate));
+ public FFMpegArgumentOptions WithoutMetadata() => WithArgument(new RemoveMetadataArgument());
+ public FFMpegArgumentOptions WithSpeedPreset(Speed speed) => WithArgument(new SpeedPresetArgument(speed));
+ public FFMpegArgumentOptions WithStartNumber(int startNumber) => WithArgument(new StartNumberArgument(startNumber));
+ public FFMpegArgumentOptions WithCustomArgument(string argument) => WithArgument(new CustomArgument(argument));
+
+ public FFMpegArgumentOptions Seek(TimeSpan? seekTo) => WithArgument(new SeekArgument(seekTo));
+ public FFMpegArgumentOptions Loop(int times) => WithArgument(new LoopArgument(times));
+ public FFMpegArgumentOptions OverwriteExisting() => WithArgument(new OverwriteArgument());
+
+ public FFMpegArgumentOptions ForceFormat(ContainerFormat format) => WithArgument(new ForceFormatArgument(format));
+ public FFMpegArgumentOptions ForceFormat(string format) => WithArgument(new ForceFormatArgument(format));
+ public FFMpegArgumentOptions ForcePixelFormat(string pixelFormat) => WithArgument(new ForcePixelFormat(pixelFormat));
+ public FFMpegArgumentOptions ForcePixelFormat(PixelFormat pixelFormat) => WithArgument(new ForcePixelFormat(pixelFormat));
+
+ public FFMpegArgumentOptions WithArgument(IArgument argument)
+ {
+ Arguments.Add(argument);
+ return this;
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/FFMpegArgumentProcessor.cs b/FFMpegCore/FFMpeg/FFMpegArgumentProcessor.cs
new file mode 100644
index 00000000..67607afa
--- /dev/null
+++ b/FFMpegCore/FFMpeg/FFMpegArgumentProcessor.cs
@@ -0,0 +1,189 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Globalization;
+using System.Text.RegularExpressions;
+using System.Threading;
+using System.Threading.Tasks;
+using FFMpegCore.Exceptions;
+using FFMpegCore.Helpers;
+using Instances;
+
+namespace FFMpegCore
+{
+ public class FFMpegArgumentProcessor
+ {
+ private static readonly Regex ProgressRegex = new Regex(@"time=(\d\d:\d\d:\d\d.\d\d?)", RegexOptions.Compiled);
+ private readonly FFMpegArguments _ffMpegArguments;
+ private Action<double>? _onPercentageProgress;
+ private Action<TimeSpan>? _onTimeProgress;
+ private Action<string, DataType>? _onOutput;
+ private TimeSpan? _totalTimespan;
+
+ internal FFMpegArgumentProcessor(FFMpegArguments ffMpegArguments)
+ {
+ _ffMpegArguments = ffMpegArguments;
+ }
+
+ public string Arguments => _ffMpegArguments.Text;
+
+ private event EventHandler<int> CancelEvent = null!;
+
+ public FFMpegArgumentProcessor NotifyOnProgress(Action<double> onPercentageProgress, TimeSpan totalTimeSpan)
+ {
+ _totalTimespan = totalTimeSpan;
+ _onPercentageProgress = onPercentageProgress;
+ return this;
+ }
+ public FFMpegArgumentProcessor NotifyOnProgress(Action<TimeSpan> onTimeProgress)
+ {
+ _onTimeProgress = onTimeProgress;
+ return this;
+ }
+ public FFMpegArgumentProcessor NotifyOnOutput(Action<string, DataType> onOutput)
+ {
+ _onOutput = onOutput;
+ return this;
+ }
+ public FFMpegArgumentProcessor CancellableThrough(out Action cancel, int timeout = 0)
+ {
+ cancel = () => CancelEvent?.Invoke(this, timeout);
+ return this;
+ }
+ public bool ProcessSynchronously(bool throwOnError = true, FFOptions? ffMpegOptions = null)
+ {
+ using var instance = PrepareInstance(ffMpegOptions ?? GlobalFFOptions.Current, out var cancellationTokenSource);
+ var errorCode = -1;
+
+ void OnCancelEvent(object sender, int timeout)
+ {
+ instance.SendInput("q");
+
+ if (!cancellationTokenSource.Token.WaitHandle.WaitOne(timeout, true))
+ {
+ cancellationTokenSource.Cancel();
+ instance.Started = false;
+ }
+ }
+ CancelEvent += OnCancelEvent;
+ instance.Exited += delegate { cancellationTokenSource.Cancel(); };
+
+ try
+ {
+ _ffMpegArguments.Pre();
+ Task.WaitAll(instance.FinishedRunning().ContinueWith(t =>
+ {
+ errorCode = t.Result;
+ cancellationTokenSource.Cancel();
+ _ffMpegArguments.Post();
+ }), _ffMpegArguments.During(cancellationTokenSource.Token));
+ }
+ catch (Exception e)
+ {
+ if (!HandleException(throwOnError, e, instance.ErrorData)) return false;
+ }
+ finally
+ {
+ CancelEvent -= OnCancelEvent;
+ }
+
+ return HandleCompletion(throwOnError, errorCode, instance.ErrorData);
+ }
+
+ public async Task<bool> ProcessAsynchronously(bool throwOnError = true, FFOptions? ffMpegOptions = null)
+ {
+ using var instance = PrepareInstance(ffMpegOptions ?? GlobalFFOptions.Current, out var cancellationTokenSource);
+ var errorCode = -1;
+
+ void OnCancelEvent(object sender, int timeout)
+ {
+ instance.SendInput("q");
+
+ if (!cancellationTokenSource.Token.WaitHandle.WaitOne(timeout, true))
+ {
+ cancellationTokenSource.Cancel();
+ instance.Started = false;
+ }
+ }
+ CancelEvent += OnCancelEvent;
+
+ try
+ {
+ _ffMpegArguments.Pre();
+ await Task.WhenAll(instance.FinishedRunning().ContinueWith(t =>
+ {
+ errorCode = t.Result;
+ cancellationTokenSource.Cancel();
+ _ffMpegArguments.Post();
+ }), _ffMpegArguments.During(cancellationTokenSource.Token)).ConfigureAwait(false);
+ }
+ catch (Exception e)
+ {
+ if (!HandleException(throwOnError, e, instance.ErrorData)) return false;
+ }
+ finally
+ {
+ CancelEvent -= OnCancelEvent;
+ }
+
+ return HandleCompletion(throwOnError, errorCode, instance.ErrorData);
+ }
+
+ private bool HandleCompletion(bool throwOnError, int exitCode, IReadOnlyList<string> errorData)
+ {
+ if (throwOnError && exitCode != 0)
+ throw new FFMpegException(FFMpegExceptionType.Process, $"ffmpeg exited with non-zero exit-code ({exitCode} - {string.Join("\n", errorData)})", null, string.Join("\n", errorData));
+
+ _onPercentageProgress?.Invoke(100.0);
+ if (_totalTimespan.HasValue) _onTimeProgress?.Invoke(_totalTimespan.Value);
+
+ return exitCode == 0;
+ }
+
+ private Instance PrepareInstance(FFOptions ffMpegOptions,
+ out CancellationTokenSource cancellationTokenSource)
+ {
+ FFMpegHelper.RootExceptionCheck();
+ FFMpegHelper.VerifyFFMpegExists(ffMpegOptions);
+ var startInfo = new ProcessStartInfo
+ {
+ FileName = GlobalFFOptions.GetFFMpegBinaryPath(ffMpegOptions),
+ Arguments = _ffMpegArguments.Text,
+ StandardOutputEncoding = ffMpegOptions.Encoding,
+ StandardErrorEncoding = ffMpegOptions.Encoding,
+ };
+ var instance = new Instance(startInfo);
+ cancellationTokenSource = new CancellationTokenSource();
+
+ if (_onOutput != null || _onTimeProgress != null || (_onPercentageProgress != null && _totalTimespan != null))
+ instance.DataReceived += OutputData;
+
+ return instance;
+ }
+
+
+ private static bool HandleException(bool throwOnError, Exception e, IReadOnlyList<string> errorData)
+ {
+ if (!throwOnError)
+ return false;
+
+ throw new FFMpegException(FFMpegExceptionType.Process, "Exception thrown during processing", e, string.Join("\n", errorData));
+ }
+
+ private void OutputData(object sender, (DataType Type, string Data) msg)
+ {
+ Debug.WriteLine(msg.Data);
+ _onOutput?.Invoke(msg.Data, msg.Type);
+
+ var match = ProgressRegex.Match(msg.Data);
+ if (!match.Success) return;
+
+ var processed = TimeSpan.Parse(match.Groups[1].Value, CultureInfo.InvariantCulture);
+ _onTimeProgress?.Invoke(processed);
+
+ if (_onPercentageProgress == null || _totalTimespan == null) return;
+ var percentage = Math.Round(processed.TotalSeconds / _totalTimespan.Value.TotalSeconds * 100, 2);
+ _onPercentageProgress(percentage);
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/FFMpegArguments.cs b/FFMpegCore/FFMpeg/FFMpegArguments.cs
new file mode 100644
index 00000000..847e68c7
--- /dev/null
+++ b/FFMpegCore/FFMpeg/FFMpegArguments.cs
@@ -0,0 +1,81 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using FFMpegCore.Arguments;
+using FFMpegCore.Pipes;
+
+namespace FFMpegCore
+{
+ public sealed class FFMpegArguments : FFMpegArgumentsBase
+ {
+ private readonly FFMpegGlobalArguments _globalArguments = new FFMpegGlobalArguments();
+
+ private FFMpegArguments() { }
+
+ public string Text => string.Join(" ", _globalArguments.Arguments.Concat(Arguments).Select(arg => arg.Text));
+
+ public static FFMpegArguments FromConcatInput(IEnumerable<string> filePaths, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new ConcatArgument(filePaths), addArguments);
+ public static FFMpegArguments FromDemuxConcatInput(IEnumerable<string> filePaths, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new DemuxConcatArgument(filePaths), addArguments);
+ public static FFMpegArguments FromFileInput(string filePath, bool verifyExists = true, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputArgument(verifyExists, filePath), addArguments);
+ public static FFMpegArguments FromFileInput(FileInfo fileInfo, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputArgument(fileInfo.FullName, false), addArguments);
+ public static FFMpegArguments FromUrlInput(Uri uri, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputArgument(uri.AbsoluteUri, false), addArguments);
+ public static FFMpegArguments FromDeviceInput(string device, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputDeviceArgument(device), addArguments);
+ public static FFMpegArguments FromPipeInput(IPipeSource sourcePipe, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputPipeArgument(sourcePipe), addArguments);
+
+
+ public FFMpegArguments WithGlobalOptions(Action<FFMpegGlobalArguments> configureOptions)
+ {
+ configureOptions(_globalArguments);
+ return this;
+ }
+
+ public FFMpegArguments AddConcatInput(IEnumerable<string> filePaths, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new ConcatArgument(filePaths), addArguments);
+ public FFMpegArguments AddDemuxConcatInput(IEnumerable<string> filePaths, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new DemuxConcatArgument(filePaths), addArguments);
+ public FFMpegArguments AddFileInput(string filePath, bool verifyExists = true, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputArgument(verifyExists, filePath), addArguments);
+ public FFMpegArguments AddFileInput(FileInfo fileInfo, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputArgument(fileInfo.FullName, false), addArguments);
+ public FFMpegArguments AddUrlInput(Uri uri, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputArgument(uri.AbsoluteUri, false), addArguments);
+ public FFMpegArguments AddPipeInput(IPipeSource sourcePipe, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputPipeArgument(sourcePipe), addArguments);
+
+ private FFMpegArguments WithInput(IInputArgument inputArgument, Action<FFMpegArgumentOptions>? addArguments)
+ {
+ var arguments = new FFMpegArgumentOptions();
+ addArguments?.Invoke(arguments);
+ Arguments.AddRange(arguments.Arguments);
+ Arguments.Add(inputArgument);
+ return this;
+ }
+
+ public FFMpegArgumentProcessor OutputToFile(string file, bool overwrite = true, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputArgument(file, overwrite), addArguments);
+ public FFMpegArgumentProcessor OutputToUrl(string uri, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputUrlArgument(uri), addArguments);
+ public FFMpegArgumentProcessor OutputToUrl(Uri uri, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputUrlArgument(uri.ToString()), addArguments);
+ public FFMpegArgumentProcessor OutputToPipe(IPipeSink reader, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputPipeArgument(reader), addArguments);
+
+ private FFMpegArgumentProcessor ToProcessor(IOutputArgument argument, Action<FFMpegArgumentOptions>? addArguments)
+ {
+ var args = new FFMpegArgumentOptions();
+ addArguments?.Invoke(args);
+ Arguments.AddRange(args.Arguments);
+ Arguments.Add(argument);
+ return new FFMpegArgumentProcessor(this);
+ }
+
+ internal void Pre()
+ {
+ foreach (var argument in Arguments.OfType<IInputOutputArgument>())
+ argument.Pre();
+ }
+ internal async Task During(CancellationToken cancellationToken = default)
+ {
+ var inputOutputArguments = Arguments.OfType<IInputOutputArgument>();
+ await Task.WhenAll(inputOutputArguments.Select(io => io.During(cancellationToken))).ConfigureAwait(false);
+ }
+ internal void Post()
+ {
+ foreach (var argument in Arguments.OfType<IInputOutputArgument>())
+ argument.Post();
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/FFMpegArgumentsBase.cs b/FFMpegCore/FFMpeg/FFMpegArgumentsBase.cs
new file mode 100644
index 00000000..fc51ab1f
--- /dev/null
+++ b/FFMpegCore/FFMpeg/FFMpegArgumentsBase.cs
@@ -0,0 +1,10 @@
+using System.Collections.Generic;
+using FFMpegCore.Arguments;
+
+namespace FFMpegCore
+{
+ public abstract class FFMpegArgumentsBase
+ {
+ internal readonly List<IArgument> Arguments = new List<IArgument>();
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/FFMpegCache.cs b/FFMpegCore/FFMpeg/FFMpegCache.cs
new file mode 100644
index 00000000..08472021
--- /dev/null
+++ b/FFMpegCore/FFMpeg/FFMpegCache.cs
@@ -0,0 +1,54 @@
+using FFMpegCore.Enums;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace FFMpegCore
+{
+ static class FFMpegCache
+ {
+ private static readonly object _syncObject = new object();
+ private static Dictionary<string, PixelFormat>? _pixelFormats;
+ private static Dictionary<string, Codec>? _codecs;
+ private static Dictionary<string, ContainerFormat>? _containers;
+
+ public static IReadOnlyDictionary<string, PixelFormat> PixelFormats
+ {
+ get
+ {
+ if (_pixelFormats == null) //First check not thread safe
+ lock (_syncObject)
+ if (_pixelFormats == null)//Second check thread safe
+ _pixelFormats = FFMpeg.GetPixelFormatsInternal().ToDictionary(x => x.Name);
+
+ return _pixelFormats;
+ }
+
+ }
+ public static IReadOnlyDictionary<string, Codec> Codecs
+ {
+ get
+ {
+ if (_codecs == null) //First check not thread safe
+ lock (_syncObject)
+ if (_codecs == null)//Second check thread safe
+ _codecs = FFMpeg.GetCodecsInternal();
+
+ return _codecs;
+ }
+
+ }
+ public static IReadOnlyDictionary<string, ContainerFormat> ContainerFormats
+ {
+ get
+ {
+ if (_containers == null) //First check not thread safe
+ lock (_syncObject)
+ if (_containers == null)//Second check thread safe
+ _containers = FFMpeg.GetContainersFormatsInternal().ToDictionary(x => x.Name);
+
+ return _containers;
+ }
+
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/FFMpegGlobalArguments.cs b/FFMpegCore/FFMpeg/FFMpegGlobalArguments.cs
new file mode 100644
index 00000000..e7d6e249
--- /dev/null
+++ b/FFMpegCore/FFMpeg/FFMpegGlobalArguments.cs
@@ -0,0 +1,18 @@
+using FFMpegCore.Arguments;
+
+namespace FFMpegCore
+{
+ public sealed class FFMpegGlobalArguments : FFMpegArgumentsBase
+ {
+ internal FFMpegGlobalArguments() { }
+
+ public FFMpegGlobalArguments WithVerbosityLevel(VerbosityLevel verbosityLevel = VerbosityLevel.Error) => WithOption(new VerbosityLevelArgument(verbosityLevel));
+
+ private FFMpegGlobalArguments WithOption(IArgument argument)
+ {
+ Arguments.Add(argument);
+ return this;
+ }
+
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFMpeg/Pipes/IPipeSink.cs b/FFMpegCore/FFMpeg/Pipes/IPipeSink.cs
new file mode 100644
index 00000000..e5f2bf46
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Pipes/IPipeSink.cs
@@ -0,0 +1,12 @@
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Pipes
+{
+ public interface IPipeSink
+ {
+ Task ReadAsync(Stream inputStream, CancellationToken cancellationToken);
+ string GetFormat();
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Pipes/IPipeSource.cs b/FFMpegCore/FFMpeg/Pipes/IPipeSource.cs
new file mode 100644
index 00000000..c2504216
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Pipes/IPipeSource.cs
@@ -0,0 +1,15 @@
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Pipes
+{
+ /// <summary>
+ /// Interface for ffmpeg pipe source data IO
+ /// </summary>
+ public interface IPipeSource
+ {
+ string GetStreamArguments();
+ Task WriteAsync(Stream outputStream, CancellationToken cancellationToken);
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Pipes/IVideoFrame.cs b/FFMpegCore/FFMpeg/Pipes/IVideoFrame.cs
new file mode 100644
index 00000000..dd583d90
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Pipes/IVideoFrame.cs
@@ -0,0 +1,19 @@
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Pipes
+{
+ /// <summary>
+ /// Interface for a video frame
+ /// </summary>
+ public interface IVideoFrame
+ {
+ int Width { get; }
+ int Height { get; }
+ string Format { get; }
+
+ void Serialize(Stream pipe);
+ Task SerializeAsync(Stream pipe, CancellationToken token);
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Pipes/PipeHelpers.cs b/FFMpegCore/FFMpeg/Pipes/PipeHelpers.cs
new file mode 100644
index 00000000..c680c3e1
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Pipes/PipeHelpers.cs
@@ -0,0 +1,18 @@
+using System;
+using System.Runtime.InteropServices;
+
+namespace FFMpegCore.Pipes
+{
+ static class PipeHelpers
+ {
+ public static string GetUnqiuePipeName() => $"FFMpegCore_{Guid.NewGuid()}";
+
+ public static string GetPipePath(string pipeName)
+ {
+ if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+ return $@"\\.\pipe\{pipeName}";
+ else
+ return $"unix:/tmp/CoreFxPipe_{pipeName}";
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs b/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs
new file mode 100644
index 00000000..0e3ab61a
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs
@@ -0,0 +1,73 @@
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using FFMpegCore.Exceptions;
+
+namespace FFMpegCore.Pipes
+{
+ /// <summary>
+ /// Implementation of <see cref="IPipeSource"/> for a raw video stream that is gathered from an enumeration of <see cref="IVideoFrame"/> instances
+ /// </summary>
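+ /// <example>
+ /// A minimal sketch of piping generated frames into ffmpeg; CreateFrames() is a hypothetical helper
+ /// producing IVideoFrame instances and is not part of this library:
+ /// <code>
+ /// var frames = CreateFrames(); // hypothetical: yields IVideoFrame implementations
+ /// var videoFramesSource = new RawVideoPipeSource(frames) { FrameRate = 30 };
+ /// FFMpegArguments
+ ///     .FromPipeInput(videoFramesSource)
+ ///     .OutputToFile("output.mp4")
+ ///     .ProcessSynchronously();
+ /// </code>
+ /// </example>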
+ public class RawVideoPipeSource : IPipeSource
+ {
+ public string StreamFormat { get; private set; } = null!;
+ public int Width { get; private set; }
+ public int Height { get; private set; }
+ public double FrameRate { get; set; } = 25;
+ private bool _formatInitialized;
+ private readonly IEnumerator<IVideoFrame> _framesEnumerator;
+
+ public RawVideoPipeSource(IEnumerator<IVideoFrame> framesEnumerator)
+ {
+ _framesEnumerator = framesEnumerator;
+ }
+
+ public RawVideoPipeSource(IEnumerable<IVideoFrame> framesEnumerator) : this(framesEnumerator.GetEnumerator()) { }
+
+ public string GetStreamArguments()
+ {
+ if (!_formatInitialized)
+ {
+ //see input format references https://lists.ffmpeg.org/pipermail/ffmpeg-user/2012-July/007742.html
+ if (_framesEnumerator.Current == null)
+ {
+ if (!_framesEnumerator.MoveNext())
+ throw new InvalidOperationException("Enumerator is empty, unable to get frame");
+ }
+ StreamFormat = _framesEnumerator.Current!.Format;
+ Width = _framesEnumerator.Current!.Width;
+ Height = _framesEnumerator.Current!.Height;
+
+ _formatInitialized = true;
+ }
+
+ return $"-f rawvideo -r {FrameRate.ToString(CultureInfo.InvariantCulture)} -pix_fmt {StreamFormat} -s {Width}x{Height}";
+ }
+
+ public async Task WriteAsync(Stream outputStream, CancellationToken cancellationToken)
+ {
+ if (_framesEnumerator.Current != null)
+ {
+ CheckFrameAndThrow(_framesEnumerator.Current);
+ await _framesEnumerator.Current.SerializeAsync(outputStream, cancellationToken).ConfigureAwait(false);
+ }
+
+ while (_framesEnumerator.MoveNext())
+ {
+ CheckFrameAndThrow(_framesEnumerator.Current!);
+ await _framesEnumerator.Current!.SerializeAsync(outputStream, cancellationToken).ConfigureAwait(false);
+ }
+ }
+
+ private void CheckFrameAndThrow(IVideoFrame frame)
+ {
+ if (frame.Width != Width || frame.Height != Height || frame.Format != StreamFormat)
+ throw new FFMpegStreamFormatException(FFMpegExceptionType.Operation, "Video frame is not the same format as created raw video stream\r\n" +
+ $"Frame format: {frame.Width}x{frame.Height} pix_fmt: {frame.Format}\r\n" +
+ $"Stream format: {Width}x{Height} pix_fmt: {StreamFormat}");
+ }
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Pipes/StreamPipeSink.cs b/FFMpegCore/FFMpeg/Pipes/StreamPipeSink.cs
new file mode 100644
index 00000000..addc14ed
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Pipes/StreamPipeSink.cs
@@ -0,0 +1,28 @@
+using System;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Pipes
+{
+ public class StreamPipeSink : IPipeSink
+ {
+ public Func<Stream, CancellationToken, Task> Writer { get; }
+ public int BlockSize { get; set; } = 4096;
+ public string Format { get; set; } = string.Empty;
+
+ public StreamPipeSink(Func<Stream, CancellationToken, Task> writer)
+ {
+ Writer = writer;
+ }
+ public StreamPipeSink(Stream destination)
+ {
+ Writer = (inputStream, cancellationToken) => inputStream.CopyToAsync(destination, BlockSize, cancellationToken);
+ }
+
+ public Task ReadAsync(Stream inputStream, CancellationToken cancellationToken)
+ => Writer(inputStream, cancellationToken);
+
+ public string GetFormat() => Format;
+ }
+}
diff --git a/FFMpegCore/FFMpeg/Pipes/StreamPipeSource.cs b/FFMpegCore/FFMpeg/Pipes/StreamPipeSource.cs
new file mode 100644
index 00000000..99bc0817
--- /dev/null
+++ b/FFMpegCore/FFMpeg/Pipes/StreamPipeSource.cs
@@ -0,0 +1,25 @@
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace FFMpegCore.Pipes
+{
+ /// <summary>
+ /// Implementation of <see cref="IPipeSource"/> used for stream redirection
+ /// </summary>
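+ /// <example>
+ /// A minimal sketch of redirecting an existing stream into ffmpeg; the input is assumed to be a
+ /// container ffmpeg can probe from a pipe (e.g. webm/matroska), and the paths are placeholders:
+ /// <code>
+ /// await using var inputStream = File.OpenRead("input.webm");
+ /// await FFMpegArguments
+ ///     .FromPipeInput(new StreamPipeSource(inputStream))
+ ///     .OutputToFile("output.mp4")
+ ///     .ProcessAsynchronously();
+ /// </code>
+ /// </example>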
+ public class StreamPipeSource : IPipeSource
+ {
+ public Stream Source { get; }
+ public int BlockSize { get; } = 4096;
+ public string StreamFormat { get; } = string.Empty;
+
+ public StreamPipeSource(Stream source)
+ {
+ Source = source;
+ }
+
+ public string GetStreamArguments() => StreamFormat;
+
+ public Task WriteAsync(Stream outputStream, CancellationToken cancellationToken) => Source.CopyToAsync(outputStream, BlockSize, cancellationToken);
+ }
+}
diff --git a/FFMpegCore/FFMpeg/bin/presets/ffprobe.xsd b/FFMpegCore/FFMpeg/bin/presets/ffprobe.xsd
new file mode 100644
index 00000000..8a1e1027
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/ffprobe.xsd
@@ -0,0 +1,242 @@
diff --git a/FFMpegCore/FFMpeg/bin/presets/libvpx-1080p.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libvpx-1080p.ffpreset
new file mode 100644
index 00000000..cf259321
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libvpx-1080p.ffpreset
@@ -0,0 +1,19 @@
+vcodec=libvpx
+
+g=120
+lag-in-frames=16
+deadline=good
+cpu-used=0
+vprofile=1
+qmax=51
+qmin=11
+slices=4
+b=2M
+
+#ignored unless using -pass 2
+maxrate=24M
+minrate=100k
+auto-alt-ref=1
+arnr-maxframes=7
+arnr-strength=5
+arnr-type=centered
diff --git a/FFMpegCore/FFMpeg/bin/presets/libvpx-1080p50_60.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libvpx-1080p50_60.ffpreset
new file mode 100644
index 00000000..4a88040d
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libvpx-1080p50_60.ffpreset
@@ -0,0 +1,19 @@
+vcodec=libvpx
+
+g=120
+lag-in-frames=25
+deadline=good
+cpu-used=0
+vprofile=1
+qmax=51
+qmin=11
+slices=4
+b=2M
+
+#ignored unless using -pass 2
+maxrate=24M
+minrate=100k
+auto-alt-ref=1
+arnr-maxframes=7
+arnr-strength=5
+arnr-type=centered
diff --git a/FFMpegCore/FFMpeg/bin/presets/libvpx-360p.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libvpx-360p.ffpreset
new file mode 100644
index 00000000..f9729ba2
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libvpx-360p.ffpreset
@@ -0,0 +1,18 @@
+vcodec=libvpx
+
+g=120
+lag-in-frames=16
+deadline=good
+cpu-used=0
+vprofile=0
+qmax=63
+qmin=0
+b=768k
+
+#ignored unless using -pass 2
+maxrate=1.5M
+minrate=40k
+auto-alt-ref=1
+arnr-maxframes=7
+arnr-strength=5
+arnr-type=centered
diff --git a/FFMpegCore/FFMpeg/bin/presets/libvpx-720p.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libvpx-720p.ffpreset
new file mode 100644
index 00000000..e84cc150
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libvpx-720p.ffpreset
@@ -0,0 +1,19 @@
+vcodec=libvpx
+
+g=120
+lag-in-frames=16
+deadline=good
+cpu-used=0
+vprofile=0
+qmax=51
+qmin=11
+slices=4
+b=2M
+
+#ignored unless using -pass 2
+maxrate=24M
+minrate=100k
+auto-alt-ref=1
+arnr-maxframes=7
+arnr-strength=5
+arnr-type=centered
diff --git a/FFMpegCore/FFMpeg/bin/presets/libvpx-720p50_60.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libvpx-720p50_60.ffpreset
new file mode 100644
index 00000000..8fce2bfb
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libvpx-720p50_60.ffpreset
@@ -0,0 +1,19 @@
+vcodec=libvpx
+
+g=120
+lag-in-frames=25
+deadline=good
+cpu-used=0
+vprofile=0
+qmax=51
+qmin=11
+slices=4
+b=2M
+
+#ignored unless using -pass 2
+maxrate=24M
+minrate=100k
+auto-alt-ref=1
+arnr-maxframes=7
+arnr-strength=5
+arnr-type=centered
diff --git a/FFMpegCore/FFMpeg/bin/presets/libvpx-ultrafast.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libvpx-ultrafast.ffpreset
new file mode 100644
index 00000000..b6d20808
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libvpx-ultrafast.ffpreset
@@ -0,0 +1,24 @@
+coder=0
+flags=-loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=0
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=0
+i_qfactor=0.71
+b_strategy=0
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=0
+refs=1
+directpred=1
+trellis=0
+flags2=-bpyramid-mixed_refs-wpred-dct8x8+fastpskip-mbtree
+wpredp=0
+aq_mode=0
+rc_lookahead=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-baseline.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-baseline.ffpreset
new file mode 100644
index 00000000..ee7654bd
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-baseline.ffpreset
@@ -0,0 +1,4 @@
+coder=0
+bf=0
+flags2=-wpred-dct8x8
+wpredp=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-fast.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-fast.ffpreset
new file mode 100644
index 00000000..cac6534e
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-fast.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partb8x8
+me_method=hex
+subq=6
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=2
+directpred=1
+trellis=1
+flags2=+bpyramid+mixed_refs+wpred+dct8x8+fastpskip
+wpredp=2
+rc_lookahead=30
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-fast_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-fast_firstpass.ffpreset
new file mode 100644
index 00000000..65ec0112
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-fast_firstpass.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=2
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=1
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred-dct8x8+fastpskip
+wpredp=2
+rc_lookahead=30
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-faster.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-faster.ffpreset
new file mode 100644
index 00000000..a32eed3c
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-faster.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partb8x8
+me_method=hex
+subq=4
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=2
+directpred=1
+trellis=1
+flags2=+bpyramid-mixed_refs+wpred+dct8x8+fastpskip
+wpredp=1
+rc_lookahead=20
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-faster_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-faster_firstpass.ffpreset
new file mode 100644
index 00000000..c777eb4e
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-faster_firstpass.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=2
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=1
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred-dct8x8+fastpskip
+wpredp=1
+rc_lookahead=20
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-ipod320.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-ipod320.ffpreset
new file mode 100644
index 00000000..943b5213
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-ipod320.ffpreset
@@ -0,0 +1,7 @@
+coder=0
+bf=0
+flags2=-wpred-dct8x8
+level=13
+maxrate=768000
+bufsize=3000000
+wpredp=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-ipod640.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-ipod640.ffpreset
new file mode 100644
index 00000000..1ed3d9fb
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-ipod640.ffpreset
@@ -0,0 +1,8 @@
+coder=0
+bf=0
+refs=1
+flags2=-wpred-dct8x8
+level=30
+maxrate=10000000
+bufsize=10000000
+wpredp=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_fast.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_fast.ffpreset
new file mode 100644
index 00000000..ea08d3c0
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_fast.ffpreset
@@ -0,0 +1,20 @@
+coder=0
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8+parti4x4+partp8x8-partp4x4-partb8x8
+me_method=hex
+subq=3
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+directpred=1
+flags2=+fastpskip
+cqp=0
+wpredp=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_max.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_max.ffpreset
new file mode 100644
index 00000000..8c049c9c
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_max.ffpreset
@@ -0,0 +1,21 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partp4x4-partb8x8
+me_method=esa
+subq=8
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+refs=16
+directpred=1
+flags2=+mixed_refs+dct8x8+fastpskip
+cqp=0
+wpredp=2
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_medium.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_medium.ffpreset
new file mode 100644
index 00000000..558be6c9
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_medium.ffpreset
@@ -0,0 +1,20 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8+parti4x4+partp8x8+partp4x4-partb8x8
+me_method=hex
+subq=5
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+directpred=1
+flags2=+fastpskip
+cqp=0
+wpredp=2
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_slow.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_slow.ffpreset
new file mode 100644
index 00000000..ff641cf4
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_slow.ffpreset
@@ -0,0 +1,21 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partp4x4-partb8x8
+me_method=umh
+subq=6
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+refs=2
+directpred=1
+flags2=+dct8x8+fastpskip
+cqp=0
+wpredp=2
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_slower.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_slower.ffpreset
new file mode 100644
index 00000000..854f74b2
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_slower.ffpreset
@@ -0,0 +1,21 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partp4x4-partb8x8
+me_method=umh
+subq=8
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+refs=4
+directpred=1
+flags2=+mixed_refs+dct8x8+fastpskip
+cqp=0
+wpredp=2
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_ultrafast.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_ultrafast.ffpreset
new file mode 100644
index 00000000..1c429f21
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-lossless_ultrafast.ffpreset
@@ -0,0 +1,19 @@
+coder=0
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partp4x4-partb8x8
+me_method=dia
+subq=0
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+directpred=1
+flags2=+fastpskip
+cqp=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-main.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-main.ffpreset
new file mode 100644
index 00000000..d1dc7dda
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-main.ffpreset
@@ -0,0 +1 @@
+flags2=-dct8x8
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-medium.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-medium.ffpreset
new file mode 100644
index 00000000..039f1d60
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-medium.ffpreset
@@ -0,0 +1,22 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partb8x8
+me_method=hex
+subq=7
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=3
+directpred=1
+trellis=1
+flags2=+bpyramid+mixed_refs+wpred+dct8x8+fastpskip
+wpredp=2
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-medium_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-medium_firstpass.ffpreset
new file mode 100644
index 00000000..e4159892
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-medium_firstpass.ffpreset
@@ -0,0 +1,22 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=2
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=1
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred-dct8x8+fastpskip
+wpredp=2
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-placebo.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-placebo.ffpreset
new file mode 100644
index 00000000..fae2222d
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-placebo.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partp4x4+partb8x8
+me_method=tesa
+subq=10
+me_range=24
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=2
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=16
+refs=16
+directpred=3
+trellis=2
+flags2=+bpyramid+mixed_refs+wpred+dct8x8-fastpskip
+wpredp=2
+rc_lookahead=60
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-placebo_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-placebo_firstpass.ffpreset
new file mode 100644
index 00000000..fae2222d
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-placebo_firstpass.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partp4x4+partb8x8
+me_method=tesa
+subq=10
+me_range=24
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=2
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=16
+refs=16
+directpred=3
+trellis=2
+flags2=+bpyramid+mixed_refs+wpred+dct8x8-fastpskip
+wpredp=2
+rc_lookahead=60
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-slow.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-slow.ffpreset
new file mode 100644
index 00000000..0f3f4d4b
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-slow.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partb8x8
+me_method=umh
+subq=8
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=2
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=5
+directpred=3
+trellis=1
+flags2=+bpyramid+mixed_refs+wpred+dct8x8+fastpskip
+wpredp=2
+rc_lookahead=50
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-slow_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-slow_firstpass.ffpreset
new file mode 100644
index 00000000..89e74e45
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-slow_firstpass.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=2
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=2
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=3
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred-dct8x8+fastpskip
+wpredp=2
+rc_lookahead=50
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-slower.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-slower.ffpreset
new file mode 100644
index 00000000..f3a412c0
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-slower.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partp4x4+partb8x8
+me_method=umh
+subq=9
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=2
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=8
+directpred=3
+trellis=2
+flags2=+bpyramid+mixed_refs+wpred+dct8x8+fastpskip
+wpredp=2
+rc_lookahead=60
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-slower_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-slower_firstpass.ffpreset
new file mode 100644
index 00000000..aa1eb1e9
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-slower_firstpass.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=2
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=2
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=3
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred-dct8x8+fastpskip
+wpredp=2
+rc_lookahead=60
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-superfast.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-superfast.ffpreset
new file mode 100644
index 00000000..bcd45993
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-superfast.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=1
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=1
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred+dct8x8+fastpskip-mbtree
+wpredp=0
+rc_lookahead=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-superfast_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-superfast_firstpass.ffpreset
new file mode 100644
index 00000000..ef06b1d5
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-superfast_firstpass.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=1
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=1
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred-dct8x8+fastpskip-mbtree
+wpredp=0
+rc_lookahead=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-ultrafast.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-ultrafast.ffpreset
new file mode 100644
index 00000000..b6d20808
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-ultrafast.ffpreset
@@ -0,0 +1,24 @@
+coder=0
+flags=-loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=0
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=0
+i_qfactor=0.71
+b_strategy=0
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=0
+refs=1
+directpred=1
+trellis=0
+flags2=-bpyramid-mixed_refs-wpred-dct8x8+fastpskip-mbtree
+wpredp=0
+aq_mode=0
+rc_lookahead=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-ultrafast_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-ultrafast_firstpass.ffpreset
new file mode 100644
index 00000000..b6d20808
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-ultrafast_firstpass.ffpreset
@@ -0,0 +1,24 @@
+coder=0
+flags=-loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=0
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=0
+i_qfactor=0.71
+b_strategy=0
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=0
+refs=1
+directpred=1
+trellis=0
+flags2=-bpyramid-mixed_refs-wpred-dct8x8+fastpskip-mbtree
+wpredp=0
+aq_mode=0
+rc_lookahead=0
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-veryfast.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-veryfast.ffpreset
new file mode 100644
index 00000000..16ca594a
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-veryfast.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partb8x8
+me_method=hex
+subq=2
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=1
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred+dct8x8+fastpskip
+wpredp=0
+rc_lookahead=10
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-veryfast_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-veryfast_firstpass.ffpreset
new file mode 100644
index 00000000..39eaf3e2
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-veryfast_firstpass.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=2
+me_range=16
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=1
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=3
+refs=1
+directpred=1
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred-dct8x8+fastpskip
+wpredp=0
+rc_lookahead=10
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-veryslow.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-veryslow.ffpreset
new file mode 100644
index 00000000..a0606255
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-veryslow.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=+parti8x8+parti4x4+partp8x8+partp4x4+partb8x8
+me_method=umh
+subq=10
+me_range=24
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=2
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=8
+refs=16
+directpred=3
+trellis=2
+flags2=+bpyramid+mixed_refs+wpred+dct8x8+fastpskip
+wpredp=2
+rc_lookahead=60
diff --git a/FFMpegCore/FFMpeg/bin/presets/libx264-veryslow_firstpass.ffpreset b/FFMpegCore/FFMpeg/bin/presets/libx264-veryslow_firstpass.ffpreset
new file mode 100644
index 00000000..6e7079bb
--- /dev/null
+++ b/FFMpegCore/FFMpeg/bin/presets/libx264-veryslow_firstpass.ffpreset
@@ -0,0 +1,23 @@
+coder=1
+flags=+loop
+cmp=+chroma
+partitions=-parti8x8-parti4x4-partp8x8-partb8x8
+me_method=dia
+subq=2
+me_range=24
+g=250
+keyint_min=25
+sc_threshold=40
+i_qfactor=0.71
+b_strategy=2
+qcomp=0.6
+qmin=10
+qmax=51
+qdiff=4
+bf=8
+refs=1
+directpred=3
+trellis=0
+flags2=+bpyramid-mixed_refs+wpred-dct8x8+fastpskip
+wpredp=2
+rc_lookahead=60
diff --git a/FFMpegCore/FFMpegCore.csproj b/FFMpegCore/FFMpegCore.csproj
new file mode 100644
index 00000000..bf9e6824
--- /dev/null
+++ b/FFMpegCore/FFMpegCore.csproj
@@ -0,0 +1,36 @@
+
+
+
+ en
+ https://github.com/rosenbjerg/FFMpegCore
+ https://github.com/rosenbjerg/FFMpegCore
+
+ A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your .NET applications
+ 3.0.0.0
+ 3.0.0.0
+ 3.0.0.0
+ - Added support for mirroring video filter (thanks gorobvictor)
+ 8
+ 4.2.0
+ MIT
+ Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev
+ ffmpeg ffprobe convert video audio mediafile resize analyze muxing
+ GitHub
+ true
+ enable
+ netstandard2.0
+
+
+
+
+ Always
+
+
+
+
+
+
+
+
+
+
diff --git a/FFMpegCore/FFMpegCore.csproj.DotSettings b/FFMpegCore/FFMpegCore.csproj.DotSettings
new file mode 100644
index 00000000..7a8d17ac
--- /dev/null
+++ b/FFMpegCore/FFMpegCore.csproj.DotSettings
@@ -0,0 +1,3 @@
+
+ True
+ True
\ No newline at end of file
diff --git a/FFMpegCore/FFOptions.cs b/FFMpegCore/FFOptions.cs
new file mode 100644
index 00000000..1f7e4977
--- /dev/null
+++ b/FFMpegCore/FFOptions.cs
@@ -0,0 +1,37 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+
+namespace FFMpegCore
+{
+ public class FFOptions
+ {
+ /// <summary>
+ /// Folder containing the ffmpeg and ffprobe binaries. Leave empty if ffmpeg and ffprobe are present in PATH
+ /// </summary>
+ public string BinaryFolder { get; set; } = string.Empty;
+
+ /// <summary>
+ /// Folder used for temporary files necessary for static methods on FFMpeg class
+ /// </summary>
+ public string TemporaryFilesFolder { get; set; } = Path.GetTempPath();
+
+ /// <summary>
+ /// Encoding used for parsing stdout/stderr on ffmpeg and ffprobe processes
+ /// </summary>
+ public Encoding Encoding { get; set; } = Encoding.Default;
+
+ /// <summary>
+ /// Overrides for mapping ffmpeg format names to output file extensions
+ /// </summary>
+ public Dictionary<string, string> ExtensionOverrides { get; set; } = new Dictionary<string, string>
+ {
+ { "mpegts", ".ts" },
+ };
+
+ /// <summary>
+ /// Whether to cache calls to get ffmpeg codec, pixel- and container-formats
+ /// </summary>
+ public bool UseCache { get; set; } = true;
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFProbe/AudioStream.cs b/FFMpegCore/FFProbe/AudioStream.cs
new file mode 100644
index 00000000..d6f4b337
--- /dev/null
+++ b/FFMpegCore/FFProbe/AudioStream.cs
@@ -0,0 +1,10 @@
+namespace FFMpegCore
+{
+ public class AudioStream : MediaStream
+ {
+ public int Channels { get; internal set; }
+ public string ChannelLayout { get; internal set; } = null!;
+ public int SampleRateHz { get; internal set; }
+ public string Profile { get; internal set; } = null!;
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFProbe/FFProbe.cs b/FFMpegCore/FFProbe/FFProbe.cs
new file mode 100644
index 00000000..ab35457d
--- /dev/null
+++ b/FFMpegCore/FFProbe/FFProbe.cs
@@ -0,0 +1,124 @@
+using System;
+using System.IO;
+using System.Text.Json;
+using System.Threading.Tasks;
+using FFMpegCore.Arguments;
+using FFMpegCore.Exceptions;
+using FFMpegCore.Helpers;
+using FFMpegCore.Pipes;
+using Instances;
+
+namespace FFMpegCore
+{
+ public static class FFProbe
+ {
+ public static IMediaAnalysis Analyse(string filePath, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
+ {
+ if (!File.Exists(filePath))
+ throw new FFMpegException(FFMpegExceptionType.File, $"No file found at '{filePath}'");
+
+ using var instance = PrepareInstance(filePath, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
+ var exitCode = instance.BlockUntilFinished();
+ if (exitCode != 0)
+ throw new FFMpegException(FFMpegExceptionType.Process, $"ffprobe exited with non-zero exit-code ({exitCode} - {string.Join("\n", instance.ErrorData)})", null, string.Join("\n", instance.ErrorData));
+
+ return ParseOutput(instance);
+ }
+ public static IMediaAnalysis Analyse(Uri uri, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
+ {
+ using var instance = PrepareInstance(uri.AbsoluteUri, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
+ var exitCode = instance.BlockUntilFinished();
+ if (exitCode != 0)
+ throw new FFMpegException(FFMpegExceptionType.Process, $"ffprobe exited with non-zero exit-code ({exitCode} - {string.Join("\n", instance.ErrorData)})", null, string.Join("\n", instance.ErrorData));
+
+ return ParseOutput(instance);
+ }
+ public static IMediaAnalysis Analyse(Stream stream, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
+ {
+ var streamPipeSource = new StreamPipeSource(stream);
+ var pipeArgument = new InputPipeArgument(streamPipeSource);
+ using var instance = PrepareInstance(pipeArgument.PipePath, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
+ pipeArgument.Pre();
+
+ var task = instance.FinishedRunning();
+ try
+ {
+ pipeArgument.During().ConfigureAwait(false).GetAwaiter().GetResult();
+ }
+ catch (IOException) { }
+ finally
+ {
+ pipeArgument.Post();
+ }
+ var exitCode = task.ConfigureAwait(false).GetAwaiter().GetResult();
+ if (exitCode != 0)
+ throw new FFMpegException(FFMpegExceptionType.Process, $"ffprobe exited with non-zero exit-code ({exitCode} - {string.Join("\n", instance.ErrorData)})", null, string.Join("\n", instance.ErrorData));
+
+ return ParseOutput(instance);
+ }
+ public static async Task<IMediaAnalysis> AnalyseAsync(string filePath, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
+ {
+ if (!File.Exists(filePath))
+ throw new FFMpegException(FFMpegExceptionType.File, $"No file found at '{filePath}'");
+
+ using var instance = PrepareInstance(filePath, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
+ await instance.FinishedRunning().ConfigureAwait(false);
+ return ParseOutput(instance);
+ }
+ public static async Task<IMediaAnalysis> AnalyseAsync(Uri uri, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
+ {
+ using var instance = PrepareInstance(uri.AbsoluteUri, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
+ await instance.FinishedRunning().ConfigureAwait(false);
+ return ParseOutput(instance);
+ }
+ public static async Task<IMediaAnalysis> AnalyseAsync(Stream stream, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
+ {
+ var streamPipeSource = new StreamPipeSource(stream);
+ var pipeArgument = new InputPipeArgument(streamPipeSource);
+ using var instance = PrepareInstance(pipeArgument.PipePath, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
+ pipeArgument.Pre();
+
+ var task = instance.FinishedRunning();
+ try
+ {
+ await pipeArgument.During().ConfigureAwait(false);
+ }
+ catch(IOException)
+ {
+ }
+ finally
+ {
+ pipeArgument.Post();
+ }
+ var exitCode = await task.ConfigureAwait(false);
+ if (exitCode != 0)
+ throw new FFMpegException(FFMpegExceptionType.Process, $"FFProbe process returned exit status {exitCode}", null, string.Join("\n", instance.ErrorData));
+
+ return ParseOutput(instance);
+ }
+
+ private static IMediaAnalysis ParseOutput(Instance instance)
+ {
+ var json = string.Join(string.Empty, instance.OutputData);
+ var ffprobeAnalysis = JsonSerializer.Deserialize<FFProbeAnalysis>(json, new JsonSerializerOptions
+ {
+ PropertyNameCaseInsensitive = true
+ });
+
+ if (ffprobeAnalysis?.Format == null)
+ throw new Exception("Invalid ffprobe output: no format information found");
+
+ return new MediaAnalysis(ffprobeAnalysis);
+ }
+
+ private static Instance PrepareInstance(string filePath, int outputCapacity, FFOptions ffOptions)
+ {
+ FFProbeHelper.RootExceptionCheck();
+ FFProbeHelper.VerifyFFProbeExists(ffOptions);
+ var arguments = $"-loglevel error -print_format json -show_format -sexagesimal -show_streams \"{filePath}\"";
+ var instance = new Instance(GlobalFFOptions.GetFFProbeBinaryPath(), arguments) {DataBufferCapacity = outputCapacity};
+ return instance;
+ }
+ }
+}
diff --git a/FFMpegCore/FFProbe/FFProbeAnalysis.cs b/FFMpegCore/FFProbe/FFProbeAnalysis.cs
new file mode 100644
index 00000000..a0f2d418
--- /dev/null
+++ b/FFMpegCore/FFProbe/FFProbeAnalysis.cs
@@ -0,0 +1,127 @@
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+namespace FFMpegCore
+{
+ public class FFProbeAnalysis
+ {
+ [JsonPropertyName("streams")]
+ public List<FFProbeStream> Streams { get; set; } = null!;
+
+ [JsonPropertyName("format")]
+ public Format Format { get; set; } = null!;
+ }
+
+ public class FFProbeStream : ITagsContainer
+ {
+ [JsonPropertyName("index")]
+ public int Index { get; set; }
+
+ [JsonPropertyName("avg_frame_rate")]
+ public string AvgFrameRate { get; set; } = null!;
+
+ [JsonPropertyName("bits_per_raw_sample")]
+ public string BitsPerRawSample { get; set; } = null!;
+
+ [JsonPropertyName("bit_rate")]
+ public string BitRate { get; set; } = null!;
+
+ [JsonPropertyName("channels")]
+ public int? Channels { get; set; }
+
+ [JsonPropertyName("channel_layout")]
+ public string ChannelLayout { get; set; } = null!;
+
+ [JsonPropertyName("codec_type")]
+ public string CodecType { get; set; } = null!;
+
+ [JsonPropertyName("codec_name")]
+ public string CodecName { get; set; } = null!;
+
+ [JsonPropertyName("codec_long_name")]
+ public string CodecLongName { get; set; } = null!;
+
+ [JsonPropertyName("display_aspect_ratio")]
+ public string DisplayAspectRatio { get; set; } = null!;
+
+ [JsonPropertyName("duration")]
+ public string Duration { get; set; } = null!;
+
+ [JsonPropertyName("profile")]
+ public string Profile { get; set; } = null!;
+
+ [JsonPropertyName("width")]
+ public int? Width { get; set; }
+
+ [JsonPropertyName("height")]
+ public int? Height { get; set; }
+
+ [JsonPropertyName("r_frame_rate")]
+ public string FrameRate { get; set; } = null!;
+
+ [JsonPropertyName("pix_fmt")]
+ public string PixelFormat { get; set; } = null!;
+
+ [JsonPropertyName("sample_rate")]
+ public string SampleRate { get; set; } = null!;
+
+ [JsonPropertyName("tags")]
+ public Dictionary<string, string> Tags { get; set; } = null!;
+ }
+ public class Format : ITagsContainer
+ {
+ [JsonPropertyName("filename")]
+ public string Filename { get; set; } = null!;
+
+ [JsonPropertyName("nb_streams")]
+ public int NbStreams { get; set; }
+
+ [JsonPropertyName("nb_programs")]
+ public int NbPrograms { get; set; }
+
+ [JsonPropertyName("format_name")]
+ public string FormatName { get; set; } = null!;
+
+ [JsonPropertyName("format_long_name")]
+ public string FormatLongName { get; set; } = null!;
+
+ [JsonPropertyName("start_time")]
+ public string StartTime { get; set; } = null!;
+
+ [JsonPropertyName("duration")]
+ public string Duration { get; set; } = null!;
+
+ [JsonPropertyName("size")]
+ public string Size { get; set; } = null!;
+
+ [JsonPropertyName("bit_rate")]
+ public string BitRate { get; set; } = null!;
+
+ [JsonPropertyName("probe_score")]
+ public int ProbeScore { get; set; }
+
+ [JsonPropertyName("tags")]
+ public Dictionary<string, string> Tags { get; set; } = null!;
+ }
+
+ public interface ITagsContainer
+ {
+ Dictionary<string, string> Tags { get; set; }
+ }
+ public static class TagExtensions
+ {
+ private static string? TryGetTagValue(ITagsContainer tagsContainer, string key)
+ {
+ if (tagsContainer.Tags != null && tagsContainer.Tags.TryGetValue(key, out var tagValue))
+ return tagValue;
+ return null;
+ }
+
+ public static string? GetLanguage(this ITagsContainer tagsContainer) => TryGetTagValue(tagsContainer, "language");
+ public static string? GetCreationTime(this ITagsContainer tagsContainer) => TryGetTagValue(tagsContainer, "creation_time");
+ public static string? GetRotate(this ITagsContainer tagsContainer) => TryGetTagValue(tagsContainer, "rotate");
+ public static string? GetDuration(this ITagsContainer tagsContainer) => TryGetTagValue(tagsContainer, "duration");
+
+
+ }
+}
diff --git a/FFMpegCore/FFProbe/IMediaAnalysis.cs b/FFMpegCore/FFProbe/IMediaAnalysis.cs
new file mode 100644
index 00000000..4e67d4f5
--- /dev/null
+++ b/FFMpegCore/FFProbe/IMediaAnalysis.cs
@@ -0,0 +1,15 @@
+using System;
+using System.Collections.Generic;
+
+namespace FFMpegCore
+{
+ public interface IMediaAnalysis
+ {
+ TimeSpan Duration { get; }
+ MediaFormat Format { get; }
+ AudioStream? PrimaryAudioStream { get; }
+ VideoStream? PrimaryVideoStream { get; }
+ List<VideoStream> VideoStreams { get; }
+ List<AudioStream> AudioStreams { get; }
+ }
+}
diff --git a/FFMpegCore/FFProbe/MediaAnalysis.cs b/FFMpegCore/FFProbe/MediaAnalysis.cs
new file mode 100644
index 00000000..2602f86f
--- /dev/null
+++ b/FFMpegCore/FFProbe/MediaAnalysis.cs
@@ -0,0 +1,154 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text.RegularExpressions;
+
+namespace FFMpegCore
+{
+ internal class MediaAnalysis : IMediaAnalysis
+ {
+ internal MediaAnalysis(FFProbeAnalysis analysis)
+ {
+ Format = ParseFormat(analysis.Format);
+ VideoStreams = analysis.Streams.Where(stream => stream.CodecType == "video").Select(ParseVideoStream).ToList();
+ AudioStreams = analysis.Streams.Where(stream => stream.CodecType == "audio").Select(ParseAudioStream).ToList();
+ }
+
+ private MediaFormat ParseFormat(Format analysisFormat)
+ {
+ return new MediaFormat
+ {
+ Duration = MediaAnalysisUtils.ParseDuration(analysisFormat.Duration),
+ FormatName = analysisFormat.FormatName,
+ FormatLongName = analysisFormat.FormatLongName,
+ StreamCount = analysisFormat.NbStreams,
+ ProbeScore = analysisFormat.ProbeScore,
+ BitRate = long.Parse(analysisFormat.BitRate ?? "0"),
+ Tags = analysisFormat.Tags,
+ };
+ }
+
+ public TimeSpan Duration => new[]
+ {
+ Format.Duration,
+ PrimaryVideoStream?.Duration ?? TimeSpan.Zero,
+ PrimaryAudioStream?.Duration ?? TimeSpan.Zero
+ }.Max();
+
+ public MediaFormat Format { get; }
+ public AudioStream? PrimaryAudioStream => AudioStreams.OrderBy(stream => stream.Index).FirstOrDefault();
+
+ public VideoStream? PrimaryVideoStream => VideoStreams.OrderBy(stream => stream.Index).FirstOrDefault();
+
+ public List<VideoStream> VideoStreams { get; }
+ public List<AudioStream> AudioStreams { get; }
+
+ private VideoStream ParseVideoStream(FFProbeStream stream)
+ {
+ return new VideoStream
+ {
+ Index = stream.Index,
+ AvgFrameRate = MediaAnalysisUtils.DivideRatio(MediaAnalysisUtils.ParseRatioDouble(stream.AvgFrameRate, '/')),
+ BitRate = !string.IsNullOrEmpty(stream.BitRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitRate) : default,
+ BitsPerRawSample = !string.IsNullOrEmpty(stream.BitsPerRawSample) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitsPerRawSample) : default,
+ CodecName = stream.CodecName,
+ CodecLongName = stream.CodecLongName,
+ DisplayAspectRatio = MediaAnalysisUtils.ParseRatioInt(stream.DisplayAspectRatio, ':'),
+ Duration = MediaAnalysisUtils.ParseDuration(stream),
+ FrameRate = MediaAnalysisUtils.DivideRatio(MediaAnalysisUtils.ParseRatioDouble(stream.FrameRate, '/')),
+ Height = stream.Height ?? 0,
+ Width = stream.Width ?? 0,
+ Profile = stream.Profile,
+ PixelFormat = stream.PixelFormat,
+ Rotation = (int)float.Parse(stream.GetRotate() ?? "0"),
+ Language = stream.GetLanguage(),
+ Tags = stream.Tags,
+ };
+ }
+
+ private AudioStream ParseAudioStream(FFProbeStream stream)
+ {
+ return new AudioStream
+ {
+ Index = stream.Index,
+ BitRate = !string.IsNullOrEmpty(stream.BitRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitRate) : default,
+ CodecName = stream.CodecName,
+ CodecLongName = stream.CodecLongName,
+ Channels = stream.Channels ?? default,
+ ChannelLayout = stream.ChannelLayout,
+ Duration = MediaAnalysisUtils.ParseDuration(stream),
+ SampleRateHz = !string.IsNullOrEmpty(stream.SampleRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.SampleRate) : default,
+ Profile = stream.Profile,
+ Language = stream.GetLanguage(),
+ Tags = stream.Tags,
+ };
+ }
+
+
+ }
+
+ public static class MediaAnalysisUtils
+ {
+ private static readonly Regex DurationRegex = new Regex(@"^(\d+):(\d{1,2}):(\d{1,2})\.(\d{1,3})", RegexOptions.Compiled);
+
+ public static double DivideRatio((double, double) ratio) => ratio.Item1 / ratio.Item2;
+
+ public static (int, int) ParseRatioInt(string input, char separator)
+ {
+ if (string.IsNullOrEmpty(input)) return (0, 0);
+ var ratio = input.Split(separator);
+ return (ParseIntInvariant(ratio[0]), ParseIntInvariant(ratio[1]));
+ }
+
+ public static (double, double) ParseRatioDouble(string input, char separator)
+ {
+ if (string.IsNullOrEmpty(input)) return (0, 0);
+ var ratio = input.Split(separator);
+ return (ratio.Length > 0 ? ParseDoubleInvariant(ratio[0]) : 0, ratio.Length > 1 ? ParseDoubleInvariant(ratio[1]) : 0);
+ }
+
+ public static double ParseDoubleInvariant(string line) =>
+ double.Parse(line, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture);
+
+ public static int ParseIntInvariant(string line) =>
+ int.Parse(line, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture);
+
+
+ public static TimeSpan ParseDuration(string duration)
+ {
+ if (!string.IsNullOrEmpty(duration))
+ {
+ var match = DurationRegex.Match(duration);
+ if (match.Success)
+ {
+ // ffmpeg may provide < 3-digit number of milliseconds (omitting trailing zeros), which won't simply parse correctly
+ // e.g. 00:12:02.11 -> 12 minutes 2 seconds and 110 milliseconds
+ var millisecondsPart = match.Groups[4].Value;
+ if (millisecondsPart.Length < 3)
+ {
+ millisecondsPart = millisecondsPart.PadRight(3, '0');
+ }
+
+ var hours = int.Parse(match.Groups[1].Value);
+ var minutes = int.Parse(match.Groups[2].Value);
+ var seconds = int.Parse(match.Groups[3].Value);
+ var milliseconds = int.Parse(millisecondsPart);
+ return new TimeSpan(0, hours, minutes, seconds, milliseconds);
+ }
+ else
+ {
+ return TimeSpan.Zero;
+ }
+ }
+ else
+ {
+ return TimeSpan.Zero;
+ }
+ }
+
+ public static TimeSpan ParseDuration(FFProbeStream ffProbeStream)
+ {
+ return ParseDuration(ffProbeStream.Duration);
+ }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFProbe/MediaFormat.cs b/FFMpegCore/FFProbe/MediaFormat.cs
new file mode 100644
index 00000000..874317cc
--- /dev/null
+++ b/FFMpegCore/FFProbe/MediaFormat.cs
@@ -0,0 +1,16 @@
+using System;
+using System.Collections.Generic;
+
+namespace FFMpegCore
+{
+ public class MediaFormat
+ {
+ public TimeSpan Duration { get; set; }
+ public string FormatName { get; set; } = null!;
+ public string FormatLongName { get; set; } = null!;
+ public int StreamCount { get; set; }
+ public double ProbeScore { get; set; }
+ public double BitRate { get; set; }
+ public Dictionary<string, string>? Tags { get; set; }
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFProbe/MediaStream.cs b/FFMpegCore/FFProbe/MediaStream.cs
new file mode 100644
index 00000000..0780c8eb
--- /dev/null
+++ b/FFMpegCore/FFProbe/MediaStream.cs
@@ -0,0 +1,20 @@
+using FFMpegCore.Enums;
+
+using System;
+using System.Collections.Generic;
+
+namespace FFMpegCore
+{
+ public class MediaStream
+ {
+ public int Index { get; internal set; }
+ public string CodecName { get; internal set; } = null!;
+ public string CodecLongName { get; internal set; } = null!;
+ public int BitRate { get; internal set; }
+ public TimeSpan Duration { get; internal set; }
+ public string? Language { get; internal set; }
+ public Dictionary<string, string>? Tags { get; internal set; }
+
+ public Codec GetCodecInfo() => FFMpeg.GetCodec(CodecName);
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/FFProbe/VideoStream.cs b/FFMpegCore/FFProbe/VideoStream.cs
new file mode 100644
index 00000000..0bcfc090
--- /dev/null
+++ b/FFMpegCore/FFProbe/VideoStream.cs
@@ -0,0 +1,19 @@
+using FFMpegCore.Enums;
+
+namespace FFMpegCore
+{
+ public class VideoStream : MediaStream
+ {
+ public double AvgFrameRate { get; internal set; }
+ public int BitsPerRawSample { get; internal set; }
+ public (int Width, int Height) DisplayAspectRatio { get; internal set; }
+ public string Profile { get; internal set; } = null!;
+ public int Width { get; internal set; }
+ public int Height { get; internal set; }
+ public double FrameRate { get; internal set; }
+ public string PixelFormat { get; internal set; } = null!;
+ public int Rotation { get; set; }
+
+ public PixelFormat GetPixelFormatInfo() => FFMpeg.GetPixelFormat(PixelFormat);
+ }
+}
\ No newline at end of file
diff --git a/FFMpegCore/GlobalFFOptions.cs b/FFMpegCore/GlobalFFOptions.cs
new file mode 100644
index 00000000..358787a7
--- /dev/null
+++ b/FFMpegCore/GlobalFFOptions.cs
@@ -0,0 +1,52 @@
+using System;
+using System.IO;
+using System.Runtime.InteropServices;
+using System.Text.Json;
+
+namespace FFMpegCore
+{
+ public static class GlobalFFOptions
+ {
+ private static readonly string ConfigFile = "ffmpeg.config.json";
+
+ public static FFOptions Current { get; private set; }
+ static GlobalFFOptions()
+ {
+ if (File.Exists(ConfigFile))
+ {
+ Current = JsonSerializer.Deserialize<FFOptions>(File.ReadAllText(ConfigFile))!;
+ }
+ else
+ {
+ Current = new FFOptions();
+ }
+ }
+
+ public static void Configure(Action<FFOptions> optionsAction)
+ {
+ optionsAction?.Invoke(Current);
+ }
+ public static void Configure(FFOptions ffOptions)
+ {
+ Current = ffOptions ?? throw new ArgumentNullException(nameof(ffOptions));
+ }
+
+
+ public static string GetFFMpegBinaryPath(FFOptions? ffOptions = null) => GetFFBinaryPath("FFMpeg", ffOptions ?? Current);
+
+ public static string GetFFProbeBinaryPath(FFOptions? ffOptions = null) => GetFFBinaryPath("FFProbe", ffOptions ?? Current);
+
+ private static string GetFFBinaryPath(string name, FFOptions ffOptions)
+ {
+ var ffName = name.ToLowerInvariant();
+ if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+ ffName += ".exe";
+
+ var target = Environment.Is64BitProcess ? "x64" : "x86";
+ if (Directory.Exists(Path.Combine(ffOptions.BinaryFolder, target)))
+ ffName = Path.Combine(target, ffName);
+
+ return Path.Combine(ffOptions.BinaryFolder, ffName);
+ }
+ }
+}
diff --git a/FFMpegCore/Helpers/FFMpegHelper.cs b/FFMpegCore/Helpers/FFMpegHelper.cs
new file mode 100644
index 00000000..12e52c38
--- /dev/null
+++ b/FFMpegCore/Helpers/FFMpegHelper.cs
@@ -0,0 +1,47 @@
+using System;
+using System.Drawing;
+using System.IO;
+using FFMpegCore.Exceptions;
+using Instances;
+
+namespace FFMpegCore.Helpers
+{
+ public static class FFMpegHelper
+ {
+ private static bool _ffmpegVerified;
+
+ public static void ConversionSizeExceptionCheck(Image image)
+ => ConversionSizeExceptionCheck(image.Size.Width, image.Size.Height);
+
+ public static void ConversionSizeExceptionCheck(IMediaAnalysis info)
+ => ConversionSizeExceptionCheck(info.PrimaryVideoStream!.Width, info.PrimaryVideoStream.Height);
+
+ private static void ConversionSizeExceptionCheck(int width, int height)
+ {
+ if (height % 2 != 0 || width % 2 != 0 )
+ throw new ArgumentException("FFMpeg yuv420p encoding requires the width and height to be a multiple of 2!");
+ }
+
+ public static void ExtensionExceptionCheck(string filename, string extension)
+ {
+ if (!extension.Equals(Path.GetExtension(filename), StringComparison.OrdinalIgnoreCase))
+ throw new FFMpegException(FFMpegExceptionType.File,
+ $"Invalid output file. File extension should be '{extension}' required.");
+ }
+
+ public static void RootExceptionCheck()
+ {
+ if (GlobalFFOptions.Current.BinaryFolder == null)
+ throw new FFOptionsException("FFMpeg root is not configured in app config. Missing key 'BinaryFolder'.");
+ }
+
+ public static void VerifyFFMpegExists(FFOptions ffMpegOptions)
+ {
+ if (_ffmpegVerified) return;
+ var (exitCode, _) = Instance.Finish(GlobalFFOptions.GetFFMpegBinaryPath(ffMpegOptions), "-version");
+ _ffmpegVerified = exitCode == 0;
+ if (!_ffmpegVerified)
+ throw new FFMpegException(FFMpegExceptionType.Operation, "ffmpeg was not found on your system");
+ }
+ }
+}
diff --git a/FFMpegCore/Helpers/FFProbeHelper.cs b/FFMpegCore/Helpers/FFProbeHelper.cs
new file mode 100644
index 00000000..d0064e43
--- /dev/null
+++ b/FFMpegCore/Helpers/FFProbeHelper.cs
@@ -0,0 +1,36 @@
+using FFMpegCore.Exceptions;
+using Instances;
+
+namespace FFMpegCore.Helpers
+{
+ public class FFProbeHelper
+ {
+ private static bool _ffprobeVerified;
+
+ public static int Gcd(int first, int second)
+ {
+ while (first != 0 && second != 0)
+ {
+ if (first > second)
+ first -= second;
+ else second -= first;
+ }
+ return first == 0 ? second : first;
+ }
+
+ public static void RootExceptionCheck()
+ {
+ if (GlobalFFOptions.Current.BinaryFolder == null)
+ throw new FFOptionsException("FFProbe root is not configured in app config. Missing key 'BinaryFolder'.");
+ }
+
+ public static void VerifyFFProbeExists(FFOptions ffMpegOptions)
+ {
+ if (_ffprobeVerified) return;
+ var (exitCode, _) = Instance.Finish(GlobalFFOptions.GetFFProbeBinaryPath(ffMpegOptions), "-version");
+ _ffprobeVerified = exitCode == 0;
+ if (!_ffprobeVerified)
+ throw new FFMpegException(FFMpegExceptionType.Operation, "ffprobe was not found on your system");
+ }
+ }
+}
diff --git a/FFMpegCore/ImageInfo.cs b/FFMpegCore/ImageInfo.cs
new file mode 100644
index 00000000..cf8561e2
--- /dev/null
+++ b/FFMpegCore/ImageInfo.cs
@@ -0,0 +1,179 @@
+using System;
+using System.Drawing;
+using System.IO;
+using FFMpegCore.Enums;
+using FFMpegCore.Helpers;
+
+namespace FFMpegCore
+{
+ public class ImageInfo
+ {
+ private FileInfo _file;
+
+ /// <summary>
+ /// Create an image information object from a file information object.
+ /// </summary>
+ /// <param name="fileInfo">Image file information.</param>
+ public ImageInfo(FileInfo fileInfo)
+ {
+ if (!fileInfo.Extension.ToLowerInvariant().EndsWith(FileExtension.Png))
+ {
+ throw new Exception("Image joining currently suppors only .png file types");
+ }
+
+ fileInfo.Refresh();
+
+ Size = fileInfo.Length / (1024 * 1024);
+
+ using (var image = Image.FromFile(fileInfo.FullName))
+ {
+ Width = image.Width;
+ Height = image.Height;
+ var cd = FFProbeHelper.Gcd(Width, Height);
+ Ratio = $"{Width / cd}:{Height / cd}";
+ }
+
+
+ if (!fileInfo.Exists)
+ throw new ArgumentException($"Input file {fileInfo.FullName} does not exist!");
+
+ _file = fileInfo;
+
+
+ }
+
+ /// <summary>
+ /// Create an image information object from a target path.
+ /// </summary>
+ /// <param name="path">Path to image.</param>
+ public ImageInfo(string path) : this(new FileInfo(path)) { }
+
+ /// <summary>
+ /// Aspect ratio.
+ /// </summary>
+ public string Ratio { get; internal set; }
+
+ /// <summary>
+ /// Height of the image file.
+ /// </summary>
+ public int Height { get; internal set; }
+
+ /// <summary>
+ /// Width of the image file.
+ /// </summary>
+ public int Width { get; internal set; }
+
+ /// <summary>
+ /// Image file size in MegaBytes (MB).
+ /// </summary>
+ public double Size { get; internal set; }
+
+ /// <summary>
+ /// Gets the name of the file.
+ /// </summary>
+ public string Name => _file.Name;
+
+ /// <summary>
+ /// Gets the full path of the file.
+ /// </summary>
+ public string FullName => _file.FullName;
+
+ /// <summary>
+ /// Gets the file extension.
+ /// </summary>
+ public string Extension => _file.Extension;
+
+ /// <summary>
+ /// Gets a flag indicating if the file is read-only.
+ /// </summary>
+ public bool IsReadOnly => _file.IsReadOnly;
+
+ /// <summary>
+ /// Gets a flag indicating if the file exists (no cache, per call verification).
+ /// </summary>
+ public bool Exists => File.Exists(FullName);
+
+ /// <summary>
+ /// Gets the creation date.
+ /// </summary>
+ public DateTime CreationTime => _file.CreationTime;
+
+ /// <summary>
+ /// Gets the parent directory information.
+ /// </summary>
+ public DirectoryInfo Directory => _file.Directory;
+
+ /// <summary>
+ /// Create an image information object from a file information object.
+ /// </summary>
+ /// <param name="fileInfo">Image file information.</param>
+ /// <returns>An ImageInfo instance.</returns>
+ public static ImageInfo FromFileInfo(FileInfo fileInfo)
+ {
+ return FromPath(fileInfo.FullName);
+ }
+
+ /// <summary>
+ /// Create an image information object from a target path.
+ /// </summary>
+ /// <param name="path">Path to image.</param>
+ /// <returns>An ImageInfo instance.</returns>
+ public static ImageInfo FromPath(string path)
+ {
+ return new ImageInfo(path);
+ }
+
+ /// <summary>
+ /// Pretty prints the image information.
+ /// </summary>
+ /// <returns>A formatted string with the image details.</returns>
+ public override string ToString()
+ {
+ return "Image Path : " + FullName + Environment.NewLine +
+ "Image Root : " + Directory.FullName + Environment.NewLine +
+ "Image Name: " + Name + Environment.NewLine +
+ "Image Extension : " + Extension + Environment.NewLine +
+ "Aspect Ratio : " + Ratio + Environment.NewLine +
+ "Resolution : " + Width + "x" + Height + Environment.NewLine +
+ "Size : " + Size + " MB";
+ }
+
+ /// <summary>
+ /// Open a file stream.
+ /// </summary>
+ /// <param name="mode">Opens a file in a specified mode.</param>
+ /// <returns>File stream of the image file.</returns>
+ public FileStream FileOpen(FileMode mode)
+ {
+ return _file.Open(mode);
+ }
+
+ /// <summary>
+ /// Move file to a specific directory.
+ /// </summary>
+ /// <param name="destination">The destination directory.</param>
+ public void MoveTo(DirectoryInfo destination)
+ {
+ var newLocation = $"{destination.FullName}{Path.DirectorySeparatorChar}{Name}"; // Name already includes the file extension
+ _file.MoveTo(newLocation);
+ _file = new FileInfo(newLocation);
+ }
+
+ /// <summary>
+ /// Delete the file.
+ /// </summary>
+ public void Delete()
+ {
+ _file.Delete();
+ }
+
+ /// <summary>
+ /// Converts image info to file info.
+ /// </summary>
+ /// <returns>A new FileInfo instance.</returns>
+ public FileInfo ToFileInfo()
+ {
+ return new FileInfo(_file.FullName);
+ }
+ }
+}
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..9dce3459
--- /dev/null
+++ b/README.md
@@ -0,0 +1,228 @@
+# FFMpegCore
+[![CI](https://github.com/rosenbjerg/FFMpegCore/workflows/CI/badge.svg)](https://github.com/rosenbjerg/FFMpegCore/actions?query=workflow%3ACI)
+[![NuGet Badge](https://buildstats.info/nuget/FFMpegCore)](https://www.nuget.org/packages/FFMpegCore/)
+[![GitHub issues](https://img.shields.io/github/issues/rosenbjerg/FFMpegCore)](https://github.com/rosenbjerg/FFMpegCore/issues)
+[![GitHub stars](https://img.shields.io/github/stars/rosenbjerg/FFMpegCore)](https://github.com/rosenbjerg/FFMpegCore/stargazers)
+[![GitHub](https://img.shields.io/github/license/rosenbjerg/FFMpegCore)](https://github.com/rosenbjerg/FFMpegCore/blob/master/LICENSE)
+
+# Setup
+
+#### NuGet:
+
+```
+Install-Package FFMpegCore
+```
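+
+Or install via the .NET CLI:
+```
+dotnet add package FFMpegCore
+```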
+
+A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your C# applications. Supports both synchronous and asynchronous use.
+
+# API
+
+## FFProbe
+
+FFProbe is used to gather media information:
+
+```csharp
+var mediaInfo = FFProbe.Analyse(inputPath);
+```
+or
+```csharp
+var mediaInfo = await FFProbe.AnalyseAsync(inputPath);
+```
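+
+The returned `IMediaAnalysis` exposes the parsed format and stream information. A minimal sketch of reading a few of the exposed properties (names taken from `IMediaAnalysis`, `VideoStream` and `AudioStream` in this library):
+```csharp
+var mediaInfo = FFProbe.Analyse(inputPath);
+var duration = mediaInfo.Duration;                           // overall media duration
+var height = mediaInfo.PrimaryVideoStream?.Height;           // null if there is no video stream
+var sampleRate = mediaInfo.PrimaryAudioStream?.SampleRateHz; // null if there is no audio stream
+```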
+
+
+## FFMpeg
+FFMpeg is used for converting your media files to web-ready formats.
+Easily build your FFMpeg arguments using the fluent argument builder:
+
+Convert an input file to h264/aac, scaled to 720p, with faststart enabled for web playback:
+```csharp
+FFMpegArguments
+ .FromFileInput(inputPath)
+ .OutputToFile(outputPath, false, options => options
+ .WithVideoCodec(VideoCodec.LibX264)
+ .WithConstantRateFactor(21)
+ .WithAudioCodec(AudioCodec.Aac)
+ .WithVariableBitrate(4)
+ .WithVideoFilters(filterOptions => filterOptions
+ .Scale(VideoSize.Hd))
+ .WithFastStart())
+ .ProcessSynchronously();
+```
+
+Easily capture screens from your videos:
+```csharp
+// process the snapshot in-memory and use the Bitmap directly
+var bitmap = FFMpeg.Snapshot(inputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
+
+// or persists the image on the drive
+FFMpeg.Snapshot(inputPath, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
+```
+
+Convert to and/or from streams
+```csharp
+await FFMpegArguments
+ .FromPipeInput(new StreamPipeSource(inputStream))
+ .OutputToPipe(new StreamPipeSink(outputStream), options => options
+ .WithVideoCodec("vp9")
+ .ForceFormat("webm"))
+ .ProcessAsynchronously();
+```
+
+Join video parts into one single file:
+```csharp
+FFMpeg.Join(@"..\joined_video.mp4",
+ @"..\part1.mp4",
+ @"..\part2.mp4",
+ @"..\part3.mp4"
+);
+```
+
+Join images into a video:
+```csharp
+FFMpeg.JoinImageSequence(@"..\joined_video.mp4", frameRate: 1,
+ ImageInfo.FromPath(@"..\1.png"),
+ ImageInfo.FromPath(@"..\2.png"),
+ ImageInfo.FromPath(@"..\3.png")
+);
+```
+
+Mute videos:
+```csharp
+FFMpeg.Mute(inputPath, outputPath);
+```
+
+Save audio track from video:
+```csharp
+FFMpeg.ExtractAudio(inputPath, outputPath);
+```
+
+Add or replace audio track on video:
+```csharp
+FFMpeg.ReplaceAudio(inputPath, inputAudioPath, outputPath);
+```
+
+Add poster image to audio file (good for youtube videos):
+```csharp
+FFMpeg.PosterWithAudio(inputPath, inputAudioPath, outputPath);
+// or
+var image = Image.FromFile(inputImagePath);
+image.AddAudio(inputAudioPath, outputPath);
+```
+
+Other available arguments can be found in the `FFMpegCore.Arguments` namespace.
+
+### Input piping
+With input piping it is possible to write video frames directly from program memory, without first saving them as jpeg or png files and passing their paths as input to ffmpeg. This also makes it possible to convert video on-the-fly while frames are being generated or received.
+
+The `IPipeSource` interface is used as the source of data. It can represent either an encoded video stream or a stream of raw frames. Currently, the `IPipeSource` interface has a single implementation, `RawVideoPipeSource`, which is used for encoding raw frame streams.
+
+For example:
+
+A method that generates bitmap frames:
+```csharp
+IEnumerable<IVideoFrame> CreateFrames(int count)
+{
+ for(int i = 0; i < count; i++)
+ {
+ yield return GetNextFrame(); //method of generating new frames
+ }
+}
+```
+Then create a `RawVideoPipeSource` from the frames and pass it to `FFMpegArguments.FromPipeInput`:
+```csharp
+var videoFramesSource = new RawVideoPipeSource(CreateFrames(64)) // pass an IEnumerable<IVideoFrame> or IEnumerator<IVideoFrame> to the constructor of RawVideoPipeSource
+{
+ FrameRate = 30 //set source frame rate
+};
+await FFMpegArguments
+ .FromPipeInput(videoFramesSource)
+ .OutputToFile(outputPath, false, options => options
+ .WithVideoCodec(VideoCodec.LibVpx))
+ .ProcessAsynchronously();
+```
+
+If you want to use a `System.Drawing.Bitmap` as an `IVideoFrame`, the `BitmapVideoFrameWrapper` wrapper class is available, as shown in the sketch below.
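+
+A minimal sketch of a Bitmap-producing variant of the `CreateFrames` method above, assuming `FFMpegCore.Extend` (home of `BitmapVideoFrameWrapper`) and `System.Drawing` are referenced:
+```csharp
+IEnumerable<IVideoFrame> CreateFrames(int count)
+{
+    for (var i = 0; i < count; i++)
+    {
+        var bitmap = GetNextFrame();                      // your own method producing a System.Drawing.Bitmap
+        yield return new BitmapVideoFrameWrapper(bitmap); // wraps the Bitmap as an IVideoFrame
+    }
+}
+```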
+
+
+## Binaries
+
+If you prefer to download the binaries manually, visit [ffbinaries](https://ffbinaries.com/downloads) or [zeranoe Windows builds](https://ffmpeg.zeranoe.com/builds/).
+
+#### Windows
+
+command: `choco install ffmpeg -Y`
+
+location: `C:\ProgramData\chocolatey\lib\ffmpeg\tools\ffmpeg\bin`
+
+#### Mac OSX
+
+command: `brew install ffmpeg mono-libgdiplus`
+
+location: `/usr/local/bin`
+
+#### Ubuntu
+
+command: `sudo apt-get install -y ffmpeg libgdiplus`
+
+location: `/usr/bin`
+
+## Path Configuration
+
+#### Behavior
+
+If you wish to support multiple client processor architectures, you can do so by creating the folders `x64` and `x86` in the `root` directory.
+Both folders should contain the binaries (`ffmpeg.exe` and `ffprobe.exe`) built for the respective architectures.
+
+By doing so, the library will attempt to use `/root/{ARCH}/(ffmpeg|ffprobe).exe`.
+
+If these folders are not present, it will try to find the binaries in `/root/(ffmpeg|ffprobe).exe`, as illustrated below.
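+
+For example, a multi-architecture layout could look like this (Windows binary names shown; `root` stands for the configured `BinaryFolder`):
+
+```
+root/
+  x64/
+    ffmpeg.exe
+    ffprobe.exe
+  x86/
+    ffmpeg.exe
+    ffprobe.exe
+```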
+
+#### Option 1
+
+The default value of an empty string (expecting ffmpeg to be found through PATH) can be overwritten via the `FFOptions` class:
+
+```c#
+// setting global options
+GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
+// or
+GlobalFFOptions.Configure(options => options.BinaryFolder = "./bin");
+
+// or individual, per-run options
+await FFMpegArguments
+ .FromFileInput(inputPath)
+ .OutputToFile(outputPath)
+ .ProcessAsynchronously(true, new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
+```
+
+#### Option 2
+
+The root and temp directory for the ffmpeg binaries can be configured via the `ffmpeg.config.json` file.
+
+```json
+{
+ "BinaryFolder": "./bin",
+ "TemporaryFilesFolder": "/tmp"
+}
+```
+
+# Compatibility
+Some versions of FFMpeg might not support the same argument schema. The library has been tested with versions `3.3` to `4.2`.
+
+
+## Contributors
+
+
+
+
+
+
+
+
+
+
+
+### License
+
+Copyright © 2021
+
+Released under [MIT license](https://github.com/rosenbjerg/FFMpegCore/blob/master/LICENSE)
diff --git a/blackdetect.txt b/blackdetect.txt
new file mode 100644
index 00000000..1b28ba48
--- /dev/null
+++ b/blackdetect.txt
@@ -0,0 +1,45 @@
+ffprobe command to detect black frames:
+
+ffprobe -f lavfi -i movie="input.mp4,blackdetect[out0]" -show_entries tags=lavfi.black_start,lavfi.black_end -of default=nw=1
+
+Output displays line by line as each black frame is found.
+
+TAG:lavfi.black_start=35.7691
+TAG:lavfi.black_start=35.7691
+TAG:lavfi.black_end=37.0704
+TAG:lavfi.black_end=37.0704
+TAG:lavfi.black_start=53.9205
+TAG:lavfi.black_start=53.9205
+TAG:lavfi.black_end=54.1207
+TAG:lavfi.black_end=54.1207
+TAG:lavfi.black_start=68.9689
+TAG:lavfi.black_start=68.9689
+TAG:lavfi.black_end=69.4694
+TAG:lavfi.black_end=69.4694
+TAG:lavfi.black_start=75.7423
+TAG:lavfi.black_start=75.7423
+TAG:lavfi.black_end=76.2095
+TAG:lavfi.black_end=76.2095
+TAG:lavfi.black_start=1617.95
+TAG:lavfi.black_start=1617.95
+TAG:lavfi.black_end=1618.25
+TAG:lavfi.black_end=1618.25
+TAG:lavfi.black_start=1635.1
+TAG:lavfi.black_start=1635.1
+TAG:lavfi.black_end=1635.33
+TAG:lavfi.black_end=1635.33
+TAG:lavfi.black_start=1650.18
+TAG:lavfi.black_start=1650.18
+TAG:lavfi.black_end=1650.68
+TAG:lavfi.black_end=1650.68
+TAG:lavfi.black_start=1656.96
+TAG:lavfi.black_start=1656.96
+TAG:lavfi.black_end=1657.76
+TAG:lavfi.black_end=1657.76
+TAG:lavfi.black_start=1660.03
+TAG:lavfi.black_start=1660.03
+[Parsed_movie_0 @ 0000020c41f67b00] EOF timestamp not reliable
+[blackdetect @ 0000020c41f470c0] black_start:1660.03 black_end:1980.48 black_duration:320.453
+
+
+Errors can appear in the output, such as the "EOF timestamp not reliable" message seen above. The processing time almost necessitates using await, as the file is scanned in its entirety and can take multiple minutes to complete.
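+
+A rough sketch of running this asynchronously from C#, reusing the Instances API that FFProbe already relies on in this codebase (argument string copied from the command above; parsing of the TAG lines is omitted):
+
+    var arguments = "-f lavfi -i movie=\"input.mp4,blackdetect[out0]\" -show_entries tags=lavfi.black_start,lavfi.black_end -of default=nw=1";
+    using var instance = new Instance(GlobalFFOptions.GetFFProbeBinaryPath(), arguments);
+    var exitCode = await instance.FinishedRunning();   // long-running: the whole file is scanned
+    var tagLines = instance.OutputData;                // TAG:lavfi.black_start=... / TAG:lavfi.black_end=...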
\ No newline at end of file