150 changes: 117 additions & 33 deletions src/FFmpegVideoEncoder.cs
100755 → 100644
@@ -198,7 +198,10 @@ public bool SetCodec(AVCodecID cdc, string name, Dictionary<string, string>? opt

private AVCodec* GetCodec(AVCodecID codecID, bool isEncoder = true)
{
AVCodec* codec = (AVCodec*)(_specificEncoders?[codecID] ?? IntPtr.Zero);
AVCodec* codec = null;

if (_specificEncoders?.TryGetValue(codecID, out var cdc) ?? false)
codec = (AVCodec*)cdc;

if (codec == null)
{
@@ -220,8 +223,14 @@ public bool SetCodec(AVCodecID cdc, string name, Dictionary<string, string>? opt
return codec;
}

private Dictionary<string, string>? GetCodecOptions(string? name)
=> !string.IsNullOrWhiteSpace(name) ? _codecOptionsByName?[name!] ?? _codecOptions : null;
private Dictionary<string, string> GetCodecOptions(string? name)
{
if (!string.IsNullOrWhiteSpace(name)
&& (_codecOptionsByName?.TryGetValue(name!, out var opt) ?? false))
return opt;
else
return _codecOptions;
}

public void InitialiseEncoder(AVCodecID codecID, int width, int height, int fps)
{
@@ -231,7 +240,8 @@ public void InitialiseEncoder(AVCodecID codecID, int width, int height, int fps)
_codecID = codecID;

var codec = GetCodec(codecID);
var encOpts = GetCodecOptions(GetNameString(codec->name));
var cdcname = GetNameString(codec->name);
var encOpts = GetCodecOptions(cdcname);

if (codec == null)
{
@@ -265,35 +275,44 @@ public void InitialiseEncoder(AVCodecID codecID, int width, int height, int fps)
else
_encoderContext->gop_size = fps;

// provide tunings for known codecs
switch (GetNameString(codec->name))
try
{
case "libx264":
ffmpeg.av_opt_set(_encoderContext->priv_data, "profile", "baseline", 0).ThrowExceptionIfError();
ffmpeg.av_opt_set(_encoderContext->priv_data, "tune", "zerolatency", 0).ThrowExceptionIfError();
break;
case "h264_qsv":
ffmpeg.av_opt_set(_encoderContext->priv_data, "profile", "66" /* baseline */, 0).ThrowExceptionIfError();
ffmpeg.av_opt_set(_encoderContext->priv_data, "preset", "7" /* veryfast */, 0).ThrowExceptionIfError();
break;
case "libvpx":
ffmpeg.av_opt_set(_encoderContext->priv_data, "quality", "realtime", 0).ThrowExceptionIfError();
break;
default:
break;
// provide tunings for known codecs
switch (cdcname)
{
case "libx264":
ffmpeg.av_opt_set(_encoderContext->priv_data, "profile", "baseline", 0).ThrowExceptionIfError();
ffmpeg.av_opt_set(_encoderContext->priv_data, "tune", "zerolatency", 0).ThrowExceptionIfError();
break;
case "h264_qsv":
ffmpeg.av_opt_set(_encoderContext->priv_data, "profile", "66" /* baseline */, 0).ThrowExceptionIfError();
ffmpeg.av_opt_set(_encoderContext->priv_data, "preset", "7" /* veryfast */, 0).ThrowExceptionIfError();
break;
case "libvpx":
ffmpeg.av_opt_set(_encoderContext->priv_data, "quality", "realtime", 0).ThrowExceptionIfError();
break;
case "libx265":
//ffmpeg.av_opt_set(_encoderContext->priv_data, "forced-idr", "1", 0).ThrowExceptionIfError();
//ffmpeg.av_opt_set(_encoderContext->priv_data, "crf", "28", 0).ThrowExceptionIfError();
ffmpeg.av_opt_set(_encoderContext->priv_data, "preset", "ultrafast", 0).ThrowExceptionIfError();
ffmpeg.av_opt_set(_encoderContext->priv_data, "tune", "zerolatency", 0).ThrowExceptionIfError();
break;
default:
break;
}
}
catch (ApplicationException ex)
{
logger.LogCritical(ex, "Failed to set default encoder options for codec {name}. {msg}", cdcname, ex.Message);
throw;
}

foreach (var option in encOpts)
foreach (var option in _codecOptions)
{
try
{
ffmpeg.av_opt_set(_encoderContext->priv_data, option.Key, option.Value, 0).ThrowExceptionIfError();
}
catch (Exception excp)
{
logger.LogWarning("Failed to set encoder option \"{key}\"=\"{val}\", Skipping this option. {msg}", option.Key, option.Value, excp.Message);
}
};
var ok = ffmpeg.av_opt_set(_encoderContext->priv_data, option.Key, option.Value, ffmpeg.AV_OPT_SEARCH_CHILDREN);
if (ok < 0)
logger.LogWarning("Failed to set encoder option \"{key}\"=\"{val}\", Skipping this option. {msg}", option.Key, option.Value, FFmpegInit.av_strerror(ok));
}

ffmpeg.avcodec_open2(_encoderContext, codec, null).ThrowExceptionIfError();

@@ -331,7 +350,8 @@ private void ResetEncoder()
{
ffmpeg.avcodec_free_context(pCtx);
}
_negotiatedPixFmt = null;

UnsafeReset();
}
}
}
@@ -348,8 +368,31 @@ private void ResetDecoder()
{
ffmpeg.avcodec_free_context(pCtx);
}

UnsafeReset();
}
}
}

private void UnsafeReset()
{
if (_frame != null)
{
fixed (AVFrame** pFrame = &_frame)
{
ffmpeg.av_frame_free(pFrame);
}
}

if (_gpuFrame != null)
{
fixed (AVFrame** pFrame = &_gpuFrame)
{
ffmpeg.av_frame_free(pFrame);
}
}

_negotiatedPixFmt = null;
}

private void InitialiseDecoder(AVCodecID codecID)
@@ -557,6 +600,8 @@ private bool CheckDropFrame()

if (error == 0)
{
//TracePacket(pPacket, "hevc_mp4toannexb");

if (_codecID == AVCodecID.AV_CODEC_ID_H264)
{
// TODO: Work out how to use the FFmpeg H264 bit stream parser to extract the NALs.
@@ -595,6 +640,37 @@ private bool CheckDropFrame()
}
}

/*
// for debugging or bitstream filter example
bool inits = false;
unsafe AVBSFContext* bsfctx;
private void TracePacket(AVPacket* pPacket, string name)
{
if (!inits)
{
IntPtr ptr = IntPtr.Zero;
//var bsf = ffmpeg.av_bsf_get_by_name("hevc_mp4toannexb");
var bsf = ffmpeg.av_bsf_get_by_name(name);
ffmpeg.av_bsf_alloc(bsf, (AVBSFContext**)&ptr);
bsfctx = (AVBSFContext*)ptr;
ffmpeg.avcodec_parameters_from_context(bsfctx->par_in,
(IntPtr)_encoderContext == IntPtr.Zero ? _decoderContext : _encoderContext);
ffmpeg.av_bsf_init(bsfctx);
inits = true;
}

try
{
ffmpeg.av_bsf_send_packet(bsfctx, pPacket).ThrowExceptionIfError();
ffmpeg.av_bsf_receive_packet(bsfctx, pPacket).ThrowExceptionIfError();
}
catch (Exception e)
{
logger.LogError("BSF excp: {Log}", e.Message);
}
}
*/

public void AdjustStream(int bitrate, int fps)
{

@@ -664,13 +740,18 @@ public void AdjustStream(int bitrate, int fps)
width = 0;
height = 0;

if (_isDecoderInitialised && _codecID != codecID)
ResetDecoder();

if (!_isDecoderInitialised)
{
InitialiseDecoder(codecID);
_frame = ffmpeg.av_frame_alloc();
_gpuFrame = ffmpeg.av_frame_alloc();
}

//TracePacket(packet, "trace_headers");

List<RawImage> rgbFrames = new List<RawImage>();
if (ffmpeg.avcodec_send_packet(_decoderContext, packet) < 0)
{
@@ -830,17 +911,20 @@ internal bool NegotiatePixelFormat(AVCodecID codecid, int width, int height, int
fmts++;
}

var ret = false;
fmt = *fmts;
var ok = _encoderContext->codec->pix_fmts[0] != fmt;
if (fmt == AVPixelFormat.AV_PIX_FMT_NONE)
fmt = _encoderContext->codec->pix_fmts[0];
else
ret = true;

ResetEncoder();

_negotiatedPixFmt = fmt;

if (logger.IsEnabled(LogLevel.Trace))
logger.LogTrace("Negotiated pixel format {fmt}", fmt);

return ok;
return ret;
}
}

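Side note on the error reporting in the rewritten option loop above: FFmpegInit.av_strerror(ok) is used to turn the negative return code from av_opt_set into readable text. A minimal sketch of what such a helper can look like with the FFmpeg.AutoGen bindings used in this repo; the method name, buffer size and fallback string below are illustrative assumptions, not code from this PR:

// Illustrative sketch only, not part of this PR.
// Requires: using System; using System.Runtime.InteropServices; using FFmpeg.AutoGen;
public static unsafe string DescribeFFmpegError(int error)
{
    const int bufferSize = 1024;                            // arbitrary size for the message buffer
    byte* buffer = stackalloc byte[bufferSize];
    ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);   // writes a NUL-terminated description
    return Marshal.PtrToStringAnsi((IntPtr)buffer) ?? $"FFmpeg error {error}";
}

With a helper along those lines, a failed av_opt_set call is logged with a human-readable reason rather than a raw error code.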
14 changes: 14 additions & 0 deletions src/FFmpegVideoEndPoint.cs
@@ -97,6 +97,20 @@ public void GotVideoFrame(IPEndPoint remoteEndPoint, uint timestamp, byte[] payl
{
if ( (!_isClosed) && (payload != null) && (OnVideoSinkDecodedSampleFaster != null) )
{
if (_videoFormatManager.SelectedFormat.Codec != format.Codec)
{
if (_videoFormatManager.GetSourceFormats().Exists(f => f.Codec == format.Codec))
{
logger.LogWarning("Video format {format} is not selected but supported, continuing by using it.", format.FormatName);
_videoFormatManager.SetSelectedFormat(format);
}
else
{
logger.LogError("Video format {format} is not supported by this endpoint.", format.FormatName);
return;
}
}

AVCodecID? codecID = FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec);
if(codecID != null)
{
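For context, the decode path above only runs when OnVideoSinkDecodedSampleFaster has a subscriber. A typical hookup looks roughly like the following; it assumes the delegate receives a RawImage, as suggested by the List<RawImage> built in the decoder, and is a usage sketch rather than code from this PR:

// Illustrative usage sketch, not part of this PR.
var videoEndPoint = new FFmpegVideoEndPoint();
videoEndPoint.OnVideoSinkDecodedSampleFaster += (RawImage rawImage) =>
{
    // rawImage carries the decoded frame buffer; hand it to the rendering layer here.
};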
12 changes: 12 additions & 0 deletions src/FfmpegInit.cs
@@ -191,9 +191,21 @@ public static class FFmpegConvert
case VideoCodecsEnum.VP8:
avCodecID = AVCodecID.AV_CODEC_ID_VP8;
break;
case VideoCodecsEnum.VP9:
avCodecID = AVCodecID.AV_CODEC_ID_VP9;
break;
case VideoCodecsEnum.H264:
avCodecID = AVCodecID.AV_CODEC_ID_H264;
break;
case VideoCodecsEnum.H265:
avCodecID = AVCodecID.AV_CODEC_ID_HEVC;
break;

// Currently disabled because MJPEG doesn't work with the current pipeline that forces pixel conversion to YUV420P
// TODO: Fix pixel format conversion in Decode->Encode pipeline
//case VideoCodecsEnum.JPEG:
// avCodecID = AVCodecID.AV_CODEC_ID_MJPEG;
// break;
}

return avCodecID;
23 changes: 18 additions & 5 deletions src/Helper.cs
@@ -11,13 +11,26 @@ public class Helper
public const int MIN_SLEEP_MILLISECONDS = 15;
public const int DEFAULT_VIDEO_FRAME_RATE = 30;

// Legacy hardcoded format IDs.
[Obsolete("VP8 uses a dynamic RTP payload type; this constant should not be used for matching.")]
public const int VP8_FORMATID = 96;
[Obsolete("H264 uses a dynamic RTP payload type; this constant should not be used for matching.")]
public const int H264_FORMATID = 100;

internal static List<VideoFormat> GetSupportedVideoFormats() => new List<VideoFormat>
{
new VideoFormat(VideoCodecsEnum.VP8, Helper.VP8_FORMATID, VideoFormat.DEFAULT_CLOCK_RATE),
new VideoFormat(VideoCodecsEnum.H264, Helper.H264_FORMATID, VideoFormat.DEFAULT_CLOCK_RATE)
};
private static int _dynFmtIdCounter = VideoFormat.DYNAMIC_ID_MIN;

internal static List<VideoFormat> GetSupportedVideoFormats() => _supportedVidFormats; // Use predefined list of supported video formats

private static readonly List<VideoFormat> _supportedVidFormats =
[
new VideoFormat(VideoCodecsEnum.VP8, _dynFmtIdCounter++, VideoFormat.DEFAULT_CLOCK_RATE),
new VideoFormat(VideoCodecsEnum.VP9, _dynFmtIdCounter++, VideoFormat.DEFAULT_CLOCK_RATE),
new VideoFormat(VideoCodecsEnum.H264, _dynFmtIdCounter++, VideoFormat.DEFAULT_CLOCK_RATE, "packetization-mode=1"),
new VideoFormat(VideoCodecsEnum.H265, _dynFmtIdCounter++, VideoFormat.DEFAULT_CLOCK_RATE, "packetization-mode=1"),

// Currently disabled because MJPEG doesn't work with the current pipeline that forces pixel conversion to YUV420P
// TODO: Fix pixel format conversion in Decode->Encode pipeline
//new VideoFormat(VideoCodecsEnum.JPEG, _dynFmtIdCounter++, VideoFormat.DEFAULT_CLOCK_RATE),
];
}
}
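Net effect of the Helper.cs change: payload IDs are now allocated sequentially from VideoFormat.DYNAMIC_ID_MIN instead of the hardcoded 96/100, so matching relies on the codec rather than a fixed ID, as the Obsolete messages above note. As a rough illustration, assuming DYNAMIC_ID_MIN is 96 and that VideoFormat exposes FormatID, ClockRate and Parameters properties matching its constructor arguments (assumptions, not confirmed by this PR), enumerating the list from inside the library would print VP8=96, VP9=97, H264=98, H265=99:

// Illustrative only; GetSupportedVideoFormats is internal, so this runs inside the library or its tests.
foreach (var fmt in Helper.GetSupportedVideoFormats())
{
    Console.WriteLine($"{fmt.Codec} payload={fmt.FormatID} clock={fmt.ClockRate} fmtp={fmt.Parameters}");
}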