Windows 下 FFmpeg 软编码与硬编码
若设备支持 QSV(Intel Quick Sync Video)硬件加速,则可使用硬件编码;否则回退到软件编码。
namespace Coder.FFmpeg;

/// <summary>
/// 视频编码器 — H.264 video encoder built on FFmpeg. Prefers the Intel QSV
/// hardware encoder when requested and available, otherwise falls back to the
/// software encoder selected by <see cref="AVCodecID"/>.
/// </summary>
public sealed unsafe class FFmpegVideoEncoder : IDisposable
{
    #region 私有变量

    // Source frame dimensions; used to derive the input stride in EncodeCore.
    private readonly Size _originSize;

    // Converts raw input frames to the encoder's size / pixel format.
    private readonly FrameConverter _frameConverter;

    // True while the codec context is open; guards double-dispose and
    // encode-after-dispose.
    private bool _isCodecRunning;

    #endregion

    #region 公共属性

    /// <summary>
    /// Codec used for software encoding. Only the value present at construction
    /// time takes effect, because the encoder is opened in the constructor.
    /// Defaults to H.264.
    /// </summary>
    public AVCodecID AVCodecID { get; set; } = AVCodecID.AV_CODEC_ID_H264;

    /// <summary>
    /// Whether QSV hardware acceleration is actually in use. Request it via the
    /// constructor's <c>openHwDevice</c> argument; after construction this
    /// property reflects the real outcome (false when the QSV device could not
    /// be created and the encoder fell back to software).
    /// </summary>
    public bool OpenHwDevice { get; set; }

    /// <summary>
    /// The opened FFmpeg codec context (configured in the constructor).
    /// </summary>
    public AVCodecContext* EncodeContext { get; private set; }

    #endregion

    /// <summary>
    /// 创建视频编码器 — creates and opens a video encoder.
    /// </summary>
    /// <param name="originSize">原始大小 — source frame size.</param>
    /// <param name="destinationSize">编码后大小 — encoded frame size.</param>
    /// <param name="originPixelFormat">原始像素格式 — source pixel format.</param>
    /// <param name="destinationPixelFormat">目标像素格式 — target pixel format (software path; the QSV path forces NV12).</param>
    /// <param name="openHwDevice">Try the QSV hardware encoder. This must be a
    /// constructor argument: the <see cref="OpenHwDevice"/> property used to be
    /// read here before any caller could set it (object initializers run after
    /// the constructor), so the hardware path was unreachable.</param>
    /// <param name="frameRate">Target frame rate in fps; also used as the GOP size.</param>
    /// <param name="bitRate">Target bit rate in bits per second.</param>
    /// <exception cref="InvalidOperationException">The encoder could not be opened.</exception>
    public FFmpegVideoEncoder(Size originSize, Size destinationSize, AVPixelFormat originPixelFormat, AVPixelFormat destinationPixelFormat, bool openHwDevice = false, int frameRate = 30, long bitRate = 8_000_000)
    {
        _originSize = originSize;
        OpenHwDevice = openHwDevice;
        var pCodec = GetEncodeType(destinationPixelFormat);

        #region 编码参数
        EncodeContext->width = destinationSize.Width;
        EncodeContext->height = destinationSize.Height;
        EncodeContext->framerate = new AVRational { num = frameRate, den = 1 };
        EncodeContext->time_base = new AVRational { num = 1, den = frameRate };
        EncodeContext->gop_size = frameRate;                  // one key frame per second
        EncodeContext->flags |= ffmpeg.AV_CODEC_FLAG_PSNR;    // collect PSNR statistics (NOT a prediction setting)
        EncodeContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST;  // allow non-spec-compliant speedups
        EncodeContext->max_b_frames = 0;                      // no B-frames => no reorder delay
        EncodeContext->bit_rate = bitRate;
        // libx264 private options for low-latency encoding; av_opt_set simply
        // returns an (ignored) error for encoders that lack them (e.g. h264_qsv).
        ffmpeg.av_opt_set(EncodeContext->priv_data, "preset", "ultrafast", 0);
        ffmpeg.av_opt_set(EncodeContext->priv_data, "tune", "zerolatency", 0);
        #endregion

        ffmpeg.avcodec_open2(EncodeContext, pCodec, null).ThrowExceptionIfError();

        _frameConverter = new FrameConverter(_originSize, originPixelFormat, destinationSize, EncodeContext->pix_fmt);
        _isCodecRunning = true;
    }

    /// <summary>
    /// Selects the encoder and allocates <see cref="EncodeContext"/>.
    /// Tries h264_qsv when hardware acceleration was requested; on any failure
    /// (no qsv encoder, no matching hw config, device creation error) falls
    /// back to software and resets <see cref="OpenHwDevice"/> to false so the
    /// property reports the truth.
    /// </summary>
    private AVCodec* GetEncodeType(AVPixelFormat destinationPixelFormat)
    {
        if (!OpenHwDevice)
        {
            return UseSoftwareEncoder(destinationPixelFormat);
        }

        AVCodec* pCodec = ffmpeg.avcodec_find_encoder_by_name("h264_qsv");
        var type = ffmpeg.av_hwdevice_find_type_by_name("qsv");

        // Probe the encoder's hardware configs for one that supports a device
        // context of the qsv type.
        var hwSupported = false;
        if (pCodec != null)
        {
            for (var i = 0; ; i++)
            {
                AVCodecHWConfig* config = ffmpeg.avcodec_get_hw_config(pCodec, i);
                if (config == null)
                {
                    break;
                }
                // 0x01 == AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX
                if ((config->methods & 0x01) == 0x01 && config->device_type == type)
                {
                    hwSupported = true;
                    break;
                }
            }
        }

        if (!hwSupported)
        {
            OpenHwDevice = false;
            return UseSoftwareEncoder(destinationPixelFormat);
        }

        EncodeContext = ffmpeg.avcodec_alloc_context3(pCodec);
        if (ffmpeg.av_hwdevice_ctx_create(&EncodeContext->hw_device_ctx, AVHWDeviceType.AV_HWDEVICE_TYPE_QSV, null, null, 0) < 0)
        {
            // Device creation failed: release the never-opened context with
            // avcodec_free_context (avcodec_close/av_free is wrong for an
            // unopened context) and fall back to software.
            var ctx = EncodeContext;
            ffmpeg.avcodec_free_context(&ctx);
            OpenHwDevice = false;
            return UseSoftwareEncoder(destinationPixelFormat);
        }

        // QSV consumes NV12 system-memory frames on this path.
        EncodeContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_NV12;
        return pCodec;
    }

    /// <summary>
    /// Allocates a software codec context for <see cref="AVCodecID"/> with the
    /// caller-requested pixel format.
    /// </summary>
    private AVCodec* UseSoftwareEncoder(AVPixelFormat destinationPixelFormat)
    {
        var pCodec = ffmpeg.avcodec_find_encoder(AVCodecID);
        EncodeContext = ffmpeg.avcodec_alloc_context3(pCodec);
        EncodeContext->pix_fmt = destinationPixelFormat;
        return pCodec;
    }

    /// <summary>
    /// Shared encode path: wraps the raw bytes in an AVFrame, converts it, and
    /// drives send_frame/receive_packet until a packet is produced.
    /// Ownership of the returned packet transfers to the caller, who must
    /// release it with av_packet_free.
    /// </summary>
    /// <exception cref="InvalidOperationException">The encoder is not running or FFmpeg reported an error.</exception>
    private AVPacket* EncodeCore(byte[] originData)
    {
        if (!_isCodecRunning)
        {
            throw new InvalidOperationException("编码器未运行!");
        }
        fixed (byte* pBitmapData = originData)
        {
            var sourceFrame = new AVFrame
            {
                data = new byte_ptr8 { [0] = pBitmapData },
                // Stride derived from total length; assumes tightly-packed rows.
                linesize = new int8 { [0] = originData.Length / _originSize.Height },
                height = _originSize.Height
            };
            var frame = _frameConverter.Convert(sourceFrame);
            var pPacket = ffmpeg.av_packet_alloc();
            try
            {
                int error;
                do
                {
                    // NOTE(review): on EAGAIN the SAME frame is re-submitted, matching
                    // the original behavior. With max_b_frames=0/zerolatency the loop
                    // normally exits on the first pass; confirm the resubmission is
                    // intended for encoders with internal delay.
                    ffmpeg.avcodec_send_frame(EncodeContext, &frame).ThrowExceptionIfError();
                    error = ffmpeg.avcodec_receive_packet(EncodeContext, pPacket);
                } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
                error.ThrowExceptionIfError();
            }
            catch
            {
                // Don't leak the packet struct when encoding fails.
                ffmpeg.av_packet_free(&pPacket);
                throw;
            }
            finally
            {
                ffmpeg.av_frame_unref(&frame);
            }
            return pPacket;
        }
    }

    /// <summary>
    /// 编码 — encodes one raw frame.
    /// </summary>
    /// <param name="originData">原始数据 — raw source frame bytes.</param>
    /// <returns>返回编码后数据 — the encoded packet payload as a managed array.</returns>
    /// <exception cref="InvalidOperationException">The encoder is not running or FFmpeg reported an error.</exception>
    public byte[] Encode(byte[] originData)
    {
        var pPacket = EncodeCore(originData);
        try
        {
            var buffer = new byte[pPacket->size];
            Marshal.Copy(new IntPtr(pPacket->data), buffer, 0, pPacket->size);
            return buffer;
        }
        finally
        {
            // av_packet_free both unrefs the payload and frees the struct;
            // the original leaked the struct by only calling av_packet_unref.
            ffmpeg.av_packet_free(&pPacket);
        }
    }

    /// <summary>
    /// 编码 — encodes one raw frame into an unmanaged buffer.
    /// </summary>
    /// <param name="originData">原始数据 — raw source frame bytes.</param>
    /// <returns>
    /// A pointer/length pair for a buffer allocated with
    /// <see cref="Marshal.AllocHGlobal(int)"/>. The CALLER owns it and must
    /// release it with <see cref="Marshal.FreeHGlobal(System.IntPtr)"/>.
    /// (The original returned pPacket-&gt;data AFTER av_packet_unref had already
    /// freed it in the finally block, i.e. always a dangling/zero pointer.)
    /// </returns>
    /// <exception cref="InvalidOperationException">The encoder is not running or FFmpeg reported an error.</exception>
    public (IntPtr Pointer, int Length) EncodeToIntPtr(byte[] originData)
    {
        var pPacket = EncodeCore(originData);
        try
        {
            var length = pPacket->size;
            var buffer = Marshal.AllocHGlobal(length);
            Buffer.MemoryCopy(pPacket->data, (void*)buffer, length, length);
            return (buffer, length);
        }
        finally
        {
            ffmpeg.av_packet_free(&pPacket);
        }
    }

    /// <summary>
    /// 释放资源 — releases the codec context (including any hw device
    /// reference) and the frame converter. Safe to call more than once.
    /// </summary>
    public void Dispose()
    {
        if (!_isCodecRunning)
        {
            return;
        }
        _isCodecRunning = false;
        // avcodec_free_context closes the codec and unrefs hw_device_ctx;
        // it replaces the avcodec_close + av_free pair.
        var ctx = EncodeContext;
        ffmpeg.avcodec_free_context(&ctx);
        EncodeContext = null;
        _frameConverter.Dispose();
    }
}