跨平台 Xamarin.Android 开发之:适配多架构(x86_64、arm64-v8a、armeabi-v7a)的 FFmpeg 解码
此代码的编写颇费脑细胞:将"每次解码都重新开启解码器"优化为"整个解码过程只开启一次解码器"。
前提:已编译好 FFmpeg 各目标平台的动态库。
Windows、Android(x86_64、x86、arm64-v8a、armeabi-v7a)下的解码相对编码要简单一些,因为不涉及 AVFrame 的取值转换。
解码包括:创建解码器、解码、释放解码器
using System;
using System.Drawing;
using System.Runtime.InteropServices;

namespace FFmpegAnalyzer
{
    /// <summary>
    /// FFmpeg-based video decoder. The codec and the SWS conversion context are
    /// opened once in <see cref="CreateDecoder"/> and reused for every call to
    /// <see cref="DecodeFrames"/>, instead of being re-created per frame.
    /// Not thread-safe; call <see cref="Dispose"/> when done.
    /// </summary>
    internal unsafe class FFmpegDecoder : IDisposable
    {
        /// <param name="decodedFrameSize">Pixel size of one decoded frame.</param>
        /// <param name="isRgb">true = output RGB24 (3 bytes/px); false = output BGRA (4 bytes/px).</param>
        public FFmpegDecoder(Size decodedFrameSize, bool isRgb)
        {
            _decodedFrameSize = decodedFrameSize;
            _isRgb = isRgb;
        }

        /// <summary>
        /// Creates and opens the decoder plus the YUV420P → RGB24/BGRA conversion context.
        /// Must be called exactly once before <see cref="DecodeFrames"/>.
        /// </summary>
        /// <param name="codecFormat">Codec of the incoming stream (e.g. H.264).</param>
        /// <exception cref="InvalidOperationException">Codec not found or decoder already created.</exception>
        /// <exception cref="ApplicationException">SWS conversion context could not be initialized.</exception>
        public void CreateDecoder(AVCodecID codecFormat)
        {
            if (_isCodecRunning)
                throw new InvalidOperationException("Decoder has already been created.");

            // NOTE(review): assumes the source stream is YUV420P — confirm against the encoder side.
            var originPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
            var destinationPixelFormat = _isRgb
                ? AVPixelFormat.AV_PIX_FMT_RGB24
                : AVPixelFormat.AV_PIX_FMT_BGRA;

            _pDecodec = FFmpeg.avcodec_find_decoder(codecFormat);
            if (_pDecodec == null)
                throw new InvalidOperationException("Codec not found.");

            _pDecodecContext = FFmpeg.avcodec_alloc_context3(_pDecodec);
            _pDecodecContext->width = _decodedFrameSize.Width;
            _pDecodecContext->height = _decodedFrameSize.Height;
            _pDecodecContext->time_base = new AVRational { num = 1, den = 30 };
            _pDecodecContext->pix_fmt = originPixelFormat;
            _pDecodecContext->framerate = new AVRational { num = 30, den = 1 };
            // Decoders ignore this; AV_CODEC_FLAG2_FAST enables non-spec-compliant speedups.
            _pDecodecContext->flags2 |= FFmpeg.AV_CODEC_FLAG2_FAST;
            // B-frames disabled upstream; DecodeFrames relies on one-packet-in/one-frame-out.
            _pDecodecContext->max_b_frames = 0;
            // Fix: the original also set gop_size, AV_CODEC_FLAG_PSNR and the x264
            // "preset"/"tune" private options here — those are encoder-only and were
            // silently ignored by the decoder, so they have been removed.

            FFmpeg.avcodec_open2(_pDecodecContext, _pDecodec, null).ThrowExceptionIfError();

            _pConvertContext = FFmpeg.sws_getContext(
                _decodedFrameSize.Width, _decodedFrameSize.Height, originPixelFormat,
                _decodedFrameSize.Width, _decodedFrameSize.Height, destinationPixelFormat,
                FFmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
                throw new ApplicationException("Could not initialize the conversion context.");

            // One reusable destination buffer for the converted pixels (freed in Dispose).
            var convertedFrameBufferSize = FFmpeg.av_image_get_buffer_size(
                destinationPixelFormat, _decodedFrameSize.Width, _decodedFrameSize.Height, 4);
            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData = new BytePtr4();
            _dstLineSize = new Int4();
            FFmpeg.av_image_fill_arrays(
                ref _dstData, ref _dstLineSize, (byte*)_convertedFrameBufferPtr,
                destinationPixelFormat, _decodedFrameSize.Width, _decodedFrameSize.Height, 4);

            _isCodecRunning = true;
        }

        /// <summary>
        /// Decodes one complete encoded frame into raw RGB24 or BGRA pixel bytes.
        /// </summary>
        /// <param name="frameBytes">One complete encoded frame (e.g. an H.264 access unit).</param>
        /// <returns>Converted pixel data: width*height*3 (RGB) or width*height*4 (BGRA) bytes.</returns>
        /// <exception cref="InvalidOperationException">Decoder not created/running.</exception>
        /// <exception cref="ArgumentException">Input is null or empty.</exception>
        public byte[] DecodeFrames(byte[] frameBytes)
        {
            if (!_isCodecRunning)
                throw new InvalidOperationException("解码器未运行!");
            if (frameBytes == null || frameBytes.Length == 0)
                throw new ArgumentException("frameBytes must not be null or empty.", nameof(frameBytes));

            var packet = FFmpeg.av_packet_alloc();
            var frame = FFmpeg.av_frame_alloc();
            try
            {
                fixed (byte* pEncoded = frameBytes)
                {
                    // The packet borrows the pinned managed buffer; no AVBuffer is attached,
                    // so av_packet_unref below only resets the fields and never frees pEncoded.
                    packet->data = pEncoded;
                    packet->size = frameBytes.Length;

                    int error;
                    do
                    {
                        // NOTE(review): the original re-sends the same packet while the decoder
                        // reports EAGAIN; kept as-is because this pipeline is strictly
                        // one-packet-in/one-frame-out (max_b_frames = 0). With B-frames this
                        // loop would spin — confirm against the stream producer.
                        FFmpeg.avcodec_send_packet(_pDecodecContext, packet);
                        error = FFmpeg.avcodec_receive_frame(_pDecodecContext, frame);
                    } while (error == FFmpeg.AVERROR(FFmpeg.EAGAIN));
                    // Fix: the original ignored real decode errors and copied garbage pixels.
                    error.ThrowExceptionIfError();

                    var converted = ConvertToRgb(frame, _decodedFrameSize);
                    var bytesPerPixel = _isRgb ? 3 : 4;
                    var buffer = new byte[converted.height * converted.width * bytesPerPixel];
                    Marshal.Copy((IntPtr)converted.data[0], buffer, 0, buffer.Length);
                    return buffer;
                }
            }
            finally
            {
                // Fix: the original leaked both structs on every call (alloc without free).
                // unref drops any referenced payload, av_free releases the structs themselves
                // (prefer av_packet_free/av_frame_free if the binding exposes them).
                FFmpeg.av_packet_unref(packet);
                FFmpeg.av_free(packet);
                FFmpeg.av_frame_unref(frame);
                FFmpeg.av_free(frame);
            }
        }

        /// <summary>
        /// Releases the decoder, the conversion context and the pixel buffer. Idempotent.
        /// </summary>
        public void Dispose()
        {
            if (!_isCodecRunning)
                return; // already disposed, or never created — nothing to release
            _isCodecRunning = false;

            FFmpeg.avcodec_close(_pDecodecContext);
            FFmpeg.av_free(_pDecodecContext);
            _pDecodecContext = null;
            _pDecodec = null;

            if (_convertedFrameBufferPtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(_convertedFrameBufferPtr);
                _convertedFrameBufferPtr = IntPtr.Zero;
            }
            FFmpeg.sws_freeContext(_pConvertContext);
            _pConvertContext = null;
        }

        /// <summary>
        /// Converts a decoded YUV frame into the destination pixel format via sws_scale.
        /// The returned AVFrame's data points into the shared conversion buffer, which is
        /// overwritten by the next call — callers must copy it out immediately.
        /// </summary>
        /// <param name="waitDecoderFrame">Freshly decoded source frame.</param>
        /// <param name="detSize">Target frame size reported on the returned frame.</param>
        private AVFrame ConvertToRgb(AVFrame* waitDecoderFrame, Size detSize)
        {
            FFmpeg.sws_scale(
                _pConvertContext,
                waitDecoderFrame->data, waitDecoderFrame->linesize,
                0, waitDecoderFrame->height,
                _dstData, _dstLineSize).ThrowExceptionIfError();

            var decodeAfterData = new BytePtr8();
            decodeAfterData.UpdateFrom(_dstData);
            var lineSize = new Int8();
            lineSize.UpdateFrom(_dstLineSize);

            // Drop the source frame's payload now that it has been converted;
            // the caller frees the frame struct itself.
            FFmpeg.av_frame_unref(waitDecoderFrame);
            return new AVFrame
            {
                data = decodeAfterData,
                linesize = lineSize,
                width = detSize.Width,
                height = detSize.Height
            };
        }

        // Decoder handle and context (owned; released in Dispose).
        private AVCodec* _pDecodec;
        private AVCodecContext* _pDecodecContext;
        // Shared destination buffer for converted pixels (owned; released in Dispose).
        private IntPtr _convertedFrameBufferPtr;
        private BytePtr4 _dstData;
        private Int4 _dstLineSize;
        // YUV420P → RGB24/BGRA converter.
        private SwsContext* _pConvertContext;
        // Pixel size of one decoded frame.
        private readonly Size _decodedFrameSize;
        // true = RGB24 (3 channels), false = BGRA (4 channels).
        private readonly bool _isRgb;
        // Set once CreateDecoder succeeds; cleared by Dispose.
        private bool _isCodecRunning;
    }
}