Using an open-source decoder project to build a JS video-decoding web application (Part 3)

With the preparation from the previous two parts done, we can now start the actual coding and build the application.

Build steps

  At this point the environment is ready.

  P.S. Make sure a working Xcode is available on your system and that the current shell has already initialized emsdk.

1. Go into the directory where the FFmpeg source was extracted.

2. Run the following configure command:

  emconfigure ./configure --cc="emcc" --prefix=$(pwd)/../dist --enable-cross-compile --target-os=none --arch=x86_32 --cpu=generic --disable-ffplay --disable-ffprobe --disable-asm --disable-doc --disable-devices --disable-avdevice --disable-postproc --disable-swresample --disable-pthreads --disable-w32threads --disable-network --disable-hwaccels --disable-parsers --disable-bsfs --disable-debug --disable-protocols --disable-indevs --disable-outdevs --enable-protocol=file --enable-decoder=hevc --enable-decoder=h264

3. After configure succeeds, edit config.mak:

  set RANLIB to RANLIB=emranlib and AR to AR=emar

4. Run the build command: emmake make -j6

5. If the build fails with C99-related errors, adjust some macro definitions in the offending headers, especially the time-related ones.

6. Run the install command: make install

7. Switch to the install directory:

  cd ../dist/

8. Prepare the interfaces to export.

  Write a .c file that wraps the avcodec interfaces we need from FFmpeg, because the original functions take struct parameters, which makes it awkward for the web side to extract the data. Here I wrote a process.c:

#include "process.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <libavcodec/avcodec.h>
#include <libavutil/frame.h>
#include <emscripten/emscripten.h>

/* Codec types understood by the wrapper. */
enum CodecType{
    Encode_Video_HI_H264,
    Encode_Video_HI_H265
};

typedef struct _DecoderContainer
{
    AVCodecContext *pCodecCtx;
    AVPacket* pAvPacket;
}DecoderContainer;

/* Map our CodecType values onto FFmpeg codec IDs. */
int TranslateFFMPEGCodecID(int nType)
{
    enum AVCodecID avcodecID = AV_CODEC_ID_NONE;
    switch(nType)
    {
    case Encode_Video_HI_H264:
        {
            avcodecID = AV_CODEC_ID_H264;
            break;
        }
    case Encode_Video_HI_H265:
        {
            avcodecID = AV_CODEC_ID_H265;
            break;
        }
    }

    return avcodecID;
}

void EMSCRIPTEN_KEEPALIVE RegisterAll()
{
    avcodec_register_all();
}

/* Pointers are handed back to JavaScript as plain integers (wasm32 addresses). */
int EMSCRIPTEN_KEEPALIVE OpenDecoder(int nType, int nWidth, int nHeight)
{
    int nID = TranslateFFMPEGCodecID(nType);

    enum AVCodecID id = nID;

    AVCodec *pCodec = avcodec_find_decoder(id);
    AVCodecContext *pCodecCtx = avcodec_alloc_context3(pCodec);
    pCodecCtx->width = nWidth;
    pCodecCtx->height = nHeight;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    // Open codec
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        return 0; // Could not open codec
    }
    AVPacket* avPacket = av_packet_alloc();
    DecoderContainer* pContainer = (DecoderContainer *)malloc(sizeof(DecoderContainer));
    pContainer->pCodecCtx = pCodecCtx;
    pContainer->pAvPacket = avPacket;
    return (int)pContainer;
}

int EMSCRIPTEN_KEEPALIVE FrameAlloc()
{
    AVFrame* pFrame = av_frame_alloc();
    printf("FrameAlloc pFrame %p\n", pFrame);
    return (int)pFrame;
}

void EMSCRIPTEN_KEEPALIVE FrameFree(AVFrame* pFrame)
{
    av_frame_free(&pFrame);
}

/* Accessors so the JavaScript side can read plane pointers and sizes without
 * knowing the AVFrame struct layout. */
int EMSCRIPTEN_KEEPALIVE getY(void* pFrame)
{
    return (int)((AVFrame *)pFrame)->data[0];
}
int EMSCRIPTEN_KEEPALIVE getU(void* pFrame)
{
    return (int)((AVFrame *)pFrame)->data[1];
}
int EMSCRIPTEN_KEEPALIVE getV(void* pFrame)
{
    return (int)((AVFrame *)pFrame)->data[2];
}

int EMSCRIPTEN_KEEPALIVE getWidth(void* pFrame)
{
    return ((AVFrame *)pFrame)->width;
}

int EMSCRIPTEN_KEEPALIVE getHeight(void* pFrame)
{
    return ((AVFrame *)pFrame)->height;
}

int EMSCRIPTEN_KEEPALIVE getYLength(void* pFrame)
{
    return ((AVFrame *)pFrame)->linesize[0];
}
int EMSCRIPTEN_KEEPALIVE getULength(void* pFrame)
{
    return ((AVFrame *)pFrame)->linesize[1];
}
int EMSCRIPTEN_KEEPALIVE getVLength(void* pFrame)
{
    return ((AVFrame *)pFrame)->linesize[2];
}

int EMSCRIPTEN_KEEPALIVE DecodeFrame(void* pContainer, void* pSourceBuffer, int nSourceLen, void* pFrame)
{
    DecoderContainer *pDecoderContainer=(DecoderContainer*)pContainer;
    // Decode video frame

    pDecoderContainer->pAvPacket->data = (uint8_t*)pSourceBuffer;
    pDecoderContainer->pAvPacket->size = nSourceLen;

    // avcodec_decode_video2 returns the number of bytes consumed (negative on error);
    // frameFinished is non-zero when a complete frame was produced.
    int frameFinished;
    if(avcodec_decode_video2(pDecoderContainer->pCodecCtx, pFrame, &frameFinished, pDecoderContainer->pAvPacket)<=0)
    {
        return 0;
    }
    return 1;
}

void EMSCRIPTEN_KEEPALIVE CloseDecoder(void* pContainer)
{
    DecoderContainer *pDecoderContainer=(DecoderContainer*)pContainer;
    avcodec_close(pDecoderContainer->pCodecCtx);
    av_free(pDecoderContainer->pCodecCtx);
    pDecoderContainer->pCodecCtx = NULL;
    av_packet_free(&(pDecoderContainer->pAvPacket));
    free(pDecoderContainer);
}

9. Set the list of exported functions:

  export EXPORTED_FUNCTIONS="[ '_OpenDecoder', '_RegisterAll', '_DecodeFrame',  '_CloseDecoder','_FrameAlloc',  '_FrameFree', '_getY', '_getU', '_getV', '_getYLength', '_getULength', '_getVLength', '_getWidth', '_getHeight']"

10. Compile and generate the wasm output:

  emcc -I ./include process.c ../dist/lib/libavcodec.a ../dist/lib/libavutil.a -s EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS}" -o ffmpegasm.js -O3 -s WASM=1 -s ALLOW_MEMORY_GROWTH=1 -s TOTAL_MEMORY=33554432 -s ASSERTIONS=1

11. When the command above succeeds, you will see two files generated in the dist directory: ffmpegasm.js and ffmpegasm.wasm. The js file is what the web page loads; the wasm file is loaded and compiled by that js.

12. At this point the full build process is complete. All that is left is a demo to see the result.

Usage

  Notes on using the Module object produced by the wasm build: the wasm must finish initializing before it can be used.

  Only after the following event has fired may any of the module's interfaces be called: Module.onRuntimeInitialized = function () { console.log('WASM initialized done!'); }
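  A minimal loading sketch of that pattern is shown below. It assumes ffmpegasm.js and ffmpegasm.wasm sit next to the page; the Module object must be defined before the glue script is included so that the hook is picked up.

var Module = {
    onRuntimeInitialized: function () {
        // From here on the exported _XXX functions are safe to call,
        // e.g. the _RegisterAll / _OpenDecoder calls in the demo below.
        console.log('WASM initialized done!');
    }
};
// Then load the glue code with an ordinary script tag:
//   <script src="ffmpegasm.js"></script>
// ffmpegasm.js fetches and instantiates ffmpegasm.wasm from the same directory by itself.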

 

Module._RegisterAll();
var nWidth = 704;
var nHeight = 576;
var s = Module._OpenDecoder(0, nWidth, nHeight);
console.log('_OpenDecoder return ', s);

var input = document.getElementById("file");
input.onchange = function () {
    var file = this.files[0];
    if (!!file) {
        // Read the selected local file
        var reader = new FileReader();
        reader.readAsArrayBuffer(file);
        reader.onload = function () {
            var rr = new Uint8Array(this.result);
            // Copy the encoded data into the wasm heap
            var srcpic = Module._malloc(this.result.byteLength);
            var srcBuf = new Uint8Array(Module.HEAPU8.buffer, srcpic, this.result.byteLength);
            srcBuf.set(rr);
            var frame = Module._FrameAlloc();
            Module._DecodeFrame(s, srcBuf.byteOffset, this.result.byteLength, frame);

            /* The YUV planes could be repacked into a single buffer, but it is
               simpler to keep working with the separate Y/U/V components. */
            var yaddr = Module._getY(frame);
            var uaddr = Module._getU(frame);
            var vaddr = Module._getV(frame);
            var widthex = Module._getYLength(frame);
            console.log(widthex, nHeight);

            var ybuffer = new Uint8Array(Module.HEAPU8.buffer, yaddr, widthex * nHeight);
            var ubuffer = new Uint8Array(Module.HEAPU8.buffer, uaddr, widthex * nHeight / 4);
            var vbuffer = new Uint8Array(Module.HEAPU8.buffer, vaddr, widthex * nHeight / 4);

            drawYuvCanvas2(ybuffer, ubuffer, vbuffer, widthex, nHeight);
        }
    }
}
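The drawYuvCanvas2 helper called at the end is not shown in this post. The sketch below is one way it could be written, assuming a <canvas id="canvas"> element on the page; the function name and canvas id are only illustrative, and the YUV420P-to-RGB conversion is done in plain JavaScript here (a real player would usually upload the three planes as WebGL textures and convert in a shader for speed). Note that the demo passes the Y linesize (widthex) as the width, so the drawn image may include the decoder's padding columns.

function drawYuvCanvas2(yBuf, uBuf, vBuf, width, height) {
    var canvas = document.getElementById("canvas"); // assumed <canvas id="canvas"> element
    canvas.width = width;
    canvas.height = height;
    var ctx = canvas.getContext("2d");
    var image = ctx.createImageData(width, height);
    var rgba = image.data;
    var chromaStride = width >> 1; // YUV 4:2:0: one U/V sample per 2x2 block of luma
    for (var row = 0; row < height; row++) {
        for (var col = 0; col < width; col++) {
            var y = yBuf[row * width + col];
            var u = uBuf[(row >> 1) * chromaStride + (col >> 1)] - 128;
            var v = vBuf[(row >> 1) * chromaStride + (col >> 1)] - 128;
            var idx = (row * width + col) * 4;
            // BT.601-style conversion, ignoring limited-range scaling for simplicity;
            // the Uint8ClampedArray clamps the results to [0, 255] on assignment.
            rgba[idx]     = y + 1.402 * v;
            rgba[idx + 1] = y - 0.344 * u - 0.714 * v;
            rgba[idx + 2] = y + 1.772 * u;
            rgba[idx + 3] = 255;
        }
    }
    ctx.putImageData(image, 0, 0);
}

When a frame is no longer needed, the demo could also release what it allocated, for example Module._FrameFree(frame) and Module._CloseDecoder(s) from the exported list, plus Module._free(srcpic) for the input buffer (assuming _free is available alongside the _malloc the demo already uses).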
