FFmpeg H.264 decoding (FFmpeg H.264 解码)
// Decodes one H.264 access unit from inputBuffer and, when the decoder emits a
// complete picture, packs the YUV 4:2:0 planes into an H264YUV_Frame and hands
// it to the UI synchronously on the main thread.
//
// @param inputBuffer Raw H.264 bitstream bytes. The packet only borrows this
//                    buffer (no copy), so it must stay valid for the call.
// @param aLength     Number of bytes in inputBuffer.
// @return 0 on success (including "no picture produced yet"),
//         -1 when the stream resolution changed and the frame was dropped,
//         or the negative FFmpeg error code when decoding failed.
- (int)DecodeH264Frames:(unsigned char *)inputBuffer withLength:(int)aLength {
    int gotPicPtr = 0;
    int result = 0;

    av_init_packet(&pAvPackage);
    // The packet borrows the caller's buffer; nothing is copied here.
    pAvPackage.data = (unsigned char *)inputBuffer;
    pAvPackage.size = aLength;

    // Decode; gotPicPtr becomes nonzero once a full picture is available.
    result = avcodec_decode_video2(pCodecCtx, pVideoFrame, &gotPicPtr, &pAvPackage);
    if (result < 0) {
        // FIX: the original ignored decoder errors and reported success.
        // Release the packet and surface the FFmpeg error code instead.
        av_free_packet(&pAvPackage);
        return result;
    }

    // If the video resolution changed mid-stream, drop this frame and record
    // the new width so the render pipeline can be reconfigured.
    // NOTE(review): pictureWidth is never assigned while it is 0 in this
    // method — confirm it is initialized elsewhere, or this check never fires.
    if ((pictureWidth != 0) && (pictureWidth != pCodecCtx->width)) {
        setRecordResolveState = 0;
        pictureWidth = pCodecCtx->width;
        // FIX: release the packet on this early-return path as well — the
        // original leaked any packet side data here.
        av_free_packet(&pAvPackage);
        return -1;
    }

    // YUV 4:2:0 -> packed planes for display (Y full size, U/V quarter size).
    if (gotPicPtr) {
        // MIN() guards against a linesize narrower than the visible width;
        // copyDecodedFrame strips any row padding beyond `width`.
        unsigned int lumaLength = (unsigned int)((pCodecCtx->height) * (MIN(pVideoFrame->linesize[0], pCodecCtx->width)));
        unsigned int chromBLength = (unsigned int)(((pCodecCtx->height) / 2) * (MIN(pVideoFrame->linesize[1], (pCodecCtx->width) / 2)));
        unsigned int chromRLength = (unsigned int)(((pCodecCtx->height) / 2) * (MIN(pVideoFrame->linesize[2], (pCodecCtx->width) / 2)));

        H264YUV_Frame yuvFrame;
        memset(&yuvFrame, 0, sizeof(H264YUV_Frame));

        yuvFrame.luma.length = lumaLength;
        yuvFrame.chromaB.length = chromBLength;
        yuvFrame.chromaR.length = chromRLength;

        yuvFrame.luma.dataBuffer = (unsigned char *)malloc(lumaLength);
        yuvFrame.chromaB.dataBuffer = (unsigned char *)malloc(chromBLength);
        yuvFrame.chromaR.dataBuffer = (unsigned char *)malloc(chromRLength);

        // FIX: verify all three allocations before writing into them — the
        // original would crash inside copyDecodedFrame on malloc failure.
        if (yuvFrame.luma.dataBuffer && yuvFrame.chromaB.dataBuffer && yuvFrame.chromaR.dataBuffer) {
            // Copy the decoder's stride-padded planes into tightly packed buffers.
            copyDecodedFrame(pVideoFrame->data[0], yuvFrame.luma.dataBuffer,
                             pVideoFrame->linesize[0],
                             pCodecCtx->width, pCodecCtx->height);
            copyDecodedFrame(pVideoFrame->data[1], yuvFrame.chromaB.dataBuffer,
                             pVideoFrame->linesize[1],
                             pCodecCtx->width / 2, pCodecCtx->height / 2);
            copyDecodedFrame(pVideoFrame->data[2], yuvFrame.chromaR.dataBuffer,
                             pVideoFrame->linesize[2],
                             pCodecCtx->width / 2, pCodecCtx->height / 2);

            yuvFrame.width = pCodecCtx->width;
            yuvFrame.height = pCodecCtx->height;

            // dispatch_sync keeps yuvFrame's stack storage and buffers alive
            // while the main thread consumes them.
            // NOTE(review): this deadlocks if the method is ever invoked on
            // the main queue — confirm all callers run on a decode thread.
            dispatch_sync(dispatch_get_main_queue(), ^{
                [self updateYUVFrameOnMainThread:(H264YUV_Frame *)&yuvFrame];
            });
        }

        // free(NULL) is a no-op, so this is safe even on partial allocation failure.
        free(yuvFrame.luma.dataBuffer);
        free(yuvFrame.chromaB.dataBuffer);
        free(yuvFrame.chromaR.dataBuffer);
    }

    av_free_packet(&pAvPackage);
    return 0;
}