
Calling FFmpeg APIs through JNI to decode a raw H.264 stream

This demo reads an H.264-encoded video stream from a file, decodes it with FFmpeg, and writes the decoded YUV frames to an output file.
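The native code shown later hard-codes its input path as /data/video/bxjg_352x288.h264, so the test stream has to be copied to the device first. A minimal sketch with adb, assuming a rooted device or an emulator where /data is writable:

adb shell mkdir /data/video                  # requires write access to /data (root or emulator)
adb push bxjg_352x288.h264 /data/video/      # path expected by hello-jni.c below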

The project directory structure is shown in the screenshot below. Android.mk expects the prebuilt FFmpeg shared libraries under libs/ and the FFmpeg headers under include/, both relative to the directory containing Android.mk (LOCAL_PATH).

[image: project directory structure]

Android.mk is as follows:

LOCAL_PATH := $(call my-dir)  
  
# FFmpeg library  
include $(CLEAR_VARS)  
LOCAL_MODULE := avcodec  
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavcodec-56.so  
include $(PREBUILT_SHARED_LIBRARY)  
  
include $(CLEAR_VARS)  
LOCAL_MODULE := avutil  
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavutil-54.so  
include $(PREBUILT_SHARED_LIBRARY)  

include $(CLEAR_VARS)  
LOCAL_MODULE := swresample  
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libswresample-1.so  
include $(PREBUILT_SHARED_LIBRARY)  
  
include $(CLEAR_VARS)  
LOCAL_MODULE := swscale  
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libswscale-3.so  
include $(PREBUILT_SHARED_LIBRARY)  

# Program  
include $(CLEAR_VARS)  
LOCAL_MODULE := hello-jni
LOCAL_SRC_FILES := hello-jni.c
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog -lz
LOCAL_SHARED_LIBRARIES := avcodec swscale avutil swresample
include $(BUILD_SHARED_LIBRARY)

Application.mk is as follows:

APP_ABI := armeabi
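With these two .mk files and the prebuilt FFmpeg libraries in place, the native library is built with ndk-build. A minimal sketch, assuming the project root is the directory that contains jni/ (the project directory name is a placeholder):

cd HelloJni        # placeholder project directory containing jni/
ndk-build          # builds libhello-jni.so and copies the prebuilt FFmpeg .so files into libs/armeabi/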

HelloJni.java is as follows:

package com.example.hellojni;

import android.app.Activity;
import android.widget.TextView;
import android.os.Bundle;


public class HelloJni extends Activity
{
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);

        TextView  tv = new TextView(this);
        if(DecodeH264Video())
        {
            tv.setText("Decode Video Success");
        }
        else
        {
            tv.setText("Decode Video Failed");
        }
        setContentView(tv);
    }

    public native boolean  DecodeH264Video();

    static {
        // Load each library after its dependencies: avutil first, then the
        // libraries built on top of it, and the JNI wrapper last.
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("hello-jni");
    }
}
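For a quick end-to-end test, the APK can be installed and the activity launched from the command line. A sketch; the APK path is a placeholder, while the package and activity names are taken from the Java code above:

adb install -r bin/HelloJni-debug.apk                  # placeholder APK path
adb shell am start -n com.example.hellojni/.HelloJni   # starting the activity triggers the decode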

hello-jni.c is as follows:

#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <jni.h>
#include <android/log.h>

typedef enum
{
    FALSE = 0, TRUE = 1,
} C_BOOL;

const int IN_BUFFER_SIZE = 4096;    /* bytes read from the input file per fread() call */

#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGD(format, ...)  __android_log_print(ANDROID_LOG_DEBUG, "(-_-)", format, ##__VA_ARGS__)

static C_BOOL __DecodeH264Video(FILE* fp_in, FILE* fp_out);

JNIEXPORT jboolean JNICALL Java_com_example_hellojni_HelloJni_DecodeH264Video(JNIEnv *env, jobject obj)
{
    char filepath_in[] = "/data/video/bxjg_352x288.h264";
    FILE *fp_in = fopen(filepath_in, "rb");
    if (NULL == fp_in)
    {
        LOGE("open input h264 video file failed, filename [%s]", filepath_in);
        return (jboolean) FALSE;
    }

    char filepath_out[] = "/data/video/bxjg_352x288.yuv";
    FILE *fp_out = fopen(filepath_out, "wb");
    if (NULL == fp_out)
    {
        LOGE("open output yuv video file failed, filename [%s]", filepath_out);
        fclose(fp_in);
        return (jboolean) FALSE;
    }

    LOGD("open input and output file success");

    if (TRUE == __DecodeH264Video(fp_in, fp_out))
    {
        LOGD("decode h264 video success");
    }
    else
    {
        LOGE("decode h264 video failed");
        fclose(fp_in);
        fclose(fp_out);
        return (jboolean) FALSE;
    }

    fclose(fp_in);
    fclose(fp_out);

    return (jboolean) TRUE;
}

C_BOOL __DecodeH264Video(FILE* fp_in, FILE* fp_out)
{
    avcodec_register_all();

    AVCodec *pCodec = NULL;
    pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (NULL == pCodec)
    {
        LOGE("avcodec_find_decoder failed");
        return FALSE;
    }

    AVCodecContext *pCodecCtx = NULL;
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (NULL == pCodecCtx)
    {
        LOGE("avcodec_alloc_context3 failed");
        return FALSE;
    }

    AVCodecParserContext *pCodecParserCtx = NULL;
    pCodecParserCtx = av_parser_init(AV_CODEC_ID_H264);
    if (NULL == pCodecParserCtx)
    {
        LOGE("av_parser_init failed");
        return FALSE;
    }

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    {
        LOGE("avcodec_open2 failed");
        return FALSE;
    }

    AVFrame *pFrame = NULL;
    pFrame = av_frame_alloc();
    if (NULL == pFrame)
    {
        LOGE("av_frame_alloc failed");
        return FALSE;
    }

    AVPacket packet;
    av_init_packet(&packet);

    uint8_t in_buffer[IN_BUFFER_SIZE + FF_INPUT_BUFFER_PADDING_SIZE];
    memset(in_buffer, 0, sizeof(in_buffer));
    uint8_t *cur_ptr = NULL;
    int cur_size = 0;
    int ret = 0;
    int got_picture = 0;
    int y_size = 0;
    int first_time = 1;

    struct SwsContext *img_convert_ctx = NULL;
    AVFrame *pFrameYUV = NULL;
    uint8_t *out_buffer = NULL;

    while (TRUE)
    {
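        /* Read the input file in IN_BUFFER_SIZE chunks; av_parser_parse2 below
         * reassembles the raw bytes into complete H.264 packets before decoding. */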
        cur_size = fread(in_buffer, 1, IN_BUFFER_SIZE, fp_in);
        if (0 == cur_size)
        {
            break;
        }

        cur_ptr = in_buffer;
        while (cur_size > 0)
        {
            int parse_len = av_parser_parse2(pCodecParserCtx, pCodecCtx, &packet.data, &packet.size, cur_ptr, cur_size,
                    AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);

            cur_ptr += parse_len;
            cur_size -= parse_len;

            if (0 == packet.size)
            {
                continue;
            }

            LOGD("packet size [%d]", packet.size);

            switch (pCodecParserCtx->pict_type)
            {
                case AV_PICTURE_TYPE_I:
                {
                    LOGD("AV_PICTURE_TYPE_I");
                    break;
                }
                case AV_PICTURE_TYPE_P:
                {
                    LOGD("AV_PICTURE_TYPE_P");
                    break;
                }
                case AV_PICTURE_TYPE_B:
                {
                    LOGD("AV_PICTURE_TYPE_B");
                    break;
                }
                default:
                {
                    LOGD("OTHER_PICTURE_TYPE");
                    break;
                }
            }

            LOGD("CodecParserCtx->output_picture_number [%d]", pCodecParserCtx->output_picture_number);

            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);

            if (ret < 0)
            {
                LOGE("avcodec_decode_video2 failed");
                return FALSE;
            }

            if (got_picture)
            {
                if (first_time)
                {
                    LOGD("CodecCtx->codec->long_name [%s]", pCodecCtx->codec->long_name);
                    LOGD("CodecCtx->width [%d], CodecCtx->height [%d]", pCodecCtx->width, pCodecCtx->height);

                    //SwsContext
                    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                            pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

                    pFrameYUV = av_frame_alloc();

                    out_buffer = (uint8_t *) av_malloc(
                            avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));

                    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width,
                            pCodecCtx->height);

                    y_size = pCodecCtx->width * pCodecCtx->height;

                    first_time = 0;
                }

                sws_scale(img_convert_ctx, (const uint8_t* const *) pFrame->data, pFrame->linesize, 0,
                        pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);

                fwrite(pFrameYUV->data[0], 1, y_size, fp_out); //Y
                fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_out); //U
                fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_out); //V

                LOGD("succeed to decode one frame");

            }
        }

    }

    //Flush Decoder
    packet.data = NULL;
    packet.size = 0;

    while (TRUE)
    {
        ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);
        if (ret < 0)
        {
            LOGE("avcodec_decode_video2 failed");
            return FALSE;
        }

        if (!got_picture)
        {
            break;
        }

        sws_scale(img_convert_ctx, (const uint8_t* const *) pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                pFrameYUV->data, pFrameYUV->linesize);

        fwrite(pFrameYUV->data[0], 1, y_size, fp_out); //Y
        fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_out); //U
        fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_out); //V

        LOGD("Flush Decoder: Succeed to decode 1 frame");
    }

    sws_freeContext(img_convert_ctx);
    av_free(out_buffer);
    av_frame_free(&pFrameYUV);
    av_parser_close(pCodecParserCtx);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    av_free(pCodecCtx);

    return TRUE;
}
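After a run, the log output and the decoded file can be checked from the host. A sketch, assuming the 352x288 resolution implied by the file name; the output written above is raw YUV420P:

adb logcat | grep -e "(>_<)" -e "(-_-)"       # the LOGE / LOGD tags used in hello-jni.c
adb pull /data/video/bxjg_352x288.yuv .
ffplay -f rawvideo -pixel_format yuv420p -video_size 352x288 bxjg_352x288.yuv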

If only FFmpeg's decoding functionality is needed, the configure options below can be used to strip the build down, producing a decode-only libavcodec.so of roughly 2 MB.

#!/bin/bash
NDK="/home/alchen/android-ndk-r9d"
TARGET="android-19"
SYSROOT="$NDK/platforms/$TARGET/arch-arm"
TOOLCHAIN="$NDK/toolchains/arm-linux-androideabi-4.8/prebuilt/linux-x86_64"

function build_one
{
./configure \
    --prefix=$PREFIX \
    --cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
    --target-os=linux \
    --enable-decoder=h264 \
    --enable-shared \
    --enable-version3 \
    --enable-gpl \
    --enable-nonfree \
    --enable-protocol=file \
    --enable-avfilter \
    --enable-cross-compile \
    --enable-asm \
    --enable-neon \
    --enable-armv5te \
    --disable-static \
    --disable-decoders \
    --disable-doc \
    --disable-muxers \
    --disable-demuxers \
    --disable-bsfs \
    --disable-indevs \
    --disable-outdevs \
    --disable-filters \
    --disable-ffmpeg \
    --disable-ffplay \
    --disable-ffserver \
    --disable-ffprobe \
    --disable-encoders \
    --disable-devices \
    --disable-protocols \
    --disable-network \
    --disable-avdevice \
    --arch=arm \
    --sysroot=$SYSROOT \
    --extra-cflags="-Os -fpic $ADDI_CFLAGS" \
    --extra-ldflags="$ADDI_LDFLAGS" \
    $ADDITIONAL_CONFIGURE_FLAG
    make clean
    make
    make install
}
CPU=arm
PREFIX=$(pwd)/android/$CPU
ADDI_CFLAGS="-marm"
build_one
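The script is meant to be run from the top of the FFmpeg source tree (the library sonames used in Android.mk, avcodec-56 / avutil-54 / swresample-1 / swscale-3, correspond to an FFmpeg 2.x release). A sketch of the invocation; the script filename is a placeholder:

cd ffmpeg-2.x                     # FFmpeg source directory; the exact version is an assumption
bash build_android.sh             # the script above, saved under a placeholder name
ls android/arm/lib                # make install puts the trimmed libraries here ($PREFIX/lib)
ls android/arm/include            # and the matching FFmpeg headers here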
posted on 2016-01-14 13:21 by octocat