
Decoding H.264 with FFmpeg - 2015.01.14



This demo calls the FFmpeg APIs through JNI to decode a raw H.264 bitstream.

It reads an H.264-encoded video stream from a file, decodes it with FFmpeg, and writes the decoded YUV frames to another file.

The project directory structure is shown in the figure below:

[Figure: project directory structure]

The Android.mk file is as follows:

LOCAL_PATH := $(call my-dir)  
  
# FFmpeg library  
include $(CLEAR_VARS)  
LOCAL_MODULE := avcodec  
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavcodec-56.so  
include $(PREBUILT_SHARED_LIBRARY)  
  
include $(CLEAR_VARS)  
LOCAL_MODULE := avutil  
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libavutil-54.so  
include $(PREBUILT_SHARED_LIBRARY)  

include $(CLEAR_VARS)  
LOCAL_MODULE := swresample  
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libswresample-1.so  
include $(PREBUILT_SHARED_LIBRARY)  
  
include $(CLEAR_VARS)  
LOCAL_MODULE := swscale  
LOCAL_SRC_FILES := $(LOCAL_PATH)/libs/libswscale-3.so  
include $(PREBUILT_SHARED_LIBRARY)  

# Program  
include $(CLEAR_VARS)  
LOCAL_MODULE := hello-jni
LOCAL_SRC_FILES := hello-jni.c
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_LDLIBS := -llog -lz
LOCAL_SHARED_LIBRARIES := avcodec swscale avutil swresample
include $(BUILD_SHARED_LIBRARY)

Application.mk is as follows:

APP_ABI := armeabi

HelloJni.java is as follows:

package com.example.hellojni;

import android.app.Activity;
import android.widget.TextView;
import android.os.Bundle;


public class HelloJni extends Activity
{
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);

        TextView  tv = new TextView(this);
        if(DecodeH264Video())
        {
            tv.setText("Decode Video Success");
        }
        else
        {
            tv.setText("Decode Video Failed");
        }
        setContentView(tv);
    }

    public native boolean  DecodeH264Video();

    static {
        // Load the FFmpeg libraries in dependency order (avutil first) and
        // hello-jni last, so that older Android linkers, which do not resolve
        // shared-library dependencies automatically, can load them.
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("swscale-3");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("hello-jni");
    }
}

hello-jni.c is as follows:

#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <jni.h>
#include <android/log.h>

typedef enum
{
    FALSE = 0, TRUE = 1,
} C_BOOL;

// uint8_t already comes from <stdint.h> (pulled in by the FFmpeg headers),
// so it must not be typedef'd again here.
#define IN_BUFFER_SIZE 4096

#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, "(>_<)", format, ##__VA_ARGS__)
#define LOGD(format, ...)  __android_log_print(ANDROID_LOG_DEBUG, "(-_-)", format, ##__VA_ARGS__)

static C_BOOL __DecodeH264Video(FILE* fp_in, FILE* fp_out);

JNIEXPORT jboolean JNICALL Java_com_example_hellojni_HelloJni_DecodeH264Video(JNIEnv *env, jobject obj)
{
    char filepath_in[] = "/data/video/bxjg_352x288.h264";
    FILE *fp_in = fopen(filepath_in, "rb");
    if (NULL == fp_in)
    {
        LOGE("open input h264 video file failed, filename [%s]", filepath_in);
        return (jboolean) FALSE;
    }

    char filepath_out[] = "/data/video/bxjg_352x288.yuv";
    FILE *fp_out = fopen(filepath_out, "wb");
    if (NULL == fp_out)
    {
        LOGE("open output yuv video file failed, filename [%s]", filepath_out);
        fclose(fp_in);
        return (jboolean) FALSE;
    }

    LOGD("open input and output file success");

    if (TRUE == __DecodeH264Video(fp_in, fp_out))
    {
        LOGD("decode h264 video success");
    }
    else
    {
        LOGE("decode h264 video failed");
        fclose(fp_in);
        fclose(fp_out);
        return (jboolean) FALSE;
    }

    fclose(fp_in);
    fclose(fp_out);

    return (jboolean) TRUE;
}

C_BOOL __DecodeH264Video(FILE* fp_in, FILE* fp_out)
{
    avcodec_register_all();

    AVCodec *pCodec = NULL;
    pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (NULL == pCodec)
    {
        LOGE("avcodec_find_decoder failed");
        return FALSE;
    }

    AVCodecContext *pCodecCtx = NULL;
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (NULL == pCodecCtx)
    {
        LOGE("avcodec_alloc_context3 failed");
        return FALSE;
    }

    AVCodecParserContext *pCodecParserCtx = NULL;
    pCodecParserCtx = av_parser_init(AV_CODEC_ID_H264);
    if (NULL == pCodecParserCtx)
    {
        LOGE("av_parser_init failed");
        return FALSE;
    }

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    {
        LOGE("avcodec_open2 failed");
        return FALSE;
    }

    AVFrame *pFrame = NULL;
    pFrame = av_frame_alloc();
    if (NULL == pFrame)
    {
        LOGE("av_frame_alloc failed");
        return FALSE;
    }

    AVPacket packet;
    av_init_packet(&packet);
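    // packet.data and packet.size are filled by av_parser_parse2() below with
    // one complete H.264 packet at a time.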

    uint8_t in_buffer[IN_BUFFER_SIZE + FF_INPUT_BUFFER_PADDING_SIZE];
    memset(in_buffer, 0, sizeof(in_buffer));
    uint8_t *cur_ptr = NULL;
    int cur_size = 0;
    int ret = 0;
    int got_picture = 0;
    int y_size = 0;
    int first_time = 1;

    struct SwsContext *img_convert_ctx = NULL;
    AVFrame *pFrameYUV = NULL;
    uint8_t *out_buffer = NULL;

    while (TRUE)
    {
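        // Read the raw H.264 byte stream in fixed-size chunks; av_parser_parse2()
        // below reassembles complete packets from the chunked input.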
        cur_size = fread(in_buffer, 1, IN_BUFFER_SIZE, fp_in);
        if (0 == cur_size)
        {
            break;
        }

        cur_ptr = in_buffer;
        while (cur_size > 0)
        {
            int parse_len = av_parser_parse2(pCodecParserCtx, pCodecCtx, &packet.data, &packet.size, cur_ptr, cur_size,
                    AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);

            cur_ptr += parse_len;
            cur_size -= parse_len;

            if (0 == packet.size)
            {
                continue;
            }

            LOGD("packet size [%d]", packet.size);

            switch (pCodecParserCtx->pict_type)
            {
                case AV_PICTURE_TYPE_I:
                {
                    LOGD("AV_PICTURE_TYPE_I");
                    break;
                }
                case AV_PICTURE_TYPE_P:
                {
                    LOGD("AV_PICTURE_TYPE_P");
                    break;
                }
                case AV_PICTURE_TYPE_B:
                {
                    LOGD("AV_PICTURE_TYPE_B");
                    break;
                }
                default:
                {
                    LOGD("OTHER_PICTURE_TYPE");
                    break;
                }
            }

            LOGD("CodecParserCtx->output_picture_number [%d]", pCodecParserCtx->output_picture_number);

            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);

            if (ret < 0)
            {
                LOGE("avcodec_decode_video2 failed");
                return FALSE;
            }
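            // got_picture stays 0 while the decoder is still buffering (for
            // example while it waits for reference frames), so not every
            // packet yields an output frame immediately.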

            if (got_picture)
            {
                if (first_time)
                {
                    LOGD("CodecCtx->codec->long_name [%s]", pCodecCtx->codec->long_name);
                    LOGD("CodecCtx->width [%d], CodecCtx->height [%d]", pCodecCtx->width, pCodecCtx->height);

                    //SwsContext
                    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                            pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

                    pFrameYUV = av_frame_alloc();

                    out_buffer = (uint8_t *) av_malloc(
                            avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));

                    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width,
                            pCodecCtx->height);

                    y_size = pCodecCtx->width * pCodecCtx->height;

                    first_time = 0;
                }

                sws_scale(img_convert_ctx, (const uint8_t* const *) pFrame->data, pFrame->linesize, 0,
                        pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);

                fwrite(pFrameYUV->data[0], 1, y_size, fp_out); //Y
                fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_out); //U
                fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_out); //V

                LOGD("succeed to decode one frame");

            }
        }

    }

    //Flush Decoder
    packet.data = NULL;
    packet.size = 0;
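    // An empty packet makes avcodec_decode_video2() return the frames the
    // decoder is still holding back (frame-reordering delay).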

    while (TRUE)
    {
        ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);
        if (ret < 0)
        {
            LOGE("avcodec_decode_video2 failed");
            return FALSE;
        }

        if (!got_picture)
        {
            break;
        }

        sws_scale(img_convert_ctx, (const uint8_t* const *) pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
                pFrameYUV->data, pFrameYUV->linesize);

        fwrite(pFrameYUV->data[0], 1, y_size, fp_out); //Y
        fwrite(pFrameYUV->data[1], 1, y_size / 4, fp_out); //U
        fwrite(pFrameYUV->data[2], 1, y_size / 4, fp_out); //V

        LOGD("Flush Decoder: Succeed to decode 1 frame");
    }

    sws_freeContext(img_convert_ctx);
    av_free(out_buffer);
    av_frame_free(&pFrameYUV);
    av_parser_close(pCodecParserCtx);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    av_free(pCodecCtx);

    return TRUE;
}
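
Note: avcodec_decode_video2(), used above and matching libavcodec 56, has since been deprecated and removed from newer FFmpeg releases. As a rough sketch only, not part of the original demo, the same decode-and-drain logic with the send/receive API introduced in FFmpeg 3.1 might look like the helper below; the function name decode_and_write and the assumption of a yuv420p output format are mine.

#include <stdio.h>
#include "libavcodec/avcodec.h"

// Feed one packet (or NULL to flush) and write out every frame that becomes
// available. Assumes the decoder outputs planar yuv420p.
static int decode_and_write(AVCodecContext *ctx, AVFrame *frame,
                            const AVPacket *pkt, FILE *fp_out)
{
    int ret = avcodec_send_packet(ctx, pkt);   // pkt == NULL drains the decoder
    if (ret < 0)
        return ret;

    while (ret >= 0)
    {
        ret = avcodec_receive_frame(ctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;                          // need more input / fully drained
        if (ret < 0)
            return ret;                        // real decoding error

        // Write the Y, U and V planes line by line, honouring linesize padding.
        int y;
        for (y = 0; y < frame->height; y++)
            fwrite(frame->data[0] + y * frame->linesize[0], 1, frame->width, fp_out);
        for (y = 0; y < frame->height / 2; y++)
            fwrite(frame->data[1] + y * frame->linesize[1], 1, frame->width / 2, fp_out);
        for (y = 0; y < frame->height / 2; y++)
            fwrite(frame->data[2] + y * frame->linesize[2], 1, frame->width / 2, fp_out);
    }
    return 0;
}

The rest of the flow (av_parser_parse2() splitting the byte stream into packets) stays the same; the explicit flush loop is replaced by a final decode_and_write(ctx, frame, NULL, fp_out) call.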

Original article: http://www.cnblogs.com/zhouLee/p/5129886.html

(0)
(0)
   
举报
评论 一句话评论(0
登录后才能评论!
© 2014 mamicode.com 版权所有  联系我们:gaon5@hotmail.com
迷上了代码!