The platform is the RK3066 (Fuzhou Rockchip Electronics) running Android 4.2.0. Its real-time VP8 hardware encoding is different from software encoding with ffmpeg, x264, Xvid and similar encoders: hardware encoding relies mainly on dedicated hardware.
Hardware encoding: recording video through Android's built-in Camera actually drives the SoC's dedicated HD video-encoding block (the VPU, loosely referred to as the "graphics" hardware) rather than the CPU, so it is fast.
Software encoding: the encoding runs on the CPU as ordinary C/C++ code compiled into a native binary, which is comparatively slow. For example, compile an H.264 encoder with the Android NDK into a .so library, write a JNI interface, and then call the .so from Java; a minimal sketch of such a JNI bridge follows.
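As a rough illustration of that JNI path, here is a hypothetical bridge (vpucoder_jni.cpp) that wraps the VPUCoder API declared in VPUCoder.h below so Java can drive the hardware encoder. The Java class name com.example.VP8Encoder and its native method names are assumptions for illustration, not part of the original project.

vpucoder_jni.cpp (hypothetical)

// vpucoder_jni.cpp -- hypothetical JNI bridge for the VPUCoder API below.
// The Java side (com.example.VP8Encoder with native methods nativeInit,
// nativeStart, nativeEncode, nativeStop) is assumed, not taken from the post.
#include <jni.h>
#include "VPUCoder.h"

extern "C" {

JNIEXPORT jint JNICALL
Java_com_example_VP8Encoder_nativeInit(JNIEnv*, jclass)
{
    return InitCodec();                      // one-time VPU init
}

JNIEXPORT jint JNICALL
Java_com_example_VP8Encoder_nativeStart(JNIEnv* env, jclass,
                                        jstring path, jint w, jint h, jint fps)
{
    const char* cpath = env->GetStringUTFChars(path, NULL);
    int ret = StartEnc(cpath, (uint32_t)w, (uint32_t)h, (uint32_t)fps);
    env->ReleaseStringUTFChars(path, cpath);
    return ret;
}

JNIEXPORT jint JNICALL
Java_com_example_VP8Encoder_nativeEncode(JNIEnv* env, jclass, jbyteArray frame)
{
    // Expects one YUV420sp frame, e.g. from Camera.PreviewCallback.
    jsize len = env->GetArrayLength(frame);
    jbyte* data = env->GetByteArrayElements(frame, NULL);
    int ret = ProcessEnc((const unsigned char*)data, (uint32_t)len);
    env->ReleaseByteArrayElements(frame, data, JNI_ABORT);  // input only, no copy-back
    return ret;
}

JNIEXPORT void JNICALL
Java_com_example_VP8Encoder_nativeStop(JNIEnv*, jclass)
{
    StopEnc();
}

} // extern "C"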
VPUCoder.h
/*
 * VPUCoder.h
 *
 * Currently only supports YUV420sp encoding and decoding.
 *
 * Created on: Dec 16, 2013
 *     Author: henry
 *
 * Example:
 *
 * int main()
 * {
 *     int ret = InitCodec();
 *
 *     //=========== encode video
 *     ret = StartEnc("/sdcard/test.mkv", 1280, 720, 30);
 *     while(1)
 *     {
 *         // get frame data and length: unsigned char* data; int length
 *         ret = ProcessEnc(data, length);
 *     }
 *     StopEnc();
 *
 *     //=========== decode video
 *     ret = ProcessDec("/sdcard/test.mkv", 1280, 720);
 *     return 0;
 * }
 */

#ifndef VPUCODER_H_
#define VPUCODER_H_

#include <stdint.h>

/**
 * Init the encoder and decoder handles. Call once, before any other API.
 *
 * @return 0 on success, non-zero on failure
 */
int InitCodec();

/**
 * Set up the encoder.
 *
 * @param filePath   : output file path
 * @param enc_width  : video width
 * @param enc_height : video height
 * @param enc_fps    : video frame rate
 *
 * @return 0 on success, non-zero on failure
 */
int StartEnc(const char* filePath, uint32_t enc_width, uint32_t enc_height, uint32_t enc_fps);

/**
 * Stop encoding and release encoder resources.
 */
void StopEnc();

/**
 * Encode one YUV420sp frame.
 *
 * @param data   : frame data
 * @param length : frame length in bytes (width * height * 3 / 2)
 *
 * @return 0 on success, non-zero on failure
 */
int ProcessEnc(const unsigned char* data, uint32_t length);

/**
 * Decode a previously encoded file to raw YUV, written next to the source as <filePath>.yuv.
 *
 * @param filePath   : source file path
 * @param dec_width  : video width
 * @param dec_height : video height
 *
 * @return 0 on success, non-zero on failure
 */
int ProcessDec(const char* filePath, uint32_t dec_width, uint32_t dec_height);

#endif /* VPUCODER_H_ */
VPUCoder.cpp
/*
 * VPUCoder.cpp -- VP8 hardware encode/decode on RK3066 via the Rockchip VPU API.
 */
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <vpu_global.h>
#include <SoftwareRenderer.h>
#include <vpu_api_interface.h>
#include <utils/Log.h>
#include <android/log.h>
#include <ui/DisplayInfo.h>
#include <gui/ISurfaceTexture.h>
#include <gui/ISurfaceComposer.h>
#include <gui/SurfaceTextureClient.h>
#include <gui/SurfaceComposerClient.h>
#include <media/stagefright/MetaData.h>

#include "VPUCoder.h"

using namespace android;

#ifdef LOGI
#undef LOGI
#undef LOGW
#undef LOGE
#undef LOGD
#endif

#define LOG_TAG_SERVICE "TeliDHVPU8Codec"
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,  LOG_TAG_SERVICE, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG_SERVICE, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG_SERVICE, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,  LOG_TAG_SERVICE, __VA_ARGS__)

#define MAX_STREM_LENGTH (1280 * 400)

static uint32_t       g_lenght_enc   = MAX_STREM_LENGTH;  // expected size of one input YUV frame
static FILE*          g_file_enc     = NULL;
static FILE*          g_file_dec     = NULL;
static VPU_API        g_vpu_api;
static void*          g_vp8encHandle = NULL;
static void*          g_vp8decHandle = NULL;
static unsigned int   g_bufferLen    = 0;
static unsigned char* g_frameBuffer  = NULL;               // encoder output buffer
static EncParams1     g_encParams;
static int            g_SyncFlag     = 0;
static unsigned int   g_dataSize;
static unsigned int   g_InputTimestamp;
static int            g_nal          = 0;                  // length prefix written before each record
static int            g_ret          = 0;

int InitCodec()
{
    // Bind the VPU API function table for VP8 (On2/VPX) coding.
    vpu_api_init(&g_vpu_api, OMX_ON2_VIDEO_CodingVPX);
    return 0;
}

int StartEnc(const char* filePath, uint32_t enc_width, uint32_t enc_height, uint32_t enc_fps)
{
    LOGI("StartEnc Begin ====================");
    if(!filePath)
    {
        LOGD("Please make sure the file path is not null!");
        return -1;
    }
    if(g_file_enc)
    {
        LOGD("Another file is being encoded!");
        return -1;
    }
    g_file_enc = fopen(filePath, "wb+");
    if(!g_file_enc)
    {
        LOGD("file open failed!");
        return -1;
    }
    g_lenght_enc = enc_width * enc_height * 3 >> 1;   // YUV420sp frame size

    memset(&g_encParams, 0, sizeof(EncParams1));
    g_encParams.width     = enc_width;
    g_encParams.height    = enc_height;
    g_encParams.framerate = enc_fps;
    g_encParams.bitRate   = 10000;

    g_frameBuffer = (unsigned char*)malloc(MAX_STREM_LENGTH);

    g_vp8encHandle = g_vpu_api.get_class_On2Encoder();
    g_vpu_api.init_class_On2Encoder(g_vp8encHandle, &g_encParams, g_frameBuffer, &g_bufferLen);

    // Write the encoder header as the first length-prefixed record.
    g_nal = g_bufferLen;
    fwrite(&g_nal, 1, sizeof(g_nal), g_file_enc);
    fwrite(g_frameBuffer, 1, g_bufferLen, g_file_enc);

    LOGI("StartEnc End g_bufferLen = %d ====================", g_bufferLen);
    return 0;
}

void StopEnc()
{
    LOGI("StopEnc Begin ====================");
    g_lenght_enc = 0;
    if(g_frameBuffer)
    {
        free(g_frameBuffer);
        g_frameBuffer = NULL;
    }
    if(g_file_enc)
    {
        fclose(g_file_enc);
        g_file_enc = NULL;
    }
    if(g_vp8encHandle)
    {
        g_vpu_api.deinit_class_On2Encoder(g_vp8encHandle);
        g_vpu_api.destroy_class_On2Encoder(g_vp8encHandle);
        g_vp8encHandle = NULL;
    }
    LOGI("StopEnc End ====================");
}

int ProcessEnc(const unsigned char* data, uint32_t length)
{
    if(length != g_lenght_enc)
    {
        LOGD("encode frame length is wrong!");
        StopEnc();
        return -1;
    }
    if(!g_file_enc)
    {
        LOGD("file not open!");
        return -1;
    }
    LOGD("enc_oneframe_class_On2Encoder");
    g_SyncFlag = 0;
    g_ret = g_vpu_api.enc_oneframe_class_On2Encoder(g_vp8encHandle, g_frameBuffer, &g_bufferLen,
            (unsigned char*)data, 0, &g_dataSize, &g_InputTimestamp, &g_SyncFlag);
    if(g_bufferLen > 0 && g_ret >= 0)
    {
        LOGI("ProcessEnc encode one frame ====================");
        // Append one length-prefixed VP8 frame record.
        g_nal = g_bufferLen;
        fwrite(&g_nal, 1, sizeof(g_nal), g_file_enc);
        fwrite(g_frameBuffer, 1, g_bufferLen, g_file_enc);
        fflush(g_file_enc);
    }
    else
    {
        LOGD("ProcessEnc encode frame failed!");
        return -1;
    }
    g_bufferLen = 0;
    return 0;
}

// Prepend the VPU_BITSTREAM slice header that the decoder expects in front of each frame.
int32_t FillFrameHead(unsigned char* dst, unsigned char* src, uint32_t size,
                      uint32_t bitsreamLen, int64_t time, uint32_t type, uint32_t num)
{
    VPU_BITSTREAM h;
    uint32_t TimeLow = (uint32_t)(time / 1000);
    h.StartCode          = BSWAP(VPU_BITSTREAM_START_CODE);
    h.SliceLength        = BSWAP(size);
    h.SliceTime.TimeLow  = BSWAP(TimeLow);
    h.SliceTime.TimeHigh = 0;
    h.SliceType          = BSWAP(type);
    h.SliceNum           = BSWAP(num);
    h.Res[0] = 0;
    h.Res[1] = 0;
    memcpy(dst, &h, bitsreamLen);
    return 0;
}

int ProcessDec(const char* filePath, uint32_t dec_width, uint32_t dec_height)
{
    LOGI("ProcessDec Begin dec_width = %d dec_height = %d ====================", dec_width, dec_height);
    if(!filePath)
    {
        LOGD("src file path is null");
        return -1;
    }
    FILE *fp_in = fopen(filePath, "rb");
    if(!fp_in)
    {
        LOGD("File does not exist!");
        return -1;
    }
    // Decoded YUV is written next to the source file as <filePath>.yuv
    char filePathOut[64];
    memset(filePathOut, 0, 64);
    sprintf(filePathOut, "%s.yuv", filePath);
    g_file_dec = fopen(filePathOut, "wb+");
    if(!g_file_dec)
    {
        LOGD("file open failed!");
        fclose(fp_in);
        return -1;
    }

    unsigned char* aOutBuffer = NULL;
    unsigned char* aInputBuf  = NULL;
    unsigned char* aInputData = NULL;
    unsigned int   aOutputLength;
    unsigned int   aInBufSize = MAX_STREM_LENGTH;

    g_vp8decHandle = g_vpu_api.get_class_On2Decoder();
    g_vpu_api.init_class_On2Decoder(g_vp8decHandle);

    int tmp_length = (dec_width * dec_height * 3) >> 1;   // size of one decoded YUV420sp frame
    LOGI("init_class_On2Decoder OK! tmp_length=%d", tmp_length);

    aOutBuffer = (unsigned char*)malloc(tmp_length);
    aInputBuf  = (unsigned char*)malloc(MAX_STREM_LENGTH);

    // The first record of the file is the encoder header written by StartEnc().
    fread(&g_nal, 1, sizeof(g_nal), fp_in);
    if(g_nal > 0)
    {
        fread(aInputBuf, 1, g_nal, fp_in);
    }
    else
    {
        LOGD("Can't decode the video!");
        goto fail;
    }

    int ret;
    uint32_t bitsLen;
    uint32_t vpu_stream_lenc;
    vpu_stream_lenc = sizeof(VPU_BITSTREAM);
    aInputData = aInputBuf + vpu_stream_lenc;

    while(1)
    {
        // Read the next length-prefixed VP8 frame record.
        ret = fread(&g_nal, 1, sizeof(g_nal), fp_in);
        if(ret <= 0)
            break;
        fread(aInputData, 1, g_nal, fp_in);
        aInBufSize = g_nal;
        bitsLen = vpu_stream_lenc + aInBufSize;
        FillFrameHead(aInputBuf, aInputData, aInBufSize, vpu_stream_lenc, 0, 0, 0);
        ret = g_vpu_api.dec_oneframe_class_On2Decoder(g_vp8decHandle, aOutBuffer, &aOutputLength,
                                                      aInputBuf, &bitsLen);
        LOGI("out_length=%d in_length=%d ret=%d", aOutputLength, bitsLen, ret);
        if(aOutputLength > 0)
        {
            // The decoder returns a VPU_FRAME descriptor; map its memory and dump the pixels.
            VPU_FRAME *frame = (VPU_FRAME *)aOutBuffer;
            if(frame->vpumem.phy_addr)
            {
                VPUMemLink(&frame->vpumem);
                if(frame->vpumem.vir_addr)
                {
                    LOGI("VPUMemLinear_t size=%d", frame->vpumem.size);
                    fwrite(frame->vpumem.vir_addr, 1, tmp_length, g_file_dec);
                    fflush(g_file_dec);
                }
                else
                {
                    LOGI("vir_addr is null");
                }
                VPUFreeLinear(&frame->vpumem);
            }
            else
            {
                LOGI("phy_addr is null");
            }
        }
    }

fail:
    if(aOutBuffer)
    {
        free(aOutBuffer);
        aOutBuffer = NULL;
    }
    if(aInputBuf)
    {
        free(aInputBuf);
        aInputBuf = NULL;
    }
    if(g_file_dec)
    {
        fclose(g_file_dec);
        g_file_dec = NULL;
    }
    if(fp_in)
    {
        fclose(fp_in);
        fp_in = NULL;
    }
    if(g_vp8decHandle)
    {
        g_vpu_api.deinit_class_On2Decoder(g_vp8decHandle);
        g_vpu_api.destroy_class_On2Decoder(g_vp8decHandle);
        g_vp8decHandle = NULL;
    }
    LOGI("ProcessDec End ====================");
    return 0;
}
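For reference, the file that StartEnc()/ProcessEnc() produce and ProcessDec() consumes is just a sequence of records, each a native-endian 4-byte length followed by the payload; the first record is the encoder header and every later record is one VP8 frame. The small host-side walker below (dumpstream.cpp, a hypothetical helper, not from the original post) only prints record sizes and never touches the VPU.

dumpstream.cpp (hypothetical)

// dumpstream.cpp -- minimal sketch that walks the length-prefixed stream
// written by StartEnc/ProcessEnc above: [int32 length][payload] repeated,
// where the first record is the encoder header and the rest are VP8 frames.
// Build on the host, e.g.:  g++ -o dumpstream dumpstream.cpp
#include <stdio.h>
#include <stdint.h>

int main(int argc, char** argv)
{
    if (argc < 2) { fprintf(stderr, "usage: %s <file>\n", argv[0]); return 1; }

    FILE* fp = fopen(argv[1], "rb");
    if (!fp) { perror("fopen"); return 1; }

    int32_t len = 0;
    long index = 0;
    while (fread(&len, 1, sizeof(len), fp) == sizeof(len))
    {
        if (len <= 0) { fprintf(stderr, "bad record length %d\n", len); break; }
        printf("record %ld: %d bytes (%s)\n",
               index, len, index == 0 ? "encoder header" : "VP8 frame");
        if (fseek(fp, len, SEEK_CUR) != 0) break;   // skip the payload
        ++index;
    }
    fclose(fp);
    return 0;
}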
Android.mk
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
VERSION=1.0.1
LOCAL_MODULE := VPU8Coder_${VERSION}
LOCAL_SRC_FILES := VPUCoder.cpp
LOCAL_LDLIBS := -L$(LOCAL_PATH)/libs \
-lvpu \
-lcutils \
-lutils \
-lgui \
-lrk_on2 \
-lstagefright
LOCAL_C_INCLUDES := \
$(JNI_H_INCLUDE) \
$(LOCAL_PATH)/include \
$(LOCAL_PATH)/include/media/openmax
LOCAL_CFLAGS := -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
include $(BUILD_SHARED_LIBRARY)
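Finally, a hypothetical end-to-end test driver (test_vpucoder.cpp) that could be linked against the library above: it feeds raw YUV420sp frames from a file through InitCodec/StartEnc/ProcessEnc/StopEnc and then decodes the result with ProcessDec. The input path /sdcard/input_1280x720.yuv, the output name, and the 1280x720@30 settings are assumptions for illustration, not from the original post.

test_vpucoder.cpp (hypothetical)

// test_vpucoder.cpp -- hypothetical test driver for the VPUCoder API above.
// It encodes raw YUV420sp frames read from a file, then decodes the result
// back to YUV. Paths and resolution are illustrative assumptions.
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include "VPUCoder.h"

int main()
{
    const uint32_t width = 1280, height = 720, fps = 30;
    const uint32_t frameSize = width * height * 3 / 2;    // one YUV420sp frame

    if (InitCodec() != 0) return 1;
    if (StartEnc("/sdcard/test.vp8", width, height, fps) != 0) return 1;

    FILE* fp = fopen("/sdcard/input_1280x720.yuv", "rb"); // hypothetical raw input
    if (!fp) { StopEnc(); return 1; }

    unsigned char* frame = (unsigned char*)malloc(frameSize);
    while (fread(frame, 1, frameSize, fp) == frameSize)
    {
        if (ProcessEnc(frame, frameSize) != 0) break;     // encode one frame
    }
    free(frame);
    fclose(fp);
    StopEnc();

    // Decode the file we just produced; output goes to /sdcard/test.vp8.yuv
    return ProcessDec("/sdcard/test.vp8", width, height);
}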
Original article: http://www.cnblogs.com/wenrenhua08/p/3937549.html