Originally the plan was to do H.264 encoding, but after two days of digging it turns out ffmpeg's H.264 encoder relies on the third-party x264 library. In the spirit of keeping things simple I went with MPEG encoding instead. The ffmpeg port itself was already covered in my earlier ffmpeg decoding post, here: http://blog.csdn.net/hclydao/article/details/18546757
How to use it is also explained there. This post covers two things: converting YUV422 to RGB with ffmpeg, and encoding YUV422 into an MPEG stream. Continuing from the previous posts, once the YUV422 frames are captured they are first converted to RGB565 so they can be displayed. The interface function is as follows:
/*
 * YUV422 (YUYV) -> RGB565, used to display the captured frame
 */
JNIEXPORT jint JNICALL Java_com_hclydao_webcam_Ffmpeg_yuvtorgb(JNIEnv * env, jclass obj,
        const jbyteArray yuvdata, jbyteArray rgbdata, const jint dwidth, const jint dheight)
{
    jbyte *ydata = (jbyte*)(*env)->GetByteArrayElements(env, yuvdata, 0);
    jbyte *rdata = (jbyte*)(*env)->GetByteArrayElements(env, rgbdata, 0);
    AVFrame *rpicture = NULL;
    AVFrame *ypicture = NULL;
    struct SwsContext *swsctx = NULL;

    rpicture = avcodec_alloc_frame();
    ypicture = avcodec_alloc_frame();
    /* wrap the Java byte arrays without copying: the source is an mwidth x mheight
     * YUYV422 frame, the destination is a dwidth x dheight RGB565 buffer */
    avpicture_fill((AVPicture *)rpicture, (uint8_t *)rdata, AV_PIX_FMT_RGB565, dwidth, dheight);
    avpicture_fill((AVPicture *)ypicture, (uint8_t *)ydata, AV_PIX_FMT_YUYV422, mwidth, mheight);

    swsctx = sws_getContext(mwidth, mheight, AV_PIX_FMT_YUYV422,
                            dwidth, dheight, AV_PIX_FMT_RGB565,
                            SWS_BICUBIC, NULL, NULL, NULL);
    if (swsctx == NULL) {
        LOGE("++++++sws_getContext failed\n");
        (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
        (*env)->ReleaseByteArrayElements(env, rgbdata, rdata, 0);
        av_free(rpicture);
        av_free(ypicture);
        return -1;
    }
    sws_scale(swsctx, (const uint8_t* const*)ypicture->data, ypicture->linesize,
              0, mheight, rpicture->data, rpicture->linesize);

    sws_freeContext(swsctx);
    av_free(rpicture);
    av_free(ypicture);
    (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
    (*env)->ReleaseByteArrayElements(env, rgbdata, rdata, 0);
    return 0;
}
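One detail worth spelling out: the Java layer has to hand in byte arrays that are large enough for these pixel formats. A minimal sketch of computing those sizes with the same deprecated avpicture API the post already uses (the helper names are mine, and mwidth/mheight are the capture-size globals carried over from the earlier posts):

#include <libavcodec/avcodec.h>   /* already included in the JNI file, listed for completeness */

/* size of one mwidth x mheight YUYV422 frame from the camera: mwidth * mheight * 2 bytes */
static int camera_frame_size(void)
{
    return avpicture_get_size(AV_PIX_FMT_YUYV422, mwidth, mheight);
}

/* size of the RGB565 buffer the display needs: dwidth * dheight * 2 bytes */
static int display_frame_size(int dwidth, int dheight)
{
    return avpicture_get_size(AV_PIX_FMT_RGB565, dwidth, dheight);
}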
Next comes the MPEG encoding. The common advice online is that ffmpeg's MPEG-1 video encoder only accepts YUV420P input, so the YUV422 data has to be converted to YUV420P first and then fed to the encoder. The related interface functions are as follows:
AVCodecContext *pCodecCtx = NULL;
AVPacket avpkt;
FILE *video_file;                   /* output .mpg file */
unsigned char *outbuf = NULL;       /* allocated below but not actually used by the encode path */
unsigned char *yuv420buf = NULL;    /* holds the YUV420P frame handed to the encoder */
static int outsize = 0;
/*
 * encoding init
 */
JNIEXPORT jint JNICALL Java_com_hclydao_webcam_Ffmpeg_videoinit(JNIEnv * env, jclass obj, jbyteArray filename)
{
    LOGI("%s\n", __func__);
    AVCodec *pCodec = NULL;

    avcodec_register_all();
    pCodec = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO);
    if (pCodec == NULL) {
        LOGE("++++++++++++codec not found\n");
        return -1;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (pCodecCtx == NULL) {
        LOGE("++++++Could not allocate video codec context\n");
        return -1;
    }
    /* put sample parameters */
    pCodecCtx->bit_rate = 400000;
    /* resolution must be a multiple of two */
    pCodecCtx->width = mwidth;
    pCodecCtx->height = mheight;
    /* frames per second */
    pCodecCtx->time_base = (AVRational){1, 25};
    pCodecCtx->gop_size = 10;      /* emit one intra frame every ten frames */
    pCodecCtx->max_b_frames = 1;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;   /* the MPEG-1 encoder wants planar 4:2:0 */
    /* open it */
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("+++++++Could not open codec\n");
        return -1;
    }

    /* mwidth * mheight * 2 is more than the mwidth * mheight * 3 / 2 that a YUV420P
     * frame needs, so the conversion buffer is simply over-allocated a little */
    outsize = mwidth * mheight * 2;
    outbuf = malloc(outsize * sizeof(char));
    yuv420buf = malloc(outsize * sizeof(char));

    jbyte *filedir = (jbyte*)(*env)->GetByteArrayElements(env, filename, 0);
    if ((video_file = fopen((const char *)filedir, "wb")) == NULL) {
        LOGE("++++++++++++open %s failed\n", filedir);
        (*env)->ReleaseByteArrayElements(env, filename, filedir, 0);
        return -1;
    }
    (*env)->ReleaseByteArrayElements(env, filename, filedir, 0);
    return 1;
}
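If you would rather verify the "YUV420P only" claim than take it on faith, an AVCodec lists the pixel formats it accepts in its pix_fmts array (terminated by AV_PIX_FMT_NONE, and possibly NULL). A small logging sketch that could be called right after avcodec_find_encoder above; the function name is mine:

#include <libavutil/pixdesc.h>    /* for av_get_pix_fmt_name() */

/* Sketch only: log which pixel formats the chosen encoder actually supports. */
static void log_supported_pix_fmts(const AVCodec *codec)
{
    const enum AVPixelFormat *p = codec->pix_fmts;
    if (p == NULL) {
        LOGI("codec %s does not list its pixel formats\n", codec->name);
        return;
    }
    for (; *p != AV_PIX_FMT_NONE; p++)
        LOGI("codec %s supports %s\n", codec->name, av_get_pix_fmt_name(*p));
}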
JNIEXPORT jint JNICALL Java_com_hclydao_webcam_Ffmpeg_videostart(JNIEnv * env, jclass obj, jbyteArray yuvdata)
{
    int got_packet = 0, ret = 0;
    jbyte *ydata = (jbyte*)(*env)->GetByteArrayElements(env, yuvdata, 0);
    AVFrame *yuv420pframe = NULL;
    AVFrame *yuv422frame = NULL;
    struct SwsContext *swsctx = NULL;

    yuv420pframe = avcodec_alloc_frame();
    yuv422frame = avcodec_alloc_frame();
    avpicture_fill((AVPicture *)yuv420pframe, (uint8_t *)yuv420buf, AV_PIX_FMT_YUV420P, mwidth, mheight);
    avpicture_fill((AVPicture *)yuv422frame, (uint8_t *)ydata, AV_PIX_FMT_YUYV422, mwidth, mheight);

    /* packed YUYV422 from the camera -> planar YUV420P for the encoder */
    swsctx = sws_getContext(mwidth, mheight, AV_PIX_FMT_YUYV422,
                            mwidth, mheight, AV_PIX_FMT_YUV420P,
                            SWS_BICUBIC, NULL, NULL, NULL);
    sws_scale(swsctx, (const uint8_t* const*)yuv422frame->data, yuv422frame->linesize,
              0, mheight, yuv420pframe->data, yuv420pframe->linesize);

    av_init_packet(&avpkt);
    avpkt.data = NULL;   /* let the encoder allocate the packet */
    avpkt.size = 0;
    ret = avcodec_encode_video2(pCodecCtx, &avpkt, yuv420pframe, &got_packet);
    if (ret < 0) {
        LOGE("+++++Error encoding frame\n");
        ret = -1;
    } else if (got_packet) {
        fwrite(avpkt.data, avpkt.size, 1, video_file);
    }
    av_free_packet(&avpkt);

    sws_freeContext(swsctx);
    av_free(yuv420pframe);
    av_free(yuv422frame);
    (*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
    return ret;
}
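One thing the functions here never do is drain the encoder: with max_b_frames = 1 the encoder buffers frames internally, so the last frame or two are silently dropped when the file is closed. Below is a rough sketch of a flush step that videoclose could call before fclose. It reuses this post's globals, passes NULL as the frame until no more packets come back, and terminates the raw MPEG-1 stream with the usual 0x000001B7 sequence-end code (this is how the stock ffmpeg encoding example ends its output, not something the original code does):

/* Sketch only: flush delayed frames out of the encoder before closing the file. */
static void flush_encoder(void)
{
    int got_packet = 0;
    uint8_t endcode[] = { 0x00, 0x00, 0x01, 0xb7 };   /* MPEG-1 sequence end code */

    do {
        av_init_packet(&avpkt);
        avpkt.data = NULL;
        avpkt.size = 0;
        if (avcodec_encode_video2(pCodecCtx, &avpkt, NULL, &got_packet) < 0)
            break;
        if (got_packet)
            fwrite(avpkt.data, avpkt.size, 1, video_file);
        av_free_packet(&avpkt);
    } while (got_packet);

    fwrite(endcode, 1, sizeof(endcode), video_file);
}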
JNIEXPORT jint JNICALL Java_com_hclydao_webcam_Ffmpeg_videoclose(JNIEnv * env, jclass obj)
{
    fclose(video_file);
    avcodec_close(pCodecCtx);
    av_free(pCodecCtx);
    free(outbuf);
    free(yuv420buf);
    return 0;
}

The video recorded this way does play back, but I keep feeling that something in the code is not written quite right; there are bound to be bugs. Once the process and the principles are clear, though, the rest is easy.
Below is a clip I recorded. That wraps up the camera series for now.
For this test I created a brand-new project.
For the download link to the complete project, please look in my resources; I uploaded it but it is not showing up in the list yet. The one that is twenty-something MB is it.
There are still plenty of bugs in this and it was not written very carefully. If anything about the principle or the process is wrong, I hope you will point it out.
Original article: http://blog.csdn.net/hclydao/article/details/36415975