Live video streaming: encoding frames with libx264 + building an RTSP streaming server with live555
I recently went back over the live555 source code and built a live video streaming program under VS2013.
Each frame is read from the video, encoded with x264, and the stream is served over RTSP with live555.
The result is clear and smooth (640*480 at 40 frames per second), which I am very happy with.
The main cpp code is attached below.
/* H264FramedLiveSource.cpp By Chain_Gank */
#include <highgui.h>
#include <cv.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdint.h>
//#include <unistd.h>
#include <fcntl.h>
#include "H264FramedLiveSource.hh"

#pragma comment(lib, "libx264.lib")

#define ENCODER_TUNE       "zerolatency"
#define ENCODER_PROFILE    "baseline"
#define ENCODER_PRESET     "veryfast"
#define ENCODER_COLORSPACE X264_CSP_I420
#define CLEAR(x) (memset((&x), 0, sizeof(x)))

// Video parameters, used to initialize the encoder and the image format conversion
int FPS, WIDTH, HEIGHT, widthStep;

// Converts an RGB24 image to a YUV420 image
void Convert(unsigned char *RGB, unsigned char *YUV, unsigned int width, unsigned int height);

// Holds the encoder and image-buffer pointers and related functions. Declared as a global
// because it is used throughout the program and must live until the program exits.
extern class F f;

// Initialize the members of the global object f: allocate the buffers,
// open the capture source and set up the x264 encoder.
void F::init()
{
    int ret, frames_total; // ret is a scratch variable; frames_total holds the total
                           // number of frames and is used to initialize the encoder

    cap = cvCaptureFromFile("test.mp4");
    //cap = cvCaptureFromAVI("4.avi");
    //cap = cvCaptureFromCAM(0);    //camera
    img = cvQueryFrame(cap);

    // Read the video file's properties
    FPS = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FPS);
    frames_total = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_COUNT) - 1;
    //cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 320);     //camera
    //cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 240);    //camera
    HEIGHT = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT);
    WIDTH = (int)cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH);

    // Scale down the resolution
    while (WIDTH > 480 || HEIGHT > 270) {
        HEIGHT /= 2;
        WIDTH /= 2;
    }
    widthStep = WIDTH * 3; // fixed at 3 bytes per pixel, used to extract the RGB data from the IplImage
    printf("fps: %d\twidth: %d\theight: %d\n", FPS, WIDTH, HEIGHT);

    // Image buffer for the rescaled frames
    img2 = cvCreateImage(cvSize(WIDTH, HEIGHT), img->depth, img->nChannels);

    if (!cap) {
        fprintf(stderr, "Can not open file.\n");
        exit(-1);
    }

    // The encoder struct holds the variables the encoder needs
    encoder = (my_x264_encoder *)malloc(sizeof(my_x264_encoder));
    if (!encoder) {
        printf("cannot malloc my_x264_encoder !\n");
        exit(EXIT_FAILURE);
    }
    CLEAR(*encoder);

    // Initialize the encoder's members
    strcpy(encoder->parameter_preset, ENCODER_PRESET);
    strcpy(encoder->parameter_tune, ENCODER_TUNE);

    encoder->x264_parameter = (x264_param_t *)malloc(sizeof(x264_param_t));
    if (!encoder->x264_parameter) {
        printf("malloc x264_parameter error!\n");
        exit(EXIT_FAILURE);
    }
    CLEAR(*(encoder->x264_parameter));
    x264_param_default(encoder->x264_parameter);

    // Apply the preset and tune defaults
    if ((ret = x264_param_default_preset(encoder->x264_parameter, encoder->parameter_preset, encoder->parameter_tune)) < 0) {
        printf("x264_param_default_preset error!\n");
        exit(EXIT_FAILURE);
    }

    /* cpuFlags: avoids deadlock when the lookahead keeps reading from an empty buffer */
    encoder->x264_parameter->i_threads = X264_SYNC_LOOKAHEAD_AUTO;
    /* Video options */
    encoder->x264_parameter->i_width = WIDTH;               // width of the frames to encode
    encoder->x264_parameter->i_height = HEIGHT;             // height of the frames to encode
    encoder->x264_parameter->i_frame_total = frames_total;  // total number of frames to encode; use 0 if unknown
    encoder->x264_parameter->i_keyint_max = 10 * FPS;       // maximum keyframe interval; can be reduced to 2-4 if the video has rapid motion
    /* Stream parameters */ // the next four parameters have little effect here, but initialize them anyway
    encoder->x264_parameter->i_bframe = 5;
    encoder->x264_parameter->b_open_gop = 0;
    encoder->x264_parameter->i_bframe_pyramid = 0;                      // disable the B-frame pyramid
    encoder->x264_parameter->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;  // adaptive B-frame placement
    /* Log level; leave commented out unless encoder debug output is wanted */
    //encoder->x264_parameter->i_log_level = X264_LOG_DEBUG;
    encoder->x264_parameter->i_fps_num = FPS;  // frame-rate numerator
    encoder->x264_parameter->i_fps_den = 1;    // frame-rate denominator
    // The next two settings are important for live streaming
    encoder->x264_parameter->b_intra_refresh = 1;
    encoder->x264_parameter->b_annexb = 1;

    strcpy(encoder->parameter_profile, ENCODER_PROFILE);
    if ((ret = x264_param_apply_profile(encoder->x264_parameter, encoder->parameter_profile)) < 0) {
        printf("x264_param_apply_profile error!\n");
        exit(EXIT_FAILURE);
    }
    /* Open the encoder */
    encoder->x264_encoder = x264_encoder_open(encoder->x264_parameter);
    encoder->colorspace = ENCODER_COLORSPACE;

    /* Initialize the picture */
    encoder->yuv420p_picture = (x264_picture_t *)malloc(sizeof(x264_picture_t));
    if (!encoder->yuv420p_picture) {
        printf("malloc encoder->yuv420p_picture error!\n");
        exit(EXIT_FAILURE);
    }
    if ((ret = x264_picture_alloc(encoder->yuv420p_picture, encoder->colorspace, WIDTH, HEIGHT)) < 0) {
        printf("ret=%d\n", ret);
        printf("x264_picture_alloc error!\n");
        exit(EXIT_FAILURE);
    }

    // Set the input picture format for x264
    encoder->yuv420p_picture->img.i_csp = encoder->colorspace;
    encoder->yuv420p_picture->img.i_plane = 3;  // planar storage, 3 planes
    encoder->yuv420p_picture->i_type = X264_TYPE_AUTO;

    /* Allocate the YUV buffer */ // this buffer holds the picture data that x264 consumes
    encoder->yuv = (uint8_t *)malloc(WIDTH * HEIGHT * 3 / 2);
    if (!encoder->yuv) {
        printf("malloc yuv error!\n");
        exit(EXIT_FAILURE);
    }
    CLEAR(*(encoder->yuv));
    encoder->yuv420p_picture->img.plane[0] = encoder->yuv;
    encoder->yuv420p_picture->img.plane[1] = encoder->yuv + WIDTH * HEIGHT;
    encoder->yuv420p_picture->img.plane[2] = encoder->yuv + WIDTH * HEIGHT + WIDTH * HEIGHT / 4;

    n_nal = 0;  // initialized to 0; later holds the number of NAL units produced per frame
    // The NAL units are stored here. A frame usually encodes to one NAL unit (at most three);
    // higher resolutions may produce more.
    encoder->nal = (x264_nal_t *)malloc(3 * sizeof(x264_nal_t));
    if (!encoder->nal) {
        printf("malloc x264_nal_t error!\n");
        exit(EXIT_FAILURE);
    }
    CLEAR(*(encoder->nal));

    // Buffer used to extract the image data from the IplImage
    RGB1 = (unsigned char *)malloc(HEIGHT * WIDTH * 3);

    /*************************************** initialization done ***************************************/
}

H264FramedLiveSource::H264FramedLiveSource(UsageEnvironment& env, unsigned preferredFrameSize, unsigned playTimePerFrame)
    : FramedSource(env)
{
}

H264FramedLiveSource* H264FramedLiveSource::createNew(UsageEnvironment& env, unsigned preferredFrameSize, unsigned playTimePerFrame)
{
    H264FramedLiveSource* newSource = new H264FramedLiveSource(env, preferredFrameSize, playTimePerFrame);
    return newSource;
}

H264FramedLiveSource::~H264FramedLiveSource()
{
    printf("~~~~~~~~~~~~~~~\n");
    /*free(RGB1);
    cvReleaseCapture(&cap);
    free(encoder->yuv);
    free(encoder->yuv420p_picture);
    free(encoder->x264_parameter);
    x264_encoder_close(encoder->x264_encoder);
    free(encoder);*/
}

void F::getframe()
{
    printf("#");
    img = cvQueryFrame(cap);
    cvResize(img, img2);  // rescale the frame to img2's size (set in F::init(); adjust as needed)

    /* Extract an RGB image from the IplImage's BGR data */
    for (int i = 0; i < HEIGHT; i++) {
        for (int j = 0; j < WIDTH; j++) {
            RGB1[(i * WIDTH + j) * 3]     = img2->imageData[i * widthStep + j * 3 + 2];
            RGB1[(i * WIDTH + j) * 3 + 1] = img2->imageData[i * widthStep + j * 3 + 1];
            RGB1[(i * WIDTH + j) * 3 + 2] = img2->imageData[i * widthStep + j * 3];
        }
    }

    // Convert the RGB image to YUV
    Convert(RGB1, encoder->yuv, WIDTH, HEIGHT);
    encoder->yuv420p_picture->i_pts++;  // per-frame presentation timestamp; must keep increasing

    // Encode the YUV picture; the encoded data is placed in encoder->nal
    if (x264_encoder_encode(encoder->x264_encoder, &encoder->nal, &n_nal, encoder->yuv420p_picture, &pic_out) < 0) {
        printf("x264_encoder_encode error!\n");
        exit(EXIT_FAILURE);
    }
}

void H264FramedLiveSource::doGetNextFrame()
{
    fFrameSize = 0;
    f.getframe();

    // Copy the encoded H.264 data (all NAL units) into fTo, the buffer live555 sends from
    for (f.my_nal = f.encoder->nal; f.my_nal < f.encoder->nal + f.n_nal; ++(f.my_nal)) {
        // Check whether the NAL unit still fits in the fTo buffer
        if (fFrameSize + f.my_nal->i_payload > fMaxSize) {
printf("fMaxSize!!!!!!!!!!!!\n"); break; } //p_payload存放NALU数据,i_payload存放NALU字节大小 memcpy((unsigned char*)fTo + fFrameSize, f.my_nal->p_payload, f.my_nal->i_payload); fFrameSize += f.my_nal->i_payload; } //将会判断数据是否存满,Parser缓冲区(一共两个)互换。 //然后提取有效数据,continueReadingFrame,读完有效数据后由调用回doGetNextFrame nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this); return; }
Original post: http://www.cnblogs.com/chaingank/p/4727906.html