Driving a camera under Linux is actually straightforward: follow the V4L2 documentation step by step and you can have it working fairly quickly. Before writing any code, however, we need to change a few system settings so that the system supports the framebuffer and creates a device node such as /dev/fb0; only then can we display the camera image on a Linux system. The framebuffer itself was covered in an earlier post, so it will not be repeated here.
If you need background on the framebuffer, see: http://baike.baidu.com/view/3351639.htm
Most importantly, we need to edit the boot configuration script /boot/grub/grub.conf. Here is how to change it:
```
# grub.conf generated by anaconda
#
# Note that you do not have to rerun grub after making changes to this file
# NOTICE:  You have a /boot partition.  This means that
#          all kernel and initrd paths are relative to /boot/, eg.
#          root (hd0,0)
#          kernel /vmlinuz-version ro root=/dev/sdb2
#          initrd /initrd-[generic-]version.img
#boot=/dev/sdb
default=0
timeout=5
splashimage=(hd0,0)/grub/splash.xpm.gz
hiddenmenu
title CentOS (2.6.32-431.el6.i686)
        root (hd0,0)
        kernel /vmlinuz-2.6.32-431.el6.i686 ro root=UUID=2bc12537-d6c1-4e67-b4e5-e9c466205554 nomodeset rd_NO_LUKS KEYBOARDTYPE=pc KEYTABLE=us rd_NO_MD crashkernel=auto LANG=zh_CN.UTF-8 rd_NO_LVM rd_NO_DM rhgb quiet vga=0x318
        initrd /initramfs-2.6.32-431.el6.i686.img
```
The key change is the vga=0x318 parameter appended to the kernel line: it selects the VESA framebuffer mode the kernel will use at boot. The available mode codes are listed in the table below:
| Color depth | 640x400 | 640x480 | 800x600 | 1024x768 | 1280x1024 | 1600x1200 |
|-------------|---------|---------|---------|----------|-----------|-----------|
| 4 bits      | ?       | ?       | 0x302   | ?        | ?         | ?         |
| 8 bits      | 0x300   | 0x301   | 0x303   | 0x305    | 0x307     | 0x31C     |
| 15 bits     | ?       | 0x310   | 0x313   | 0x316    | 0x319     | 0x31D     |
| 16 bits     | ?       | 0x311   | 0x314   | 0x317    | 0x31A     | 0x31E     |
| 24 bits     | ?       | 0x312   | 0x315   | 0x318    | 0x31B     | 0x31F     |
| 32 bits     | ?       | ?       | ?       | ?        | ?         | ?         |
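After rebooting with the new kernel parameter, it is worth confirming that the framebuffer really came up in the expected mode. The sketch below is my own illustration (not from the original post); it opens /dev/fb0 and prints the resolution and color depth using the standard FBIOGET_VSCREENINFO ioctl from linux/fb.h:

```c
/* fbcheck.c - illustrative sketch: print the current framebuffer mode */
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/fb.h>

int main(void)
{
    struct fb_var_screeninfo var;
    int fd = open("/dev/fb0", O_RDWR);   /* node created once the vesafb driver is active */
    if (fd < 0) {
        perror("open /dev/fb0");
        return 1;
    }
    if (ioctl(fd, FBIOGET_VSCREENINFO, &var) < 0) {
        perror("FBIOGET_VSCREENINFO");
        close(fd);
        return 1;
    }
    printf("%ux%u, %u bits per pixel\n", var.xres, var.yres, var.bits_per_pixel);
    close(fd);
    return 0;
}
```

The values reported here should match what the framebuffer-writing code later in this article assumes.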
The prototype of ioctl is:

```c
extern int ioctl (int __fd, unsigned long int __request, ...) __THROW;
```

- __fd: the device file descriptor, for example the cameraFd returned by open() when the video device was opened;
- __request: the request code identifying the operation.

In V4L2 development the following request codes are commonly used:

- VIDIOC_REQBUFS: request frame buffers from the driver
- VIDIOC_QUERYBUF: query a buffer allocated by VIDIOC_REQBUFS so it can be mapped into the process address space
- VIDIOC_QUERYCAP: query the driver's capabilities
- VIDIOC_ENUM_FMT: enumerate the video formats the driver supports
- VIDIOC_S_FMT: set the driver's video capture format
- VIDIOC_G_FMT: read the driver's current video capture format
- VIDIOC_TRY_FMT: validate a capture format without applying it
- VIDIOC_CROPCAP: query the driver's cropping capabilities
- VIDIOC_S_CROP: set the cropping rectangle of the video signal
- VIDIOC_G_CROP: read the cropping rectangle of the video signal
- VIDIOC_QBUF: put a buffer back onto the driver's queue
- VIDIOC_DQBUF: take a filled buffer off the queue
- VIDIOC_STREAMON: start video streaming
- VIDIOC_STREAMOFF: stop video streaming
- VIDIOC_QUERYSTD: query the video standards the device supports, e.g. PAL or NTSC

Some of these ioctls are mandatory, others are optional.
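As a quick illustration of how these request codes are used, here is a minimal sketch (my own, not part of the program below) that issues VIDIOC_QUERYCAP to check that the device really is a streaming capture device before anything else is attempted:

```c
/* Illustrative sketch: query the driver's capabilities with VIDIOC_QUERYCAP. */
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
    struct v4l2_capability cap;
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0) {
        perror("open /dev/video0");
        return 1;
    }
    memset(&cap, 0, sizeof(cap));
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        perror("VIDIOC_QUERYCAP");
        close(fd);
        return 1;
    }
    printf("driver: %s, card: %s\n", cap.driver, cap.card);
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
        printf("device does not support video capture\n");
    if (!(cap.capabilities & V4L2_CAP_STREAMING))
        printf("device does not support streaming I/O\n");
    close(fd);
    return 0;
}
```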
The full code follows, split into seven files.

1. main.c

```c
#include <stdio.h>
#include <unistd.h>
#include <fcntl.h>
#include "j-yuv.h"
#include "CameralOpt.h"
#include "FrameBufferOpt.h"

#define WIDTH 640
#define HIGHT 480

int main(void)
{
    char yuyv[WIDTH*HIGHT*2];
    char bmp[WIDTH*HIGHT*3];

    // set_bmp_header((struct bmp_header_t *)bmp, WIDTH, HIGHT);

    /* Initialize the camera */
    Init_Cameral(WIDTH, HIGHT);
    /* Initialize the framebuffer */
    Init_FrameBuffer(WIDTH, HIGHT);
    /* Start capturing */
    Start_Cameral();

    /* Grab, convert and display frames continuously */
    int count = 0;
    while (1)
    {
        Get_Picture(yuyv);
        yuyv2rgb24(yuyv, bmp, WIDTH, HIGHT);
        Write_FrameBuffer(bmp);
        // printf("count:%d \n", count++);
    }

    /* Stop the camera */
    Stop_Cameral();
    /* Release the framebuffer */
    Exit_Framebuffer();
    /* Release the camera */
    Exit_Cameral();
    return 0;
}
```

2. j-yuv.h
```c
#ifndef __JYUV_H
#define __JYUV_H

typedef unsigned char  u8;
typedef unsigned short u16;
typedef unsigned int   u32;

#pragma pack(1)
/* BMP file header (packed, 54 bytes in total) */
struct bmp_header_t {
    u16 magic;
    u32 file_size;
    u32 RESERVED1;
    u32 offset;            /* 54: offset from file start to pixel data */
    u32 head_num;          /* 40: size of the info header */
    u32 width;
    u32 height;
    u16 color_planes;      /* 1 */
    u16 bit_count;
    u32 bit_compression;   /* 0 */
    u32 image_size;        /* size of the pixel data, excluding the header */
    u32 h_resolution;
    u32 v_resolution;
    u32 color_num;
    u32 important_colors;
};
#pragma pack()

void set_bmp_header(struct bmp_header_t *header, u32 width, u32 height);
int yuyv2rgb24(u8 *yuyv, u8 *rgb, u32 width, u32 height);

#endif /* __JYUV_H */
```

3. j-yuv.c
#include "j-yuv.h" #define BIT_COUNT 24 void set_bmp_header(struct bmp_header_t *header, u32 width, u32 height) { header->magic = 0x4d42; header->image_size = width * height * BIT_COUNT/8; header->file_size = header->image_size + 54; header->RESERVED1 = 0; header->offset = 54; header->head_num = 40; header->width = width; header->height = height; header->color_planes = 1; header->bit_count = BIT_COUNT; header->bit_compression = 0; header->h_resolution = 0; header->v_resolution = 0; header->color_num = 0; header->important_colors = 0; } //yuyv转rgb24的算法实现 int yuyv2rgb24(u8 *yuyv, u8 *rgb, u32 width, u32 height) { u32 i, in, rgb_index = 0; u8 y0, u0, y1, v1; int r, g, b; u32 out = 0, x, y; for(in = 0; in < width * height * 2; in += 4) { y0 = yuyv[in+0]; u0 = yuyv[in+1]; y1 = yuyv[in+2]; v1 = yuyv[in+3]; for (i = 0; i < 2; i++) { if (i) y = y1; else y = y0; r = y + (140 * (v1-128))/100; //r g = y - (34 * (u0-128))/100 - (71 * (v1-128))/100; //g b = y + (177 * (u0-128))/100; //b if(r > 255) r = 255; if(g > 255) g = 255; if(b > 255) b = 255; if(r < 0) r = 0; if(g < 0) g = 0; if(b < 0) b = 0; y = height - rgb_index/width -1; x = rgb_index%width; rgb[(y*width+x)*3+0] = b; rgb[(y*width+x)*3+1] = g; rgb[(y*width+x)*3+2] = r; rgb_index++; } } return 0; }4、FrameBufferOpt.c
#include "FrameBufferOpt.h" static int Frame_fd ; static int *FrameBuffer = NULL ; static int W , H ; //初始化framebuffer int Init_FrameBuffer(int Width , int Higth) { W = Width ; H = Higth ; Frame_fd = open("/dev/fb" , O_RDWR); if(-1 == Frame_fd) { perror("open frame buffer fail"); return -1 ; } //根本就不用CPU搬运 用DMA做为搬运工 FrameBuffer = mmap(0, 1280*1024*4 , PROT_READ | PROT_WRITE , MAP_SHARED , Frame_fd ,0 ); if(FrameBuffer == (void *)-1) { perror("memory map fail"); return -2 ; } return 0 ; } //写入framebuffer int Write_FrameBuffer(const char *buffer) { int row , col ; char *p = NULL ; for(row = 0 ; row <1024 ; row++) { for(col = 0 ; col < 1280 ; col++) { if((row < H) && (col < W)) { p = (char *)(buffer + (row * W+ col ) * 3); FrameBuffer[row*1280+col] = RGB((unsigned char)(*(p+2)),(unsigned char)(*(p+1)),(unsigned char )(*p)); } } } return 0 ; } //退出framebuffer int Exit_Framebuffer(void) { munmap(FrameBuffer , W*H*4); close(Frame_fd); return 0 ; }5、FrameBufferOpt.h
```c
#ifndef _FRAMEBUFFEROPT_H
#define _FRAMEBUFFEROPT_H

#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/mman.h>

/* Pack an R, G, B triple into a 32-bit XRGB pixel */
#define RGB(r,g,b)  ((r<<16)|(g<<8)|b)

/* Initialize the framebuffer */
int Init_FrameBuffer(int Width, int Higth);
/* Write an image to the framebuffer */
int Write_FrameBuffer(const char *buffer);
/* Release the framebuffer */
int Exit_Framebuffer(void);

#endif /* _FRAMEBUFFEROPT_H */
```

6. CameralOpt.h
```c
#ifndef _CAMERALOPT_H
#define _CAMERALOPT_H

#include <stdio.h>
#include <linux/videodev2.h>
#include <fcntl.h>
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
#include <errno.h>
#include <sys/mman.h>

/* Number of capture buffers to request from the driver */
#define COUNT 3

/* Initialize the camera */
int Init_Cameral(int Width, int Hight);
/* Release the camera */
int Exit_Cameral(void);
/* Start capturing */
int Start_Cameral(void);
/* Stop capturing */
int Stop_Cameral(void);
/* Fetch one frame of YUYV data */
int Get_Picture(char *buffer);

#endif /* _CAMERALOPT_H */
```

7. CameralOpt.c
#include "CameralOpt.h" int video_fd ; int length ; char *yuv[COUNT] ; struct v4l2_buffer enqueue , dequeue ; //定义出入队的操作结构体成员 int Init_Cameral(int Width , int Hight) { //参数检查 char *videodevname = NULL ; videodevname = "/dev/video0" ; //打开设备 video_fd = open(videodevname , O_RDWR); if(-1 == video_fd ) { perror("open video device fail"); return -1 ; } int i ; int ret ; struct v4l2_format format ; format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE ; format.fmt.pix.width = Width; format.fmt.pix.height = Hight; format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV ; //我支持的格式是这个 ret = ioctl(video_fd , VIDIOC_S_FMT , &format); if(ret != 0) { perror("set video format fail"); return -2 ; } //申请buffer,切割成几个部分 //3 struct v4l2_requestbuffers requestbuffer ; requestbuffer.count = COUNT ; requestbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE ; requestbuffer.memory = V4L2_MEMORY_MMAP ; ret = ioctl(video_fd , VIDIOC_REQBUFS , &requestbuffer); if(ret != 0) { perror("request buffer fail "); return -3 ; } //querybuffer struct v4l2_buffer querybuffer ; querybuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE ; querybuffer.memory = V4L2_MEMORY_MMAP ; for(i = 0 ; i < COUNT ; i++) { querybuffer.index = i ; ret = ioctl(video_fd , VIDIOC_QUERYBUF , &querybuffer); if(ret != 0) { perror("query buffer fail"); return -4 ; } // printf("index:%d length:%d offset:%d \n" , // querybuffer.index , querybuffer.length , querybuffer.m.offset); length = querybuffer.length ; //将摄像头内存印射到进程的内存地址 yuv[i] = mmap(0,querybuffer.length , PROT_READ | PROT_WRITE , MAP_SHARED , video_fd , querybuffer.m.offset ); //列队 struct v4l2_buffer queuebuffer ; queuebuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE ; queuebuffer.memory = V4L2_MEMORY_MMAP ; queuebuffer.index = i ; ret = ioctl(video_fd , VIDIOC_QBUF , &queuebuffer); if(ret != 0) { perror("queuebuffer fail"); return -5 ; } } //初始化入队出队 enqueue.type = V4L2_BUF_TYPE_VIDEO_CAPTURE ; dequeue.type = V4L2_BUF_TYPE_VIDEO_CAPTURE ; enqueue.memory = V4L2_MEMORY_MMAP ; dequeue.memory = V4L2_MEMORY_MMAP ; return 0 ; } int Exit_Cameral(void) { int i ; for(i = 0 ; i < COUNT ; i++) munmap(yuv+i , length); close(video_fd); return 0 ; } int Start_Cameral(void) { //开启摄像头 int ret ; int on = 1 ; ret = ioctl(video_fd , VIDIOC_STREAMON , &on); if(ret != 0) { perror("start Cameral fail"); return -1 ; } return 0 ; } int Stop_Cameral(void) { //停止摄像头 int ret ; int off= 1 ; ret = ioctl(video_fd , VIDIOC_STREAMOFF, &off); if(ret != 0) { perror("stop Cameral fail"); return -1 ; } return 0 ; } int Get_Picture(char *buffer) { int ret ; //出队 ret = ioctl(video_fd , VIDIOC_DQBUF , &dequeue); if(ret != 0) { perror("dequeue fail"); return -1 ; } //获取图片数据 YUV yuv[dequeue.index] memcpy(buffer , yuv[dequeue.index] , dequeue.length); // write(yuyv_fd , yuv[dequeue.index] , dequeue.length); enqueue.index = dequeue.index ; ret = ioctl(video_fd , VIDIOC_QBUF , &enqueue); if(ret != 0) { perror("enqueue fail"); return -2 ; } return 0 ; }
Original article: http://blog.csdn.net/morixinguan/article/details/51001713