在虚拟机搭建好系统后,真正的使用才刚刚开始。
在使用摄像头的时候,首先看自己的摄像头插上去是显示jpeg的还是yuv的
YUV 格式的摄像头要实现 UVC 转 QImage 再转 IplImage 这样的流程才能使用;JPEG 格式的很多人不会用,以为没法用,其实最好用,一点都不卡,YUV 的则有点卡。
我用的是 YUV 格式的摄像头,以前朋友用的是 JPEG 格式的摄像头。
代码是用的网上的一个代码修改的
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
|
#ifndef VIDEODEVICE_H
#define VIDEODEVICE_H

#include <string.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>   /* BUGFIX: needed for open()/close() prototypes */
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <asm/types.h>
#include <linux/videodev2.h>

#include <QString>
#include <QObject>

// Zero a struct before handing it to an ioctl, as V4L2 requires.
#define CLEAR(x) memset(&(x), 0, sizeof(x))

/// Wrapper around a V4L2 video-capture device using memory-mapped
/// streaming I/O.  Expected call sequence:
///   open_device() -> init_device() -> start_capturing()
///   -> { get_frame() / unget_frame() } loop
///   -> stop_capturing() -> uninit_device() -> close_device()
/// Every method returns 0 on success and -1 on failure, emitting
/// display_error() with a human-readable message before failing.
class VideoDevice : public QObject
{
    Q_OBJECT
public:
    explicit VideoDevice(QString dev_name);

    int open_device();
    int close_device();
    int init_device();
    int start_capturing();
    int stop_capturing();
    int uninit_device();
    /// Dequeues one frame; on success stores its address and byte length.
    int get_frame(void **frame_buf, size_t *len);
    /// Re-queues the buffer returned by the last successful get_frame().
    int unget_frame();

private:
    int init_mmap();

    /// One kernel buffer mapped into this process.
    struct buffer
    {
        void   *start;   // mmap()ed address
        size_t  length;  // size in bytes
    };

    QString      dev_name;   // device node, e.g. "/dev/video0"
    int          fd;         // device file descriptor, -1 when closed
    buffer      *buffers;    // array of n_buffers mapped buffers
    unsigned int n_buffers;  // number of buffers granted by the driver
    int          index;      // buffer index currently dequeued, -1 if none

signals:
    void display_error(QString);
};

#endif // VIDEODEVICE_H
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
|
#include "videodevice.h"

VideoDevice::VideoDevice(QString dev_name)
{
    this->dev_name = dev_name;
    this->fd = -1;
    this->buffers = NULL;
    this->n_buffers = 0;
    this->index = -1;
}

/// Opens the device node read/write in blocking mode.
int VideoDevice::open_device()
{
    fd = open(dev_name.toStdString().c_str(), O_RDWR /*| O_NONBLOCK*/, 0);
    if (-1 == fd)
    {
        emit display_error(tr("open: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}

int VideoDevice::close_device()
{
    if (-1 == close(fd))
    {
        emit display_error(tr("close: %1").arg(QString(strerror(errno))));
        return -1;
    }
    fd = -1;  // BUGFIX: mark closed so a stale descriptor is never reused
    return 0;
}

/// Verifies V4L2 capture+streaming capability, resets cropping, requests
/// a 640x480 packed-YUYV format and sets up the mmap buffers.
int VideoDevice::init_device()
{
    v4l2_capability cap;
    v4l2_cropcap cropcap;
    v4l2_crop crop;
    v4l2_format fmt;

    if (-1 == ioctl(fd, VIDIOC_QUERYCAP, &cap))
    {
        if (EINVAL == errno)
        {
            emit display_error(tr("%1 is no V4l2 device").arg(dev_name));
        }
        else
        {
            emit display_error(tr("VIDIOC_QUERYCAP: %1").arg(QString(strerror(errno))));
        }
        return -1;
    }
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        emit display_error(tr("%1 is no video capture device").arg(dev_name));
        return -1;
    }
    if (!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        emit display_error(tr("%1 does not support streaming i/o").arg(dev_name));
        return -1;
    }

    // Reset cropping to the driver's full default rectangle.  EINVAL from
    // VIDIOC_S_CROP only means cropping is unsupported, which is harmless.
    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == ioctl(fd, VIDIOC_CROPCAP, &cropcap))
    {
        CLEAR(crop);
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;
        if (-1 == ioctl(fd, VIDIOC_S_CROP, &crop))
        {
            if (EINVAL == errno)
            {
                // cropping not supported - ignore
            }
            else
            {
                emit display_error(tr("VIDIOC_S_CROP: %1").arg(QString(strerror(errno))));
                return -1;
            }
        }
    }
    else
    {
        emit display_error(tr("VIDIOC_CROPCAP: %1").arg(QString(strerror(errno))));
        return -1;
    }

    // Request 640x480 packed YUYV (YUV 4:2:2) frames.
    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;
    fmt.fmt.pix.height = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if (-1 == ioctl(fd, VIDIOC_S_FMT, &fmt))
    {
        // BUGFIX: the format string was missing its %1 placeholder, so the
        // strerror() text passed to arg() was silently discarded.
        emit display_error(tr("VIDIOC_S_FMT: %1").arg(QString(strerror(errno))));
        return -1;
    }

    if (-1 == init_mmap())
    {
        return -1;
    }
    return 0;
}

/// Asks the driver for 4 mmap buffers and maps each into this process.
int VideoDevice::init_mmap()
{
    v4l2_requestbuffers req;
    CLEAR(req);
    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    if (-1 == ioctl(fd, VIDIOC_REQBUFS, &req))
    {
        if (EINVAL == errno)
        {
            emit display_error(tr("%1 does not support memory mapping").arg(dev_name));
            return -1;
        }
        else
        {
            emit display_error(tr("VIDIOC_REQBUFS %1").arg(QString(strerror(errno))));
            return -1;
        }
    }
    // The driver may grant fewer buffers than requested; two is the minimum
    // for continuous capture.
    if (req.count < 2)
    {
        emit display_error(tr("Insufficient buffer memory on %1").arg(dev_name));
        return -1;
    }

    buffers = (buffer *)calloc(req.count, sizeof(*buffers));
    if (!buffers)
    {
        emit display_error(tr("out of memory"));
        return -1;
    }

    for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;

        if (-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf))
        {
            emit display_error(tr("VIDIOC_QUERYBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }

        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start = mmap(NULL,  // let the kernel pick the address
                                        buf.length,
                                        PROT_READ | PROT_WRITE,
                                        MAP_SHARED,
                                        fd, buf.m.offset);
        if (MAP_FAILED == buffers[n_buffers].start)
        {
            emit display_error(tr("mmap %1").arg(QString(strerror(errno))));
            return -1;
        }
    }
    return 0;
}

/// Queues every mapped buffer, then starts the capture stream.
int VideoDevice::start_capturing()
{
    unsigned int i;
    for (i = 0; i < n_buffers; ++i)
    {
        v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (-1 == ioctl(fd, VIDIOC_QBUF, &buf))
        {
            emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
    }

    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (-1 == ioctl(fd, VIDIOC_STREAMON, &type))
    {
        emit display_error(tr("VIDIOC_STREAMON: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}

int VideoDevice::stop_capturing()
{
    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (-1 == ioctl(fd, VIDIOC_STREAMOFF, &type))
    {
        emit display_error(tr("VIDIOC_STREAMOFF: %1").arg(QString(strerror(errno))));
        return -1;
    }
    return 0;
}

/// Unmaps and frees every capture buffer.
int VideoDevice::uninit_device()
{
    unsigned int i;
    for (i = 0; i < n_buffers; ++i)
    {
        if (-1 == munmap(buffers[i].start, buffers[i].length))
        {
            emit display_error(tr("munmap: %1").arg(QString(strerror(errno))));
            return -1;
        }
    }
    free(buffers);
    buffers = NULL;   // BUGFIX: avoid a dangling pointer after free()
    n_buffers = 0;
    return 0;
}

/// Dequeues the next filled buffer.  EAGAIN (no frame ready, non-blocking
/// mode) and EIO (transient transfer error) fail quietly; anything else is
/// reported via display_error().
int VideoDevice::get_frame(void **frame_buf, size_t *len)
{
    v4l2_buffer queue_buf;
    CLEAR(queue_buf);
    queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queue_buf.memory = V4L2_MEMORY_MMAP;

    if (-1 == ioctl(fd, VIDIOC_DQBUF, &queue_buf))
    {
        switch (errno)
        {
        case EAGAIN:
            return -1;
        case EIO:
            return -1;
        default:
            emit display_error(tr("VIDIOC_DQBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
    }

    *frame_buf = buffers[queue_buf.index].start;
    *len = buffers[queue_buf.index].length;
    index = queue_buf.index;   // remember which buffer to requeue later
    return 0;
}

/// Gives the last dequeued buffer back to the driver.
int VideoDevice::unget_frame()
{
    if (index != -1)
    {
        v4l2_buffer queue_buf;
        CLEAR(queue_buf);
        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.index = index;
        if (-1 == ioctl(fd, VIDIOC_QBUF, &queue_buf))
        {
            emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
            return -1;
        }
        index = -1;  // BUGFIX: prevent the same buffer from being queued twice
        return 0;
    }
    return -1;
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
|
#ifndef PROCESSIMAGE_H
#define PROCESSIMAGE_H

#include <QtGui>
#include "videodevice.h"

/// Widget that grabs YUYV frames from a VideoDevice, converts them to
/// RGB888 and shows them in a QLabel, refreshed periodically by a QTimer.
class ProcessImage : public QWidget
{
    Q_OBJECT
public:
    ProcessImage(QWidget *parent = 0);
    ~ProcessImage();

protected:
    // BUGFIX: paintEvent is a QWidget virtual override, not a slot; it was
    // previously mis-declared under "private slots".
    void paintEvent(QPaintEvent *);

private:
    QPainter *painter;
    QLabel   *label;
    QImage   *frame;   // wraps pp as its pixel store (no per-frame copy)
    QTimer   *timer;
    int       rs;      // last return status from VideoDevice calls
    uchar    *pp;      // RGB888 output buffer, 640*480*3 bytes
    uchar    *p;       // points into the driver's mmap'ed YUYV frame
    unsigned int len;  // byte length of the current YUYV frame

    /// Converts one Y/U/V triple to a packed 0x00BBGGRR value.
    int convert_yuv_to_rgb_pixel(int y, int u, int v);
    /// Converts a packed YUYV buffer of width*height pixels to RGB888.
    int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb,
                                  unsigned int width, unsigned int height);

    VideoDevice *vd;

private slots:
    void display_error(QString err);
};

#endif // PROCESSIMAGE_H
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
|
#include <QtGui>
#include "processImage.h"
#include "videodevice.h"

extern "C"
{
#include <stdio.h>
#include <stdlib.h>
}

ProcessImage::ProcessImage(QWidget *parent) : QWidget(parent)
{
    // RGB888 output buffer: 3 bytes per pixel at 640x480.
    pp = (unsigned char *)malloc(640 * 480 * 3 * sizeof(char));
    painter = new QPainter(this);
    // frame shares pp as its pixel store, so writing converted RGB data
    // into pp immediately updates the image - no extra copy is needed.
    frame = new QImage(pp, 640, 480, QImage::Format_RGB888);
    label = new QLabel();

    vd = new VideoDevice(tr("/dev/video0"));
    connect(vd, SIGNAL(display_error(QString)), this, SLOT(display_error(QString)));

    rs = vd->open_device();
    if (-1 == rs)
    {
        // BUGFIX: the message used to say "/dev/dsp" (an audio device);
        // the node actually opened is the video capture device.
        QMessageBox::warning(this, tr("error"), tr("open /dev/video0 error"), QMessageBox::Yes);
        vd->close_device();
    }

    rs = vd->init_device();
    if (-1 == rs)
    {
        QMessageBox::warning(this, tr("error"), tr("init failed"), QMessageBox::Yes);
        vd->close_device();
    }

    rs = vd->start_capturing();
    if (-1 == rs)
    {
        QMessageBox::warning(this, tr("error"), tr("start capture failed"), QMessageBox::Yes);
        vd->close_device();
    }
    // NOTE: the original re-tested the same rs value here and reported
    // "get frame failed" although get_frame() had never been called;
    // that dead duplicate check has been removed.

    // update() schedules a repaint, which drives paintEvent() ~every 30 ms.
    timer = new QTimer(this);
    connect(timer, SIGNAL(timeout()), this, SLOT(update()));
    timer->start(30);

    QHBoxLayout *hLayout = new QHBoxLayout();
    hLayout->addWidget(label);
    setLayout(hLayout);
    setWindowTitle(tr("Capture"));
}

ProcessImage::~ProcessImage()
{
    rs = vd->stop_capturing();
    rs = vd->uninit_device();
    rs = vd->close_device();
    // BUGFIX: release resources the original leaked.  frame must go
    // before pp because it references pp's storage.
    delete vd;
    delete frame;
    delete painter;
    free(pp);
}

void ProcessImage::paintEvent(QPaintEvent *)
{
    rs = vd->get_frame((void **)&p, &len);
    // BUGFIX: bail out when no frame was dequeued instead of converting
    // from a stale/uninitialized pointer.
    if (-1 == rs)
        return;

    convert_yuv_to_rgb_buffer(p, pp, 640, 480);
    // BUGFIX: the original also called frame->loadFromData(pp, ...), which
    // tries to parse raw RGB bytes as an *encoded* image file.  frame
    // already wraps pp (see constructor), so the converted pixels are
    // visible directly.
    label->setPixmap(QPixmap::fromImage(*frame, Qt::AutoColor));

    rs = vd->unget_frame();
}

void ProcessImage::display_error(QString err)
{
    QMessageBox::warning(this, tr("error"), err, QMessageBox::Yes);
}
/* Convert a packed YUYV (YUV 4:2:2) buffer to 24-bit RGB. */
int ProcessImage::convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb,
                                            unsigned int width, unsigned int height)
{
    const unsigned int yuv_bytes = width * height * 2;  // YUYV: 2 bytes/pixel
    unsigned int dst = 0;

    // Each 4-byte group encodes two horizontally adjacent pixels that
    // share one U/V chroma pair: [Y0 U Y1 V].
    for (unsigned int src = 0; src < yuv_bytes; src += 4)
    {
        const int y0 = yuv[src + 0];
        const int u  = yuv[src + 1];
        const int y1 = yuv[src + 2];
        const int v  = yuv[src + 3];

        unsigned int rgb32 = convert_yuv_to_rgb_pixel(y0, u, v);
        rgb[dst++] = rgb32 & 0x000000ff;
        rgb[dst++] = (rgb32 & 0x0000ff00) >> 8;
        rgb[dst++] = (rgb32 & 0x00ff0000) >> 16;

        rgb32 = convert_yuv_to_rgb_pixel(y1, u, v);
        rgb[dst++] = rgb32 & 0x000000ff;
        rgb[dst++] = (rgb32 & 0x0000ff00) >> 8;
        rgb[dst++] = (rgb32 & 0x00ff0000) >> 16;
    }
    return 0;
}

/* Convert one Y/U/V triple to RGB, packed into the low three bytes of the
 * returned int (R in the lowest byte).  The 220/256 factor darkens the
 * output slightly, exactly as in the original implementation. */
int ProcessImage::convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    unsigned int rgb32 = 0;
    unsigned char *channel = (unsigned char *)&rgb32;

    int r = y + (1.370705 * (v - 128));
    int g = y - (0.698001 * (v - 128)) - (0.337633 * (u - 128));
    int b = y + (1.732446 * (u - 128));

    // Clamp each channel into [0, 255].
    r = (r > 255) ? 255 : ((r < 0) ? 0 : r);
    g = (g > 255) ? 255 : ((g < 0) ? 0 : g);
    b = (b > 255) ? 255 : ((b < 0) ? 0 : b);

    channel[0] = r * 220 / 256;
    channel[1] = g * 220 / 256;
    channel[2] = b * 220 / 256;
    return rgb32;
}
1
2
3
4
5
6
7
8
9
10
11
12
|
#include <QtGui>
#include "processImage.h"
int main( int
argc, char
*argv[]) { QApplication app(argc,argv);
ProcessImage process;
process.resize(640,480);
process.show();
return
app.exec(); } |
可以复制也可以到我的资源里去找有的我上传了字库的移植,还有这个代码。
先在虚拟机上跑起来一般没问题 记得设备号在虚拟机为/dev/video0 至于虚拟机用摄像头看我别的博文
跑好了再交叉编译记得改设备号为/dev/video2具体的看你自己插拔摄像头在dev里是哪个。
然后有的人直接可以使用了,但是有的一堆问题的我就是
第一个问题:s3c-fimc: invalid target size
把这句话添加进去就没有问题了
fmt.fmt.pix.priv=1;
这句话加在cpp里的有个地方差不多全是这种。
第二个问题是:segmentation fault(段错误)
我够倒霉的全遇到了 解决花了一周。最后没办法我单步调试的
单步调试发现在 in=155644 的时候就会出现段错误。检查大小计算后发现问题出在 640*480 上,容易造成内存越界;把分辨率改成 320*240 后就不会超过 155644,问题随之解决。
当时还有点小激动呢。图片小了你可以用opencv再放大的。
最后开发板图像出来了 这里不拍照随便拿个把
需要的到我资源里下载代码 字库什么的
移植opencv到开发板,摄像头在开发板6410上的采集使用(2)
原文地址:http://blog.csdn.net/chenaini119/article/details/39312645