1 /**************************************************************************** |
|
2 ** |
|
3 ** Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies). |
|
4 ** All rights reserved. |
|
5 ** Contact: Nokia Corporation (qt-info@nokia.com) |
|
6 ** |
|
7 ** This file is part of the Qt Mobility Components. |
|
8 ** |
|
9 ** $QT_BEGIN_LICENSE:LGPL$ |
|
10 ** No Commercial Usage |
|
11 ** This file contains pre-release code and may not be distributed. |
|
12 ** You may use this file in accordance with the terms and conditions |
|
13 ** contained in the Technology Preview License Agreement accompanying |
|
14 ** this package. |
|
15 ** |
|
16 ** GNU Lesser General Public License Usage |
|
17 ** Alternatively, this file may be used under the terms of the GNU Lesser |
|
18 ** General Public License version 2.1 as published by the Free Software |
|
19 ** Foundation and appearing in the file LICENSE.LGPL included in the |
|
20 ** packaging of this file. Please review the following information to |
|
21 ** ensure the GNU Lesser General Public License version 2.1 requirements |
|
22 ** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. |
|
23 ** |
|
24 ** In addition, as a special exception, Nokia gives you certain additional |
|
25 ** rights. These rights are described in the Nokia Qt LGPL Exception |
|
26 ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. |
|
27 ** |
|
28 ** If you have questions regarding the use of this file, please contact |
|
29 ** Nokia at qt-info@nokia.com. |
|
30 ** |
|
31 ** |
|
32 ** |
|
33 ** |
|
34 ** |
|
35 ** |
|
36 ** |
|
37 ** |
|
38 ** $QT_END_LICENSE$ |
|
39 ** |
|
40 ****************************************************************************/ |
|
41 |
|
42 #include <QtCore/qdebug.h> |
|
43 #include <QWidget> |
|
44 #include <QFile> |
|
45 |
|
46 #include <linux/types.h> |
|
47 #include <sys/time.h> |
|
48 #include <sys/ioctl.h> |
|
49 #include <sys/poll.h> |
|
50 #include <unistd.h> |
|
51 #include <fcntl.h> |
|
52 #include <errno.h> |
|
53 #include <string.h> |
|
54 #include <stdlib.h> |
|
55 #include <sys/mman.h> |
|
56 #include <linux/videodev2.h> |
|
57 |
|
58 #include "v4lcamerasession.h" |
|
59 #include "v4lvideorenderer.h" |
|
60 #include "v4lvideobuffer.h" |
|
61 |
|
62 #include <QtMultimedia/qabstractvideobuffer.h> |
|
63 #include <QtMultimedia/qvideosurfaceformat.h> |
|
64 |
|
65 V4LCameraSession::V4LCameraSession(QObject *parent) |
|
66 :QObject(parent) |
|
67 { |
|
68 available = false; |
|
69 resolutions.clear(); |
|
70 formats.clear(); |
|
71 m_state = QMediaRecorder::StoppedState; |
|
72 m_device = "/dev/video1"; |
|
73 preview = false; |
|
74 toFile = false; |
|
75 converter = 0; |
|
76 active = false; |
|
77 |
|
78 sfd = ::open(m_device.constData(), O_RDWR); |
|
79 |
|
80 if (sfd != -1) { |
|
81 available = true; |
|
82 |
|
83 // get formats available |
|
84 v4l2_fmtdesc fmt; |
|
85 memset(&fmt, 0, sizeof(v4l2_fmtdesc)); |
|
86 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
|
87 int sanity = 0; |
|
88 for (fmt.index = 0;; fmt.index++) { |
|
89 if (sanity++ > 8) |
|
90 break; |
|
91 if( ::ioctl(sfd, VIDIOC_ENUM_FMT, &fmt) == -1) { |
|
92 if(errno == EINVAL) |
|
93 break; |
|
94 } |
|
95 formats.append(fmt.pixelformat); |
|
96 } |
|
97 |
|
98 // get sizes available |
|
99 resolutions << QSize(176, 144) << QSize(320, 240) << QSize(640, 480); |
|
100 |
|
101 ::close(sfd); |
|
102 sfd = -1; |
|
103 } |
|
104 m_output = 0; |
|
105 m_surface = 0; |
|
106 m_windowSize = QSize(320,240); |
|
107 pixelF = QVideoFrame::Format_RGB32; |
|
108 savedPixelF = QVideoFrame::Format_RGB32; |
|
109 } |
|
110 |
|
111 V4LCameraSession::~V4LCameraSession() |
|
112 { |
|
113 } |
|
114 |
|
115 void V4LCameraSession::captureImage(const QString &fileName) |
|
116 { |
|
117 m_snapshot = fileName; |
|
118 } |
|
119 |
|
120 void V4LCameraSession::setSurface(QAbstractVideoSurface* surface) |
|
121 { |
|
122 m_surface = surface; |
|
123 } |
|
124 |
|
125 bool V4LCameraSession::deviceReady() |
|
126 { |
|
127 return available; |
|
128 } |
|
129 |
|
130 int V4LCameraSession::framerate() const |
|
131 { |
|
132 return -1; |
|
133 } |
|
134 |
|
135 void V4LCameraSession::setFrameRate(int rate) |
|
136 { |
|
137 Q_UNUSED(rate) |
|
138 } |
|
139 |
|
140 int V4LCameraSession::brightness() const |
|
141 { |
|
142 if(sfd == -1) return -1; |
|
143 |
|
144 struct v4l2_control control_s; |
|
145 int err; |
|
146 control_s.id = V4L2_CID_BRIGHTNESS; |
|
147 if ((err = ::ioctl(sfd, VIDIOC_G_CTRL, &control_s)) < 0) |
|
148 return -1; |
|
149 |
|
150 return control_s.value; |
|
151 } |
|
152 |
|
153 void V4LCameraSession::setBrightness(int b) |
|
154 { |
|
155 if(sfd == -1) return; |
|
156 |
|
157 struct v4l2_control control_s; |
|
158 int err; |
|
159 control_s.id = V4L2_CID_BRIGHTNESS; |
|
160 control_s.value = b; |
|
161 if ((err = ::ioctl(sfd, VIDIOC_S_CTRL, &control_s)) < 0) |
|
162 return; |
|
163 } |
|
164 |
|
165 int V4LCameraSession::contrast() const |
|
166 { |
|
167 if(sfd == -1) return -1; |
|
168 |
|
169 struct v4l2_control control_s; |
|
170 int err; |
|
171 control_s.id = V4L2_CID_CONTRAST; |
|
172 if ((err = ::ioctl(sfd, VIDIOC_G_CTRL, &control_s)) < 0) |
|
173 return -1; |
|
174 |
|
175 return control_s.value; |
|
176 } |
|
177 |
|
178 void V4LCameraSession::setContrast(int c) |
|
179 { |
|
180 if(sfd == -1) return; |
|
181 |
|
182 struct v4l2_control control_s; |
|
183 int err; |
|
184 control_s.id = V4L2_CID_CONTRAST; |
|
185 control_s.value = c; |
|
186 if ((err = ::ioctl(sfd, VIDIOC_S_CTRL, &control_s)) < 0) |
|
187 return; |
|
188 } |
|
189 |
|
190 int V4LCameraSession::saturation() const |
|
191 { |
|
192 if(sfd == -1) return -1; |
|
193 |
|
194 struct v4l2_control control_s; |
|
195 int err; |
|
196 control_s.id = V4L2_CID_SATURATION; |
|
197 if ((err = ::ioctl(sfd, VIDIOC_G_CTRL, &control_s)) < 0) |
|
198 return -1; |
|
199 |
|
200 return control_s.value; |
|
201 } |
|
202 |
|
203 void V4LCameraSession::setSaturation(int s) |
|
204 { |
|
205 if(sfd == -1) return; |
|
206 |
|
207 struct v4l2_control control_s; |
|
208 int err; |
|
209 control_s.id = V4L2_CID_SATURATION; |
|
210 control_s.value = s; |
|
211 if ((err = ::ioctl(sfd, VIDIOC_S_CTRL, &control_s)) < 0) |
|
212 return; |
|
213 } |
|
214 |
|
215 int V4LCameraSession::hue() const |
|
216 { |
|
217 if(sfd == -1) return -1; |
|
218 |
|
219 struct v4l2_control control_s; |
|
220 int err; |
|
221 control_s.id = V4L2_CID_HUE; |
|
222 if ((err = ::ioctl(sfd, VIDIOC_G_CTRL, &control_s)) < 0) |
|
223 return -1; |
|
224 |
|
225 return control_s.value; |
|
226 } |
|
227 |
|
228 void V4LCameraSession::setHue(int h) |
|
229 { |
|
230 if(sfd == -1) return; |
|
231 |
|
232 struct v4l2_control control_s; |
|
233 int err; |
|
234 control_s.id = V4L2_CID_HUE; |
|
235 control_s.value = h; |
|
236 if ((err = ::ioctl(sfd, VIDIOC_S_CTRL, &control_s)) < 0) |
|
237 return; |
|
238 } |
|
239 |
|
240 int V4LCameraSession::sharpness() const |
|
241 { |
|
242 return -1; |
|
243 } |
|
244 |
|
245 void V4LCameraSession::setSharpness(int s) |
|
246 { |
|
247 Q_UNUSED(s) |
|
248 } |
|
249 |
|
250 int V4LCameraSession::zoom() const |
|
251 { |
|
252 return -1; |
|
253 } |
|
254 |
|
255 void V4LCameraSession::setZoom(int z) |
|
256 { |
|
257 Q_UNUSED(z) |
|
258 } |
|
259 |
|
260 bool V4LCameraSession::backlightCompensation() const |
|
261 { |
|
262 return false; |
|
263 } |
|
264 |
|
265 void V4LCameraSession::setBacklightCompensation(bool b) |
|
266 { |
|
267 Q_UNUSED(b) |
|
268 } |
|
269 |
|
270 int V4LCameraSession::whitelevel() const |
|
271 { |
|
272 return -1; |
|
273 } |
|
274 |
|
275 void V4LCameraSession::setWhitelevel(int w) |
|
276 { |
|
277 Q_UNUSED(w) |
|
278 } |
|
279 |
|
280 int V4LCameraSession::rotation() const |
|
281 { |
|
282 return 0; |
|
283 } |
|
284 |
|
285 void V4LCameraSession::setRotation(int r) |
|
286 { |
|
287 Q_UNUSED(r) |
|
288 } |
|
289 |
|
290 bool V4LCameraSession::flash() const |
|
291 { |
|
292 return false; |
|
293 } |
|
294 |
|
295 void V4LCameraSession::setFlash(bool f) |
|
296 { |
|
297 Q_UNUSED(f) |
|
298 } |
|
299 |
|
300 bool V4LCameraSession::autofocus() const |
|
301 { |
|
302 return false; |
|
303 } |
|
304 |
|
305 void V4LCameraSession::setAutofocus(bool f) |
|
306 { |
|
307 Q_UNUSED(f) |
|
308 } |
|
309 |
|
310 QSize V4LCameraSession::frameSize() const |
|
311 { |
|
312 return m_windowSize; |
|
313 } |
|
314 |
|
315 void V4LCameraSession::setFrameSize(const QSize& s) |
|
316 { |
|
317 m_windowSize = s; |
|
318 } |
|
319 |
|
320 void V4LCameraSession::setDevice(const QString &device) |
|
321 { |
|
322 available = false; |
|
323 m_state = QMediaRecorder::StoppedState; |
|
324 m_device = QByteArray(device.toLocal8Bit().constData()); |
|
325 |
|
326 sfd = ::open(m_device.constData(), O_RDWR); |
|
327 |
|
328 if (sfd != -1) { |
|
329 available = true; |
|
330 |
|
331 // get formats available |
|
332 v4l2_fmtdesc fmt; |
|
333 memset(&fmt, 0, sizeof(v4l2_fmtdesc)); |
|
334 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
|
335 int sanity = 0; |
|
336 for (fmt.index = 0;; fmt.index++) { |
|
337 if (sanity++ > 8) |
|
338 break; |
|
339 if( ::ioctl(sfd, VIDIOC_ENUM_FMT, &fmt) == -1) { |
|
340 if(errno == EINVAL) |
|
341 break; |
|
342 } |
|
343 formats.append(fmt.pixelformat); |
|
344 } |
|
345 |
|
346 // get sizes available |
|
347 resolutions << QSize(176, 144) << QSize(320, 240) << QSize(640, 480); |
|
348 |
|
349 ::close(sfd); |
|
350 sfd = -1; |
|
351 } |
|
352 active = false; |
|
353 } |
|
354 |
|
355 QList<QVideoFrame::PixelFormat> V4LCameraSession::supportedPixelFormats() |
|
356 { |
|
357 QList<QVideoFrame::PixelFormat> list; |
|
358 |
|
359 if(available) { |
|
360 for(int i=0;i<formats.size();i++) { |
|
361 if(formats.at(i) == V4L2_PIX_FMT_YUYV) |
|
362 list << QVideoFrame::Format_YUYV; |
|
363 else if(formats.at(i) == V4L2_PIX_FMT_UYVY) |
|
364 list << QVideoFrame::Format_UYVY; |
|
365 else if(formats.at(i) == V4L2_PIX_FMT_RGB24) |
|
366 list << QVideoFrame::Format_RGB24; |
|
367 else if(formats.at(i) == V4L2_PIX_FMT_RGB32) |
|
368 list << QVideoFrame::Format_RGB32; |
|
369 else if(formats.at(i) == V4L2_PIX_FMT_RGB565) |
|
370 list << QVideoFrame::Format_RGB565; |
|
371 } |
|
372 } |
|
373 |
|
374 return list; |
|
375 } |
|
376 |
|
377 QVideoFrame::PixelFormat V4LCameraSession::pixelFormat() const |
|
378 { |
|
379 return pixelF; |
|
380 } |
|
381 |
|
382 void V4LCameraSession::setPixelFormat(QVideoFrame::PixelFormat fmt) |
|
383 { |
|
384 pixelF = fmt; |
|
385 savedPixelF = fmt; |
|
386 } |
|
387 |
|
388 QList<QSize> V4LCameraSession::supportedResolutions() |
|
389 { |
|
390 QList<QSize> list; |
|
391 |
|
392 if(available) { |
|
393 list << resolutions; |
|
394 } |
|
395 |
|
396 return list; |
|
397 } |
|
398 |
|
399 bool V4LCameraSession::setOutputLocation(const QUrl &sink) |
|
400 { |
|
401 m_sink = sink; |
|
402 |
|
403 return true; |
|
404 } |
|
405 |
|
406 QUrl V4LCameraSession::outputLocation() const |
|
407 { |
|
408 return m_sink; |
|
409 } |
|
410 |
|
411 qint64 V4LCameraSession::position() const |
|
412 { |
|
413 return timeStamp.elapsed(); |
|
414 } |
|
415 |
|
416 QMediaRecorder::State V4LCameraSession::state() const |
|
417 { |
|
418 return m_state; |
|
419 } |
|
420 |
|
421 void V4LCameraSession::previewMode(bool value) |
|
422 { |
|
423 preview = value; |
|
424 } |
|
425 |
|
426 void V4LCameraSession::captureToFile(bool value) |
|
427 { |
|
428 if(toFile && m_file.isOpen()) |
|
429 m_file.close(); |
|
430 |
|
431 toFile = value; |
|
432 } |
|
433 |
|
434 bool V4LCameraSession::isFormatSupported(QVideoFrame::PixelFormat pFormat) |
|
435 { |
|
436 if(sfd == -1) |
|
437 return false; |
|
438 |
|
439 int ret; |
|
440 struct v4l2_format fmt; |
|
441 |
|
442 memset(&fmt, 0, sizeof(fmt)); |
|
443 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
|
444 fmt.fmt.pix.width = m_windowSize.width(); |
|
445 fmt.fmt.pix.height = m_windowSize.height(); |
|
446 |
|
447 if(pFormat == QVideoFrame::Format_YUYV) |
|
448 fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; |
|
449 if(pFormat == QVideoFrame::Format_RGB24) |
|
450 fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; |
|
451 if(pFormat == QVideoFrame::Format_RGB32) |
|
452 fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB32; |
|
453 if(pFormat == QVideoFrame::Format_RGB565) |
|
454 fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565; |
|
455 if(pFormat == QVideoFrame::Format_UYVY) |
|
456 fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; |
|
457 |
|
458 fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; |
|
459 ret = ::ioctl(sfd, VIDIOC_S_FMT, &fmt); |
|
460 |
|
461 if(ret == -1) |
|
462 return false; |
|
463 |
|
464 return true; |
|
465 } |
|
466 |
|
// Start (or resume) capturing.
//
// The sequence is: (1) fast-exit paths for an already-active session,
// (2) open the device, (3) negotiate a pixel format between the camera
// and the surface — preferring a direct match, then the software
// converter (YUYV/UYVY/YUV420P -> RGB565), (4) request and mmap four
// V4L2 buffers, queue them, start streaming, and (5) hook a
// QSocketNotifier to the fd so captureFrame() runs when a frame is ready.
// On any failure the fd is closed and cameraStateChanged(StoppedState)
// is emitted.
void V4LCameraSession::record()
{
    // restore the caller's preferred format; a previous run's fallback
    // negotiation may have overwritten pixelF
    pixelF = savedPixelF;

    if(active && toFile && m_state != QMediaRecorder::PausedState) {
        // Camera is active and captureToFile(true): only flip the recorder
        // state, the streaming pipeline is already running
        m_state = QMediaRecorder::RecordingState;
        emit recordStateChanged(m_state);
        return;
    } else if(sfd > 0)
        return;   // device already open (e.g. resuming) — nothing to set up

    sfd = ::open(m_device.constData(), O_RDWR);
    if(sfd == -1) {
        qWarning()<<"can't open v4l "<<m_device;
        m_state = QMediaRecorder::StoppedState;
        emit cameraStateChanged(QCamera::StoppedState);
        return;
    }

    bool match = false;
    QList<QVideoFrame::PixelFormat> fmts = supportedPixelFormats();

    // first try and use what we have
    // NOTE(review): m_surface is dereferenced without a null check here —
    // callers must have called setSurface() first; confirm
    if ((fmts.contains(pixelF)) && isFormatSupported(pixelF) &&
        m_surface->isFormatSupported(QVideoSurfaceFormat(m_windowSize,pixelF)))
        match = true;

    // try and find a match between camera and surface that doesn't require a converter
    if(!match) {
        foreach(QVideoFrame::PixelFormat format, fmts) {
            if (m_surface->isFormatSupported(QVideoSurfaceFormat(m_windowSize,format))) {
                // found a match, try to use it!
                if(isFormatSupported(format)) {
                    qWarning()<<"found a match "<<format;
                    match = true;
                    pixelF = format;
                    break;
                }
            }
        }
    }

    // try to see if we can use the converter
    if(!match) {
        // no direct match up found, see if we can use the converter
        if(m_surface->isFormatSupported(QVideoSurfaceFormat(m_windowSize,QVideoFrame::Format_RGB32)) ||
                m_surface->isFormatSupported(QVideoSurfaceFormat(m_windowSize,QVideoFrame::Format_RGB24)) ||
                m_surface->isFormatSupported(QVideoSurfaceFormat(m_windowSize,QVideoFrame::Format_RGB565))) {
            // converter can convert YUYV->RGB565, UYVY->RGB565 and YUV420P->RGB565
            if (pixelF == QVideoFrame::Format_YUYV || pixelF == QVideoFrame::Format_UYVY ||
                    pixelF == QVideoFrame::Format_YUV420P) {
                if(isFormatSupported(pixelF)) {
                    match = true;
                    converter = CameraFormatConverter::createFormatConverter(pixelF,m_windowSize.width(),
                            m_windowSize.height());
                    qWarning()<<"found a converter match from: "<<pixelF<<" to RGB565";
                }
            }
            if (!match) {
                // fallback, cant convert your format so set to one that I can get working!
                foreach(QVideoFrame::PixelFormat format, fmts) {
                    if(format == QVideoFrame::Format_YUYV || format == QVideoFrame::Format_UYVY ||
                            format == QVideoFrame::Format_YUV420P) {
                        if(isFormatSupported(format)) {
                            pixelF = format;
                            match = true;
                            converter = CameraFormatConverter::createFormatConverter(pixelF,m_windowSize.width(),
                                    m_windowSize.height());
                            qWarning()<<"fallback, convert from: "<<pixelF<<" to RGB565";
                            break;
                        }
                    }
                }
            }
        }
    }
    if(!match) {
        // negotiation failed entirely; NOTE(review): sfd stays open here —
        // looks like a leak, confirm intended
        qWarning() << "error setting camera format, no supported formats available";
        m_state = QMediaRecorder::StoppedState;
        emit cameraStateChanged(QCamera::StoppedState);
        return;
    }

    // request four mmap-able capture buffers from the driver
    int ret;
    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(req));
    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    ret = ::ioctl(sfd, VIDIOC_REQBUFS, &req);
    if(ret == -1) {
        qWarning()<<"error allocating buffers";
        m_state = QMediaRecorder::StoppedState;
        emit cameraStateChanged(QCamera::StoppedState);
        return;
    }
    // query each buffer's offset/length and map it into our address space
    for(int i=0;i<(int)req.count;++i) {
        struct v4l2_buffer buf;
        memset(&buf, 0 , sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        ret = ::ioctl(sfd, VIDIOC_QUERYBUF, &buf);
        if(ret == -1) {
            qWarning()<<"error allocating buffers";
            m_state = QMediaRecorder::StoppedState;
            emit cameraStateChanged(QCamera::StoppedState);
            return;
        }
        void* mmap_data = ::mmap(0,buf.length,PROT_READ | PROT_WRITE,MAP_SHARED,sfd,buf.m.offset);
        if (mmap_data == reinterpret_cast<void*>(-1)) {   // i.e. MAP_FAILED
            qWarning()<<"can't mmap video data";
            ::close(sfd);
            sfd = -1;
            m_state = QMediaRecorder::StoppedState;
            emit cameraStateChanged(QCamera::StoppedState);
            return;
        }
        video_buffer v4l_buf;
        v4l_buf.start = reinterpret_cast<char*>(mmap_data);
        v4l_buf.length = buf.length;
        buffers.append(v4l_buf);
    }
    // start stream: queue all buffers, then turn streaming on
    for(int i=0;i<(int)req.count;++i) {
        struct v4l2_buffer buf;
        memset(&buf,0,sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        ret = ::ioctl(sfd, VIDIOC_QBUF, &buf);
        if(ret == -1) {
            qWarning()<<"can't mmap video data";
            ::close(sfd);
            sfd = -1;
            m_state = QMediaRecorder::StoppedState;
            emit cameraStateChanged(QCamera::StoppedState);
            return;
        }
    }
    v4l2_buf_type buf_type;
    buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret = ::ioctl(sfd, VIDIOC_STREAMON, &buf_type);
    if(ret < 0) {
        qWarning()<<"can't start capture";
        ::close(sfd);
        sfd = -1;
        m_state = QMediaRecorder::StoppedState;
        emit cameraStateChanged(QCamera::StoppedState);
        return;
    }
    // wake captureFrame() whenever the driver has a frame ready to dequeue
    notifier = new QSocketNotifier(sfd, QSocketNotifier::Read, this);
    connect(notifier, SIGNAL(activated(int)), this, SLOT(captureFrame()));
    notifier->setEnabled(1);
    if (!converter) {
        // direct path: surface consumes the camera's native format
        QVideoSurfaceFormat requestedFormat(m_windowSize,pixelF);

        bool check = m_surface->isFormatSupported(requestedFormat);

        if(check) {
            m_surface->start(requestedFormat);

            m_state = QMediaRecorder::RecordingState;
            emit cameraStateChanged(QCamera::ActiveState);
            timeStamp.restart();
        }
    } else {
        // converter path: frames are converted and presented as RGB32
        QVideoSurfaceFormat requestedFormat(m_windowSize,QVideoFrame::Format_RGB32);
        m_surface->start(requestedFormat);
        m_state = QMediaRecorder::RecordingState;
        emit cameraStateChanged(QCamera::ActiveState);
        timeStamp.restart();
    }
    active = true;
}
|
643 |
|
644 void V4LCameraSession::pause() |
|
645 { |
|
646 if(sfd != -1) { |
|
647 v4l2_buf_type buf_type; |
|
648 buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
|
649 ::ioctl(sfd, VIDIOC_STREAMOFF, &buf_type); |
|
650 if (notifier) { |
|
651 notifier->setEnabled(false); |
|
652 disconnect(notifier, 0, 0, 0); |
|
653 delete notifier; |
|
654 notifier = 0; |
|
655 } |
|
656 ::close(sfd); |
|
657 sfd = -1; |
|
658 m_state = QMediaRecorder::PausedState; |
|
659 emit recordStateChanged(m_state); |
|
660 } |
|
661 } |
|
662 |
|
663 void V4LCameraSession::stop() |
|
664 { |
|
665 if(sfd != -1) { |
|
666 |
|
667 if(toFile && m_file.isOpen() && m_state != QMediaRecorder::StoppedState) { |
|
668 // just stop writing to file. |
|
669 m_file.close(); |
|
670 |
|
671 m_state = QMediaRecorder::StoppedState; |
|
672 emit recordStateChanged(m_state); |
|
673 return; |
|
674 } |
|
675 |
|
676 v4l2_buf_type buf_type; |
|
677 buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
|
678 ::ioctl(sfd, VIDIOC_STREAMOFF, &buf_type); |
|
679 if (notifier) { |
|
680 notifier->setEnabled(false); |
|
681 disconnect(notifier, 0, 0, 0); |
|
682 delete notifier; |
|
683 notifier = 0; |
|
684 } |
|
685 // Dequeue remaining buffers |
|
686 for(int i = 0;i < 4; ++i) { |
|
687 v4l2_buffer buf; |
|
688 memset(&buf, 0, sizeof(struct v4l2_buffer)); |
|
689 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
|
690 buf.memory = V4L2_MEMORY_MMAP; |
|
691 buf.index = i; |
|
692 if (ioctl(sfd, VIDIOC_QUERYBUF, &buf) == 0) { |
|
693 if (buf.flags & V4L2_BUF_FLAG_QUEUED) { |
|
694 ::ioctl(sfd, VIDIOC_DQBUF, &buf); |
|
695 } |
|
696 } |
|
697 munmap(buffers.at(buf.index).start,buf.length); |
|
698 } |
|
699 |
|
700 ::close(sfd); |
|
701 sfd = -1; |
|
702 m_state = QMediaRecorder::StoppedState; |
|
703 emit cameraStateChanged(QCamera::StoppedState); |
|
704 |
|
705 if(converter) |
|
706 delete converter; |
|
707 converter = 0; |
|
708 active = false; |
|
709 } |
|
710 } |
|
711 |
|
// Slot fired by the QSocketNotifier when the driver has a filled buffer.
//
// Dequeues one buffer and routes it along one of three paths:
//  - converter path: software-convert to RGB16 -> RGB32 QImage, optionally
//    save a snapshot and/or append to the capture file, optionally present
//    a preview frame, then re-queue the buffer;
//  - direct RGB565 surface path: wrap the mapped bytes in a QImage,
//    present it, re-queue the buffer;
//  - zero-copy path: hand the buffer to a V4LVideoBuffer, which is
//    responsible for re-queuing it (note: no QBUF here).
void V4LCameraSession::captureFrame()
{
    if(sfd == -1) return;
    v4l2_buffer buf;
    memset(&buf, 0, sizeof(struct v4l2_buffer));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    int ret = ioctl(sfd, VIDIOC_DQBUF, &buf);
    if (ret < 0 ) {
        qWarning()<<"error reading frame";
        return;
    }
    //qWarning()<<"size: "<<buf.bytesused<<", time: "<<buf.timestamp.tv_sec;

    if(m_surface) {
        if(converter) {
            // software conversion: native format -> RGB16, then RGB32
            QImage image;
            image = QImage(converter->convert((unsigned char*)buffers.at(buf.index).start,buf.bytesused),
                    m_windowSize.width(),m_windowSize.height(),QImage::Format_RGB16).convertToFormat(QImage::Format_RGB32);
            if(m_snapshot.length() > 0) {
                // one-shot snapshot armed by captureImage()
                image.save(m_snapshot,"JPG");
                m_snapshot.clear();
            }
            if(m_sink.toString().length() > 0 && toFile) {
                // save to file (opened lazily on the first frame)
                if(!m_file.isOpen()) {
                    if(m_sink.toLocalFile().length() > 0)
                        m_file.setFileName(m_sink.toLocalFile());
                    else
                        m_file.setFileName(m_sink.toString());
                    m_file.open(QIODevice::WriteOnly);
                }
                image.save(qobject_cast<QIODevice*>(&m_file),"JPG");
            }
            if(preview) {
                QVideoFrame frame(image);
                m_surface->present(frame);
            }
            // the converted copy is independent of the mapping, so the
            // buffer can go straight back to the driver
            ret = ioctl(sfd, VIDIOC_QBUF, &buf);

        } else {
            QVideoSurfaceFormat sfmt = m_surface->surfaceFormat();
            if(sfmt.pixelFormat() == QVideoFrame::Format_RGB565) {
                // wrap the mapped bytes directly as an RGB16 image
                // NOTE(review): QImage does not copy here — presumes the
                // surface consumes the frame before the buffer is re-queued
                QImage image;
                image = QImage((unsigned char*)buffers.at(buf.index).start,
                        m_windowSize.width(), m_windowSize.height(), QImage::Format_RGB16);
                QVideoFrame frame(image);
                m_surface->present(frame);
                ret = ioctl(sfd, VIDIOC_QBUF, &buf);

            } else {
                // zero-copy: V4LVideoBuffer owns the dequeued buffer and
                // re-queues it when released — hence no QBUF on this path
                V4LVideoBuffer* packet = new V4LVideoBuffer((unsigned char*)buffers.at(buf.index).start, sfd, buf);
                packet->setBytesPerLine(m_windowSize.width()*4);   // NOTE(review): assumes 4 bytes/pixel — only true for 32-bit formats
                QVideoFrame frame(packet,m_windowSize,pixelF);
                frame.setStartTime(buf.timestamp.tv_sec);
                m_surface->present(frame);
            }
        }

    } else
        // no surface attached: recycle the buffer immediately
        ret = ioctl(sfd, VIDIOC_QBUF, &buf);
}
|