1 /**************************************************************************** |
|
2 ** |
|
3 ** Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies). |
|
4 ** All rights reserved. |
|
5 ** Contact: Nokia Corporation (qt-info@nokia.com) |
|
6 ** |
|
7 ** This file is part of the Qt Mobility Components. |
|
8 ** |
|
9 ** $QT_BEGIN_LICENSE:LGPL$ |
|
10 ** No Commercial Usage |
|
11 ** This file contains pre-release code and may not be distributed. |
|
12 ** You may use this file in accordance with the terms and conditions |
|
13 ** contained in the Technology Preview License Agreement accompanying |
|
14 ** this package. |
|
15 ** |
|
16 ** GNU Lesser General Public License Usage |
|
17 ** Alternatively, this file may be used under the terms of the GNU Lesser |
|
18 ** General Public License version 2.1 as published by the Free Software |
|
19 ** Foundation and appearing in the file LICENSE.LGPL included in the |
|
20 ** packaging of this file. Please review the following information to |
|
21 ** ensure the GNU Lesser General Public License version 2.1 requirements |
|
22 ** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. |
|
23 ** |
|
24 ** In addition, as a special exception, Nokia gives you certain additional |
|
25 ** rights. These rights are described in the Nokia Qt LGPL Exception |
|
26 ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. |
|
27 ** |
|
28 ** If you have questions regarding the use of this file, please contact |
|
29 ** Nokia at qt-info@nokia.com. |
|
30 ** |
|
31 ** |
|
32 ** |
|
33 ** |
|
34 ** |
|
35 ** |
|
36 ** |
|
37 ** |
|
38 ** $QT_END_LICENSE$ |
|
39 ** |
|
40 ****************************************************************************/ |
|
41 |
|
42 #include <QtCore/qdebug.h> |
|
43 #include <QWidget> |
|
44 #include <QFile> |
|
45 |
|
46 #include "dscamerasession.h" |
|
47 #include "dsvideorenderer.h" |
|
48 |
|
49 #include <QtMultimedia/qabstractvideobuffer.h> |
|
50 #include <QtMultimedia/qvideosurfaceformat.h> |
|
51 |
|
52 #include <uuids.h> |
|
// GUID for the I420 planar-YUV media subtype ('I420' fourcc, 0x30323449);
// it is not declared in the stock DirectShow uuids.h.
DEFINE_GUID(MEDIASUBTYPE_I420,
0x30323449,0x0000,0x0010,0x80,0x00,0x00,0xAA,0x00,0x38,0x9B,0x71);

// Release a COM interface pointer if non-null and reset it to NULL.
#define SAFE_RELEASE(x) { if(x) x->Release(); x = NULL; }
|
57 |
|
58 |
|
59 class SampleGrabberCallbackPrivate : public ISampleGrabberCB |
|
60 { |
|
61 public: |
|
62 STDMETHODIMP_(ULONG) AddRef() { return 1; } |
|
63 STDMETHODIMP_(ULONG) Release() { return 2; } |
|
64 |
|
65 STDMETHODIMP QueryInterface(REFIID riid, void **ppvObject) |
|
66 { |
|
67 if(NULL==ppvObject) return E_POINTER; |
|
68 if(riid==__uuidof(IUnknown)) { |
|
69 *ppvObject = static_cast<IUnknown*>(this); |
|
70 return S_OK; |
|
71 } |
|
72 if(riid==__uuidof(ISampleGrabberCB)) { |
|
73 *ppvObject = static_cast<ISampleGrabberCB*>(this); |
|
74 return S_OK; |
|
75 } |
|
76 return E_NOTIMPL; |
|
77 } |
|
78 |
|
79 STDMETHODIMP SampleCB(double Time, IMediaSample *pSample) |
|
80 { |
|
81 return E_NOTIMPL; |
|
82 } |
|
83 |
|
84 STDMETHODIMP BufferCB(double Time, BYTE *pBuffer, long BufferLen) |
|
85 { |
|
86 if(!cs || active) |
|
87 return S_OK; |
|
88 |
|
89 if((cs->StillMediaType.majortype != MEDIATYPE_Video) || |
|
90 (cs->StillMediaType.formattype != FORMAT_VideoInfo) || |
|
91 (cs->StillMediaType.cbFormat < sizeof(VIDEOINFOHEADER))) |
|
92 return VFW_E_INVALIDMEDIATYPE; |
|
93 |
|
94 active = true; |
|
95 |
|
96 if(toggle == true) toggle = false; |
|
97 else toggle = true; |
|
98 |
|
99 if(toggle) { |
|
100 active = false; |
|
101 return S_OK; |
|
102 } |
|
103 |
|
104 bool check = false; |
|
105 cs->mutex.lock(); |
|
106 if(cs->frames.size() > 5) |
|
107 check = true; |
|
108 cs->mutex.unlock(); |
|
109 if(check) |
|
110 return S_OK; |
|
111 |
|
112 |
|
113 unsigned char* vidData = new unsigned char[BufferLen]; |
|
114 memcpy(vidData,pBuffer,BufferLen); |
|
115 |
|
116 cs->mutex.lock(); |
|
117 |
|
118 video_buffer buf; |
|
119 buf.buffer = vidData; |
|
120 buf.length = BufferLen; |
|
121 buf.time = (qint64)Time; |
|
122 |
|
123 cs->frames.append(&buf); |
|
124 |
|
125 //qWarning()<<"create frame "<<buf.buffer<<", len="<<buf.length<<" buffered="<<cs->frames.size(); |
|
126 |
|
127 cs->mutex.unlock(); |
|
128 |
|
129 QMetaObject::invokeMethod(cs, "captureFrame", Qt::QueuedConnection); |
|
130 |
|
131 active = false; |
|
132 |
|
133 return S_OK; |
|
134 } |
|
135 |
|
136 DSCameraSession* cs; |
|
137 bool active; |
|
138 bool toggle; |
|
139 }; |
|
140 |
|
141 |
|
142 DSCameraSession::DSCameraSession(QObject *parent) |
|
143 :QObject(parent) |
|
144 { |
|
145 opened = false; |
|
146 available = false; |
|
147 resolutions.clear(); |
|
148 m_state = QCamera::StoppedState; |
|
149 m_device = "default"; |
|
150 |
|
151 StillCapCB = new SampleGrabberCallbackPrivate; |
|
152 StillCapCB->cs = this; |
|
153 StillCapCB->active = false; |
|
154 StillCapCB->toggle = false; |
|
155 |
|
156 m_output = 0; |
|
157 m_surface = 0; |
|
158 m_windowSize = QSize(320,240); |
|
159 pixelF = QVideoFrame::Format_RGB24; |
|
160 actualFormat = QVideoSurfaceFormat(m_windowSize,pixelF); |
|
161 |
|
162 graph = false; |
|
163 } |
|
164 |
|
165 DSCameraSession::~DSCameraSession() |
|
166 { |
|
167 if(opened) |
|
168 closeStream(); |
|
169 |
|
170 CoUninitialize(); |
|
171 |
|
172 SAFE_RELEASE(pCap); |
|
173 SAFE_RELEASE(pSG_Filter); |
|
174 SAFE_RELEASE(pGraph); |
|
175 SAFE_RELEASE(pBuild); |
|
176 |
|
177 if(StillCapCB) |
|
178 delete StillCapCB; |
|
179 } |
|
180 |
|
// Arms a snapshot: the next frame presented by captureFrame() is also
// saved to fileName and reported via imageCaptured().
void DSCameraSession::captureImage(const QString &fileName)
{
    m_snapshot = fileName;
}

// Sets the surface frames are presented on; ownership stays with the caller.
void DSCameraSession::setSurface(QAbstractVideoSurface* surface)
{
    m_surface = surface;
}

// True once setDevice() has matched an attached capture device and the
// filter graph was built successfully.
bool DSCameraSession::deviceReady()
{
    return available;
}
|
195 |
|
// ---------------------------------------------------------------------------
// Camera-control properties.
// None of these are implemented on this backend: the getters return a
// neutral sentinel (-1 / false / 0) and the setters ignore their argument.
// ---------------------------------------------------------------------------

int DSCameraSession::framerate() const
{
    return -1;  // not implemented
}

void DSCameraSession::setFrameRate(int rate)
{
    Q_UNUSED(rate)
}

int DSCameraSession::brightness() const
{
    return -1;  // not implemented
}

void DSCameraSession::setBrightness(int b)
{
    Q_UNUSED(b)
}

int DSCameraSession::contrast() const
{
    return -1;  // not implemented
}

void DSCameraSession::setContrast(int c)
{
    Q_UNUSED(c)
}

int DSCameraSession::saturation() const
{
    return -1;  // not implemented
}

void DSCameraSession::setSaturation(int s)
{
    Q_UNUSED(s)
}

int DSCameraSession::hue() const
{
    return -1;  // not implemented
}

void DSCameraSession::setHue(int h)
{
    Q_UNUSED(h)
}

int DSCameraSession::sharpness() const
{
    return -1;  // not implemented
}

void DSCameraSession::setSharpness(int s)
{
    Q_UNUSED(s)
}

int DSCameraSession::zoom() const
{
    return -1;  // not implemented
}

void DSCameraSession::setZoom(int z)
{
    Q_UNUSED(z)
}

bool DSCameraSession::backlightCompensation() const
{
    return false;  // not implemented
}

void DSCameraSession::setBacklightCompensation(bool b)
{
    Q_UNUSED(b)
}

int DSCameraSession::whitelevel() const
{
    return -1;  // not implemented
}

void DSCameraSession::setWhitelevel(int w)
{
    Q_UNUSED(w)
}

int DSCameraSession::rotation() const
{
    return 0;  // not implemented; frames are always unrotated
}

void DSCameraSession::setRotation(int r)
{
    Q_UNUSED(r)
}

bool DSCameraSession::flash() const
{
    return false;  // not implemented
}

void DSCameraSession::setFlash(bool f)
{
    Q_UNUSED(f)
}

bool DSCameraSession::autofocus() const
{
    return false;  // not implemented
}

void DSCameraSession::setAutofocus(bool f)
{
    Q_UNUSED(f)
}

// Current capture resolution (also the size frames are interpreted at in
// captureFrame()).
QSize DSCameraSession::frameSize() const
{
    return m_windowSize;
}
|
320 |
|
321 void DSCameraSession::setFrameSize(const QSize& s) |
|
322 { |
|
323 if(supportedResolutions(pixelF).contains(s)) |
|
324 m_windowSize = s; |
|
325 else { |
|
326 qWarning()<<"frame size if not supported for current pixel format, no change"; |
|
327 } |
|
328 } |
|
329 |
|
330 void DSCameraSession::setDevice(const QString &device) |
|
331 { |
|
332 if(opened) |
|
333 stopStream(); |
|
334 |
|
335 if(graph) { |
|
336 SAFE_RELEASE(pCap); |
|
337 SAFE_RELEASE(pSG_Filter); |
|
338 SAFE_RELEASE(pGraph); |
|
339 SAFE_RELEASE(pBuild); |
|
340 } |
|
341 |
|
342 //qWarning()<<"setDevice: "<<device; |
|
343 |
|
344 available = false; |
|
345 m_state = QCamera::StoppedState; |
|
346 |
|
347 CoInitialize(NULL); |
|
348 |
|
349 ICreateDevEnum* pDevEnum = NULL; |
|
350 IEnumMoniker* pEnum = NULL; |
|
351 |
|
352 // Create the System device enumerator |
|
353 HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, |
|
354 CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, |
|
355 reinterpret_cast<void**>(&pDevEnum)); |
|
356 if(SUCCEEDED(hr)) { |
|
357 // Create the enumerator for the video capture category |
|
358 hr = pDevEnum->CreateClassEnumerator( |
|
359 CLSID_VideoInputDeviceCategory, &pEnum, 0); |
|
360 pEnum->Reset(); |
|
361 // go through and find all video capture devices |
|
362 IMoniker* pMoniker = NULL; |
|
363 while(pEnum->Next(1, &pMoniker, NULL) == S_OK) { |
|
364 IPropertyBag *pPropBag; |
|
365 hr = pMoniker->BindToStorage(0,0,IID_IPropertyBag, |
|
366 (void**)(&pPropBag)); |
|
367 if(FAILED(hr)) { |
|
368 pMoniker->Release(); |
|
369 continue; // skip this one |
|
370 } |
|
371 // Find the description |
|
372 WCHAR str[120]; |
|
373 VARIANT varName; |
|
374 varName.vt = VT_BSTR; |
|
375 hr = pPropBag->Read(L"Description", &varName, 0); |
|
376 if(FAILED(hr)) |
|
377 hr = pPropBag->Read(L"FriendlyName", &varName, 0); |
|
378 if(SUCCEEDED(hr)) { |
|
379 hr = StringCchCopyW(str,sizeof(str)/sizeof(str[0]), varName.bstrVal); |
|
380 QString temp(QString::fromUtf16((unsigned short*)str)); |
|
381 if(temp.contains(device)) { |
|
382 //qWarning()<<"device valid use it: "<<device; |
|
383 available = true; |
|
384 } |
|
385 } |
|
386 pPropBag->Release(); |
|
387 pMoniker->Release(); |
|
388 } |
|
389 } |
|
390 CoUninitialize(); |
|
391 |
|
392 if(available) { |
|
393 m_device = QByteArray(device.toLocal8Bit().constData()); |
|
394 graph = createFilterGraph(); |
|
395 if(!graph) |
|
396 available = false; |
|
397 } |
|
398 } |
|
399 |
|
// Pixel formats the camera reported via updateProperties().
QList<QVideoFrame::PixelFormat> DSCameraSession::supportedPixelFormats()
{
    return types;
}

// Currently selected pixel format.
QVideoFrame::PixelFormat DSCameraSession::pixelFormat() const
{
    return pixelF;
}

// Selects the pixel format; no validation is done here (record() and
// setProperties() negotiate the actual format later).
void DSCameraSession::setPixelFormat(QVideoFrame::PixelFormat fmt)
{
    pixelF = fmt;
}

// Resolutions the camera supports for 'format'; empty if the format is
// unknown to the capability map.
QList<QSize> DSCameraSession::supportedResolutions(QVideoFrame::PixelFormat format)
{
    if(!resolutions.contains(format)) return QList<QSize>();
    return resolutions.value(format);
}

// Stores the output sink URL. Always succeeds; the sink is not used by
// the code visible in this file.
bool DSCameraSession::setOutputLocation(const QUrl &sink)
{
    m_sink = sink;

    return true;
}

// Last sink set via setOutputLocation().
QUrl DSCameraSession::outputLocation() const
{
    return m_sink;
}

// Milliseconds elapsed on the internal timestamp clock.
// NOTE(review): timeStamp is never (re)started in this file — confirm it
// is started elsewhere before position() is meaningful.
qint64 DSCameraSession::position() const
{
    return timeStamp.elapsed();
}

// Current QCamera state as an int.
int DSCameraSession::state() const
{
    return int(m_state);
}
|
442 |
|
443 void DSCameraSession::record() |
|
444 { |
|
445 if(opened) return; |
|
446 |
|
447 if(m_surface) { |
|
448 bool match = false; |
|
449 |
|
450 if(!m_surface->isFormatSupported(actualFormat)) { |
|
451 QList<QVideoFrame::PixelFormat> fmts; |
|
452 foreach(QVideoFrame::PixelFormat f, types) { |
|
453 if(fmts.contains(f)) { |
|
454 match = true; |
|
455 pixelF = f; |
|
456 actualFormat = QVideoSurfaceFormat(m_windowSize,pixelF); |
|
457 //qWarning()<<"try to use format: "<<pixelF; |
|
458 break; |
|
459 } |
|
460 } |
|
461 } |
|
462 if(!m_surface->isFormatSupported(actualFormat) && !match) { |
|
463 // fallback |
|
464 if(types.contains(QVideoFrame::Format_RGB24)) { |
|
465 // get RGB24 from camera and convert to RGB32 for surface! |
|
466 pixelF = QVideoFrame::Format_RGB32; |
|
467 actualFormat = QVideoSurfaceFormat(m_windowSize,pixelF); |
|
468 //qWarning()<<"get RGB24 from camera and convert to RGB32 for surface!"; |
|
469 } |
|
470 } |
|
471 |
|
472 if(m_surface->isFormatSupported(actualFormat)) { |
|
473 m_surface->start(actualFormat); |
|
474 m_state = QCamera::ActiveState; |
|
475 emit stateChanged(QCamera::ActiveState); |
|
476 } else { |
|
477 qWarning()<<"surface doesn't support camera format, cant start"; |
|
478 m_state = QCamera::StoppedState; |
|
479 emit stateChanged(QCamera::StoppedState); |
|
480 return; |
|
481 } |
|
482 } else { |
|
483 qWarning()<<"no video surface, cant start"; |
|
484 m_state = QCamera::StoppedState; |
|
485 emit stateChanged(QCamera::StoppedState); |
|
486 return; |
|
487 } |
|
488 |
|
489 opened = startStream(); |
|
490 |
|
491 if(!opened) { |
|
492 m_state = QCamera::StoppedState; |
|
493 emit stateChanged(QCamera::StoppedState); |
|
494 } |
|
495 } |
|
496 |
|
// Pauses the running graph (see suspendStream()); does not change m_state.
void DSCameraSession::pause()
{
    suspendStream();
}

// Stops the stream, closes it, and reports the StoppedState transition.
void DSCameraSession::stop()
{
    if(!opened) return;

    stopStream();
    opened = false;
    m_state = QCamera::StoppedState;
    emit stateChanged(QCamera::StoppedState);
}
|
511 |
|
512 void DSCameraSession::captureFrame() |
|
513 { |
|
514 if(m_surface && frames.count() > 0) { |
|
515 |
|
516 QImage image; |
|
517 |
|
518 if(pixelF == QVideoFrame::Format_RGB24) { |
|
519 |
|
520 mutex.lock(); |
|
521 |
|
522 image = QImage(frames.at(0)->buffer,m_windowSize.width(),m_windowSize.height(), |
|
523 QImage::Format_RGB888).rgbSwapped().mirrored(true); |
|
524 |
|
525 QVideoFrame frame(image); |
|
526 frame.setStartTime(frames.at(0)->time); |
|
527 |
|
528 mutex.unlock(); |
|
529 |
|
530 m_surface->present(frame); |
|
531 |
|
532 } else if(pixelF == QVideoFrame::Format_RGB32) { |
|
533 |
|
534 mutex.lock(); |
|
535 |
|
536 image = QImage(frames.at(0)->buffer,m_windowSize.width(),m_windowSize.height(), |
|
537 QImage::Format_RGB888).rgbSwapped().mirrored(true); |
|
538 |
|
539 QVideoFrame frame(image.convertToFormat(QImage::Format_RGB32)); |
|
540 frame.setStartTime(frames.at(0)->time); |
|
541 |
|
542 mutex.unlock(); |
|
543 |
|
544 m_surface->present(frame); |
|
545 |
|
546 } else { |
|
547 qWarning()<<"TODO:captureFrame() format ="<<pixelF; |
|
548 } |
|
549 |
|
550 if(m_snapshot.length() > 0) { |
|
551 emit imageCaptured(m_snapshot,image); |
|
552 image.save(m_snapshot,"JPG"); |
|
553 m_snapshot.clear(); |
|
554 } |
|
555 |
|
556 frames.removeFirst(); |
|
557 } |
|
558 } |
|
559 |
|
560 HRESULT DSCameraSession::GetPin(IBaseFilter *pFilter,PIN_DIRECTION PinDir, IPin **ppPin) |
|
561 { |
|
562 *ppPin = 0; |
|
563 IEnumPins *pEnum = 0; |
|
564 IPin *pPin = 0; |
|
565 HRESULT hr = pFilter->EnumPins(&pEnum); |
|
566 if(FAILED(hr)) |
|
567 return hr; |
|
568 pEnum->Reset(); |
|
569 while(pEnum->Next(1,&pPin,NULL) == S_OK) { |
|
570 PIN_DIRECTION ThisPinDir; |
|
571 pPin->QueryDirection(&ThisPinDir); |
|
572 if(ThisPinDir == PinDir) { |
|
573 pEnum->Release(); |
|
574 *ppPin = pPin; |
|
575 return S_OK; |
|
576 } |
|
577 pEnum->Release(); |
|
578 } |
|
579 pEnum->Release(); |
|
580 return E_FAIL; |
|
581 } |
|
582 |
|
583 bool DSCameraSession::createFilterGraph() |
|
584 { |
|
585 HRESULT hr; |
|
586 IMoniker* pMoniker = NULL; |
|
587 ICreateDevEnum* pDevEnum = NULL; |
|
588 IEnumMoniker* pEnum = NULL; |
|
589 AM_MEDIA_TYPE am_media_type; |
|
590 |
|
591 CoInitialize(NULL); |
|
592 |
|
593 // Create the filter graph |
|
594 hr = CoCreateInstance(CLSID_FilterGraph,NULL,CLSCTX_INPROC, |
|
595 IID_IGraphBuilder, (void**)&pGraph); |
|
596 if(FAILED(hr)) { |
|
597 qWarning()<<"failed to create filter graph"; |
|
598 return false; |
|
599 } |
|
600 // Create the capture graph builder |
|
601 hr = CoCreateInstance(CLSID_CaptureGraphBuilder2,NULL,CLSCTX_INPROC, |
|
602 IID_ICaptureGraphBuilder2, (void**)&pBuild); |
|
603 if(FAILED(hr)) { |
|
604 qWarning()<<"failed to create graph builder"; |
|
605 return false; |
|
606 } |
|
607 // Attach the filter graph to the capture graph |
|
608 hr = pBuild->SetFiltergraph(pGraph); |
|
609 if(FAILED(hr)) { |
|
610 qWarning()<<"failed to connect capture graph and filter graph"; |
|
611 return false; |
|
612 } |
|
613 // Find the Capture device |
|
614 hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, |
|
615 CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, |
|
616 reinterpret_cast<void**>(&pDevEnum)); |
|
617 if(SUCCEEDED(hr)) { |
|
618 // Create an enumerator for the video capture category |
|
619 hr = pDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnum, 0); |
|
620 pDevEnum->Release(); |
|
621 pEnum->Reset(); |
|
622 //go through and find all video capture devices |
|
623 while(pEnum->Next(1, &pMoniker, NULL) == S_OK) { |
|
624 IPropertyBag *pPropBag; |
|
625 hr = pMoniker->BindToStorage(0,0,IID_IPropertyBag,(void**)(&pPropBag)); |
|
626 if(FAILED(hr)) { |
|
627 pMoniker->Release(); |
|
628 continue; // skip this one |
|
629 } |
|
630 // Find the description |
|
631 WCHAR str[120]; |
|
632 VARIANT varName; |
|
633 varName.vt = VT_BSTR; |
|
634 hr = pPropBag->Read(L"FriendlyName", &varName, 0); |
|
635 if(SUCCEEDED(hr)) { |
|
636 // check if it is the selected device |
|
637 hr = StringCchCopyW(str,sizeof(str)/sizeof(str[0]), varName.bstrVal); |
|
638 QString output = QString::fromUtf16((unsigned short*)str); |
|
639 if(m_device.contains(output.toLocal8Bit().constData())) { |
|
640 hr = pMoniker->BindToObject(0,0,IID_IBaseFilter,(void**)&pCap); |
|
641 if(SUCCEEDED(hr)) { |
|
642 pPropBag->Release(); |
|
643 pMoniker->Release(); |
|
644 break; |
|
645 } |
|
646 } |
|
647 } |
|
648 pPropBag->Release(); |
|
649 pMoniker->Release(); |
|
650 } |
|
651 } |
|
652 pEnum->Release(); |
|
653 |
|
654 // Get capture device settings |
|
655 hr = pBuild->FindInterface(&PIN_CATEGORY_CAPTURE,&MEDIATYPE_Video,pCap, |
|
656 IID_IAMStreamConfig,(void**)&pConfig); |
|
657 if(FAILED(hr)) { |
|
658 qWarning()<<"failed to get config on capture device"; |
|
659 return false; |
|
660 } |
|
661 |
|
662 updateProperties(); |
|
663 |
|
664 // Sample grabber filter |
|
665 hr = CoCreateInstance(CLSID_SampleGrabber,NULL,CLSCTX_INPROC,IID_IBaseFilter,(void**)&pSG_Filter); |
|
666 if(FAILED(hr)) { |
|
667 qWarning()<<"failed to create sample grabber"; |
|
668 return false; |
|
669 } |
|
670 pSG_Filter->QueryInterface(IID_ISampleGrabber,(void**)&pSG); |
|
671 if(FAILED(hr)) { |
|
672 qWarning()<<"failed to get sample grabber"; |
|
673 return false; |
|
674 } |
|
675 pSG->SetOneShot(FALSE); |
|
676 pSG->SetBufferSamples(TRUE); |
|
677 pSG->SetCallback(StillCapCB, 1); |
|
678 |
|
679 // Get stream control |
|
680 hr = pGraph->QueryInterface(IID_IMediaControl,(void**)&pControl); |
|
681 if(FAILED(hr)) { |
|
682 qWarning()<<"failed to get stream control"; |
|
683 return false; |
|
684 } |
|
685 hr = pCap->QueryInterface(IID_IAMVideoControl,(void**)&pVideoControl); |
|
686 if(FAILED(hr)) { |
|
687 qWarning()<<"failed to get video control handle"; |
|
688 return false; |
|
689 } |
|
690 hr = pCap->QueryInterface(IID_IAMVideoProcAmp,(void**)&pProcAmp); |
|
691 if(FAILED(hr)) { |
|
692 qWarning()<<"failed to get video settings handle"; |
|
693 return false; |
|
694 } |
|
695 CoUninitialize(); |
|
696 |
|
697 return true; |
|
698 } |
|
699 |
|
700 void DSCameraSession::updateProperties() |
|
701 { |
|
702 HRESULT hr; |
|
703 AM_MEDIA_TYPE *pmt = NULL; |
|
704 VIDEOINFOHEADER *pvi = NULL; |
|
705 VIDEO_STREAM_CONFIG_CAPS scc; |
|
706 int iCount, iSize; |
|
707 hr = pConfig->GetNumberOfCapabilities(&iCount,&iSize); |
|
708 if(FAILED(hr)) { |
|
709 qWarning()<<"failed to get capabilities"; |
|
710 return; |
|
711 } |
|
712 |
|
713 QList<QSize> sizes; |
|
714 QVideoFrame::PixelFormat f; |
|
715 |
|
716 types.clear(); |
|
717 resolutions.clear(); |
|
718 |
|
719 for(int i=0;i<iCount;i++) { |
|
720 hr = pConfig->GetStreamCaps(i,&pmt,reinterpret_cast<BYTE*>(&scc)); |
|
721 if(hr == S_OK) { |
|
722 pvi = (VIDEOINFOHEADER*)pmt->pbFormat; |
|
723 if((pmt->majortype == MEDIATYPE_Video) && |
|
724 (pmt->formattype == FORMAT_VideoInfo)) { |
|
725 // Add types |
|
726 if(pmt->subtype == MEDIASUBTYPE_RGB24) { |
|
727 if(!types.contains(QVideoFrame::Format_RGB24)) { |
|
728 types.append(QVideoFrame::Format_RGB24); |
|
729 f = QVideoFrame::Format_RGB24; |
|
730 //qWarning()<<"camera supports RGB24"; |
|
731 } |
|
732 } else if(pmt->subtype == MEDIASUBTYPE_RGB32) { |
|
733 if(!types.contains(QVideoFrame::Format_RGB32)) { |
|
734 types.append(QVideoFrame::Format_RGB32); |
|
735 f = QVideoFrame::Format_RGB32; |
|
736 //qWarning()<<"camera supports RGB32"; |
|
737 } |
|
738 } else if(pmt->subtype == MEDIASUBTYPE_YUY2) { |
|
739 if(!types.contains(QVideoFrame::Format_YUYV)) { |
|
740 types.append(QVideoFrame::Format_YUYV); |
|
741 f = QVideoFrame::Format_YUYV; |
|
742 //qWarning()<<"camera supports YUY2"; |
|
743 } |
|
744 } else if(pmt->subtype == MEDIASUBTYPE_MJPG) { |
|
745 //qWarning("MJPG format not supported"); |
|
746 |
|
747 } else if(pmt->subtype == MEDIASUBTYPE_I420) { |
|
748 if(!types.contains(QVideoFrame::Format_YUV420P)) { |
|
749 types.append(QVideoFrame::Format_YUV420P); |
|
750 f = QVideoFrame::Format_YUV420P; |
|
751 //qWarning()<<"camera supports YUV420P"; |
|
752 } |
|
753 } else if(pmt->subtype == MEDIASUBTYPE_RGB555) { |
|
754 if(!types.contains(QVideoFrame::Format_RGB555)) { |
|
755 types.append(QVideoFrame::Format_RGB555); |
|
756 f = QVideoFrame::Format_RGB555; |
|
757 //qWarning()<<"camera supports RGB555"; |
|
758 } |
|
759 } else if(pmt->subtype == MEDIASUBTYPE_YVU9) { |
|
760 //qWarning("YVU9 format not supported"); |
|
761 |
|
762 } else if(pmt->subtype == MEDIASUBTYPE_UYVY) { |
|
763 if(!types.contains(QVideoFrame::Format_UYVY)) { |
|
764 types.append(QVideoFrame::Format_UYVY); |
|
765 f = QVideoFrame::Format_UYVY; |
|
766 //qWarning()<<"camera supports UYVY"; |
|
767 } |
|
768 } else { |
|
769 qWarning()<<"UNKNOWN FORMAT: "<<pmt->subtype.Data1; |
|
770 } |
|
771 // Add resolutions |
|
772 QSize res(pvi->bmiHeader.biWidth,pvi->bmiHeader.biHeight); |
|
773 if(!resolutions.contains(f)) { |
|
774 sizes.clear(); |
|
775 resolutions.insert(f,sizes); |
|
776 } |
|
777 resolutions[f].append(res); |
|
778 } |
|
779 } |
|
780 } |
|
781 } |
|
782 |
|
// Pushes the current 'actualFormat' onto the capture device (SetFormat on
// the matching capability entry) and configures the sample grabber's
// media type to match. Returns false on any mismatch or COM failure.
// NOTE(review): early error returns leave CoInitialize() unbalanced, as
// in the rest of this file — confirm that is acceptable.
bool DSCameraSession::setProperties()
{
    CoInitialize(NULL);

    HRESULT hr;
    AM_MEDIA_TYPE am_media_type;
    AM_MEDIA_TYPE *pmt = NULL;
    VIDEOINFOHEADER *pvi = NULL;
    VIDEO_STREAM_CONFIG_CAPS scc;
    int iCount, iSize;
    hr = pConfig->GetNumberOfCapabilities(&iCount,&iSize);
    if(FAILED(hr)) {
        qWarning()<<"failed to get capabilities";
        return false;
    }
    // Walk the device's capability list looking for an entry matching the
    // requested frame size, then request the wanted subtype on it.
    for(int i=0;i<iCount;i++) {
        hr = pConfig->GetStreamCaps(i,&pmt,reinterpret_cast<BYTE*>(&scc));
        if(hr == S_OK) {
            pvi = (VIDEOINFOHEADER*)pmt->pbFormat;

            if((pmt->majortype == MEDIATYPE_Video) &&
                (pmt->formattype == FORMAT_VideoInfo)) {
                if((actualFormat.pixelFormat() == QVideoFrame::Format_RGB24) ||
                    (actualFormat.pixelFormat() == QVideoFrame::Format_RGB32) ||
                    (actualFormat.pixelFormat() == QVideoFrame::Format_YUYV)) {
                    if((actualFormat.frameWidth() == pvi->bmiHeader.biWidth) &&
                        (actualFormat.frameHeight() == pvi->bmiHeader.biHeight)) {
                        if(actualFormat.pixelFormat() == QVideoFrame::Format_RGB24)
                            pmt->subtype = MEDIASUBTYPE_RGB24;
                        else if(actualFormat.pixelFormat() == QVideoFrame::Format_YUYV)
                            pmt->subtype = MEDIASUBTYPE_YUY2;
                        else if(actualFormat.pixelFormat() == QVideoFrame::Format_RGB32)
                            // Intentional: RGB32 output is produced by
                            // converting RGB24 in captureFrame(), so the
                            // camera itself is asked for RGB24.
                            pmt->subtype = MEDIASUBTYPE_RGB24;
                        else if(actualFormat.pixelFormat() == QVideoFrame::Format_YUV420P)
                            pmt->subtype = MEDIASUBTYPE_I420;
                        else if(actualFormat.pixelFormat() == QVideoFrame::Format_RGB555)
                            pmt->subtype = MEDIASUBTYPE_RGB555;
                        else if(actualFormat.pixelFormat() == QVideoFrame::Format_UYVY)
                            pmt->subtype = MEDIASUBTYPE_UYVY;
                        else {
                            qWarning()<<"unknown format?";
                            return false;
                        }
                        hr = pConfig->SetFormat(pmt);
                        if(FAILED(hr)) {
                            qWarning()<<"failed to set format";
                            return false;
                        } else
                            break;
                    }
                }
            }
        }
    }
    // Set the sample grabber's accepted media type to match the capture
    // format (same RGB32 -> RGB24 mapping as above).
    ZeroMemory(&am_media_type, sizeof(am_media_type));
    am_media_type.majortype = MEDIATYPE_Video;

    if(actualFormat.pixelFormat() == QVideoFrame::Format_RGB32)
        am_media_type.subtype = MEDIASUBTYPE_RGB24;
    else if(actualFormat.pixelFormat() == QVideoFrame::Format_RGB24)
        am_media_type.subtype = MEDIASUBTYPE_RGB24;
    else if(actualFormat.pixelFormat() == QVideoFrame::Format_YUYV)
        am_media_type.subtype = MEDIASUBTYPE_YUY2;
    else if(actualFormat.pixelFormat() == QVideoFrame::Format_YUV420P)
        am_media_type.subtype = MEDIASUBTYPE_I420;
    else if(actualFormat.pixelFormat() == QVideoFrame::Format_RGB555)
        am_media_type.subtype = MEDIASUBTYPE_RGB555;
    else if(actualFormat.pixelFormat() == QVideoFrame::Format_UYVY)
        am_media_type.subtype = MEDIASUBTYPE_UYVY;
    else {
        qWarning()<<"unknown format? for SG";
        return false;
    }

    am_media_type.formattype = FORMAT_VideoInfo;
    hr = pSG->SetMediaType(&am_media_type);
    if(FAILED(hr)) {
        qWarning()<<"failed to set video format on grabber";
        return false;
    }
    // Cache the connected type; BufferCB validates frames against it.
    pSG->GetConnectedMediaType(&StillMediaType);

    CoUninitialize();

    return true;
}
|
870 |
|
// Opens the stream: adds the capture and sample-grabber filters to the
// graph and connects them. Builds the graph first if needed.
// Returns true on success, false otherwise.
bool DSCameraSession::openStream()
{
    //Opens the stream for reading and allocates any neccesary resources needed
    //Return true if success, false otherwise

    if(opened) return true;

    if(!graph)
        graph = createFilterGraph();

    if(!graph)
        return false;

    CoInitialize(NULL);

    HRESULT hr;

    hr = pGraph->AddFilter(pCap, L"Capture Filter");
    if(FAILED(hr)) {
        qWarning()<<"failed to create capture filter";
        return false;
    }
    hr = pGraph->AddFilter(pSG_Filter, L"Sample Grabber");
    if(FAILED(hr)) {
        qWarning()<<"failed to add sample grabber";
        return false;
    }
    // Connect capture pin -> sample grabber (no renderer is attached).
    hr = pBuild->RenderStream(&PIN_CATEGORY_CAPTURE,&MEDIATYPE_Video,
                              pCap,NULL,pSG_Filter);
    if(FAILED(hr)) {
        qWarning()<<"failed to renderstream";
        return false;
    }
    // Cache the negotiated media type; BufferCB validates against it.
    pSG->GetConnectedMediaType(&StillMediaType);
    // NOTE(review): this drops a reference on the pSG_Filter member that
    // closeStream()/the destructor release again via SAFE_RELEASE — looks
    // like a possible over-release; confirm the intended ref ownership.
    pSG_Filter->Release();

    CoUninitialize();

    return true;
}
|
911 |
|
912 void DSCameraSession::closeStream() |
|
913 { |
|
914 // Closes the stream and internally frees any resources used |
|
915 opened = false; |
|
916 |
|
917 HRESULT hr; |
|
918 IPin *pPin = 0; |
|
919 hr = GetPin(pCap,PINDIR_OUTPUT,&pPin); |
|
920 if(FAILED(hr)) { |
|
921 qWarning()<<"failed to disconnect capture filter"; |
|
922 return; |
|
923 } |
|
924 pGraph->Disconnect(pPin); |
|
925 if(FAILED(hr)) { |
|
926 qWarning()<<"failed to disconnect grabber filter"; |
|
927 return; |
|
928 } |
|
929 hr = GetPin(pSG_Filter,PINDIR_INPUT,&pPin); |
|
930 pGraph->Disconnect(pPin); |
|
931 pGraph->RemoveFilter(pSG_Filter); |
|
932 pGraph->RemoveFilter(pCap); |
|
933 |
|
934 SAFE_RELEASE(pCap); |
|
935 SAFE_RELEASE(pSG_Filter); |
|
936 SAFE_RELEASE(pGraph); |
|
937 SAFE_RELEASE(pBuild); |
|
938 |
|
939 graph = false; |
|
940 } |
|
941 |
|
// Applies the negotiated format and runs the graph; opens the stream
// first if necessary. Returns true when the graph is running.
bool DSCameraSession::startStream()
{
    // Starts the stream, by emitting either QVideoPackets
    // or QvideoFrames, depending on Format chosen

    if(!setProperties()) {
        qWarning()<<"try to create!!!!";
        // Recovery path: rebuild the connection and bail out.
        // NOTE(review): returning false when openStream() *succeeds*
        // looks inverted (success -> failure return, failure -> fall
        // through and retry below) — confirm the intended control flow.
        closeStream();
        if(openStream())
            return false;
    }

    if(!opened) {
        opened = openStream();
    }
    if(!opened) {
        qWarning()<<"failed to openStream()";
        return false;
    }

    HRESULT hr;

    hr = pGraph->QueryInterface(IID_IMediaControl,(void**)&pControl);
    if(FAILED(hr)) {
        qWarning()<<"failed to get stream control";
        return false;
    }

    hr = pControl->Run();
    if(FAILED(hr)) {
        qWarning()<<"failed to start";
        return false;
    } else {
        // Drop the reference taken by the QueryInterface above.
        // NOTE(review): pControl is a member also used elsewhere —
        // confirm this per-call acquire/release pattern is intended.
        pControl->Release();
    }
    active = true;
    return true;
}
|
980 |
|
// Stops the running graph and closes the stream if it was open.
void DSCameraSession::stopStream()
{
    // Stops the stream from emitting packets

    HRESULT hr;

    hr = pGraph->QueryInterface(IID_IMediaControl,(void**)&pControl);
    if(FAILED(hr)) {
        qWarning()<<"failed to get stream control";
        return;
    }

    hr = pControl->Stop();
    if(FAILED(hr)) {
        qWarning()<<"failed to stop";
        return;
    } else {
        // Drop the reference taken by the QueryInterface above.
        pControl->Release();
    }
    active = false;

    if(opened)
        closeStream();
}
|
1005 |
|
// Pauses the graph (frames stop flowing but the stream stays open).
void DSCameraSession::suspendStream()
{
    // Pauses the stream
    HRESULT hr;

    hr = pGraph->QueryInterface(IID_IMediaControl,(void**)&pControl);
    if(FAILED(hr)) {
        qWarning()<<"failed to get stream control";
        return;
    }

    hr = pControl->Pause();
    if(FAILED(hr)) {
        qWarning()<<"failed to pause";
        return;
    } else {
        // Drop the reference taken by the QueryInterface above.
        pControl->Release();
    }
    active = false;
}

// Resumes a paused stream by re-running the start sequence.
void DSCameraSession::resumeStream()
{
    // Resumes a paused stream
    startStream();
}
|
1032 |
|
1033 |
|
1034 |
|
1035 |
|