65 |
64 |
66 QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::CaptureMode captureMode, QObject *parent) |
65 QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::CaptureMode captureMode, QObject *parent) |
67 :QObject(parent), |
66 :QObject(parent), |
68 m_state(StoppedState), |
67 m_state(StoppedState), |
69 m_pendingState(StoppedState), |
68 m_pendingState(StoppedState), |
|
69 m_waitingForEos(false), |
70 m_pipelineMode(EmptyPipeline), |
70 m_pipelineMode(EmptyPipeline), |
71 m_captureMode(captureMode), |
71 m_captureMode(captureMode), |
72 m_audioInputFactory(0), |
72 m_audioInputFactory(0), |
73 m_audioPreviewFactory(0), |
73 m_audioPreviewFactory(0), |
74 m_videoInputFactory(0), |
74 m_videoInputFactory(0), |
92 m_bus = gst_element_get_bus(m_pipeline); |
92 m_bus = gst_element_get_bus(m_pipeline); |
93 m_busHelper = new QGstreamerBusHelper(m_bus, this); |
93 m_busHelper = new QGstreamerBusHelper(m_bus, this); |
94 m_busHelper->installSyncEventFilter(this); |
94 m_busHelper->installSyncEventFilter(this); |
95 connect(m_busHelper, SIGNAL(message(QGstreamerMessage)), SLOT(busMessage(QGstreamerMessage))); |
95 connect(m_busHelper, SIGNAL(message(QGstreamerMessage)), SLOT(busMessage(QGstreamerMessage))); |
96 m_audioEncodeControl = new QGstreamerAudioEncode(this); |
96 m_audioEncodeControl = new QGstreamerAudioEncode(this); |
97 m_videoEncodeControl = new QGstreamerVideoEncode(this); |
97 m_videoEncodeControl = new QGstreamerVideoEncode(this); |
98 m_imageEncodeControl = new QGstreamerImageEncode(this); |
|
99 m_recorderControl = new QGstreamerRecorderControl(this); |
98 m_recorderControl = new QGstreamerRecorderControl(this); |
100 m_mediaContainerControl = new QGstreamerMediaContainerControl(this); |
99 m_mediaContainerControl = new QGstreamerMediaContainerControl(this); |
101 |
100 |
102 setState(StoppedState); |
101 setState(StoppedState); |
103 } |
102 } |
104 |
103 |
105 QGstreamerCaptureSession::~QGstreamerCaptureSession() |
104 QGstreamerCaptureSession::~QGstreamerCaptureSession() |
106 { |
105 { |
107 gst_object_unref(GST_OBJECT(m_pipeline)); |
106 gst_object_unref(GST_OBJECT(m_pipeline)); |
108 } |
107 } |
109 |
|
110 |
108 |
111 GstElement *QGstreamerCaptureSession::buildEncodeBin() |
109 GstElement *QGstreamerCaptureSession::buildEncodeBin() |
112 { |
110 { |
113 bool ok = true; |
111 bool ok = true; |
114 |
112 |
327 } |
325 } |
328 |
326 |
329 return previewElement; |
327 return previewElement; |
330 } |
328 } |
331 |
329 |
332 |
|
333 static gboolean passImageFilter(GstElement *element, |
|
334 GstBuffer *buffer, |
|
335 void *appdata) |
|
336 { |
|
337 Q_UNUSED(element); |
|
338 Q_UNUSED(buffer); |
|
339 |
|
340 QGstreamerCaptureSession *session = (QGstreamerCaptureSession *)appdata; |
|
341 if (session->m_passImage || session->m_passPrerollImage) { |
|
342 session->m_passImage = false; |
|
343 |
|
344 if (session->m_passPrerollImage) { |
|
345 session->m_passPrerollImage = false; |
|
346 return TRUE; |
|
347 } |
|
348 session->m_passPrerollImage = false; |
|
349 |
|
350 QImage img; |
|
351 |
|
352 GstCaps *caps = gst_buffer_get_caps(buffer); |
|
353 if (caps) { |
|
354 GstStructure *structure = gst_caps_get_structure (caps, 0); |
|
355 gint width = 0; |
|
356 gint height = 0; |
|
357 |
|
358 if (structure && |
|
359 gst_structure_get_int(structure, "width", &width) && |
|
360 gst_structure_get_int(structure, "height", &height) && |
|
361 width > 0 && height > 0) { |
|
362 if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-yuv") == 0) { |
|
363 guint32 fourcc = 0; |
|
364 gst_structure_get_fourcc(structure, "format", &fourcc); |
|
365 |
|
366 if (fourcc == GST_MAKE_FOURCC('I','4','2','0')) { |
|
367 img = QImage(width/2, height/2, QImage::Format_RGB32); |
|
368 |
|
369 const uchar *data = (const uchar *)buffer->data; |
|
370 |
|
371 for (int y=0; y<height; y+=2) { |
|
372 const uchar *yLine = data + y*width; |
|
373 const uchar *uLine = data + width*height + y*width/4; |
|
374 const uchar *vLine = data + width*height*5/4 + y*width/4; |
|
375 |
|
376 for (int x=0; x<width; x+=2) { |
|
377 const qreal Y = 1.164*(yLine[x]-16); |
|
378 const int U = uLine[x/2]-128; |
|
379 const int V = vLine[x/2]-128; |
|
380 |
|
381 int b = qBound(0, int(Y + 2.018*U), 255); |
|
382 int g = qBound(0, int(Y - 0.813*V - 0.391*U), 255); |
|
383 int r = qBound(0, int(Y + 1.596*V), 255); |
|
384 |
|
385 img.setPixel(x/2,y/2,qRgb(r,g,b)); |
|
386 } |
|
387 } |
|
388 } |
|
389 |
|
390 } else if (qstrcmp(gst_structure_get_name(structure), "video/x-raw-rgb") == 0) { |
|
391 QImage::Format format = QImage::Format_Invalid; |
|
392 int bpp = 0; |
|
393 gst_structure_get_int(structure, "bpp", &bpp); |
|
394 |
|
395 if (bpp == 24) |
|
396 format = QImage::Format_RGB888; |
|
397 else if (bpp == 32) |
|
398 format = QImage::Format_RGB32; |
|
399 |
|
400 if (format != QImage::Format_Invalid) { |
|
401 img = QImage((const uchar *)buffer->data, |
|
402 width, |
|
403 height, |
|
404 format); |
|
405 img.bits(); //detach |
|
406 } |
|
407 } |
|
408 } |
|
409 gst_caps_unref(caps); |
|
410 } |
|
411 |
|
412 static int signalIndex = session->metaObject()->indexOfSignal("imageCaptured(QString,QImage)"); |
|
413 session->metaObject()->method(signalIndex).invoke(session, |
|
414 Qt::QueuedConnection, |
|
415 Q_ARG(QString,session->m_imageFileName), |
|
416 Q_ARG(QImage,img)); |
|
417 |
|
418 return TRUE; |
|
419 } else { |
|
420 return FALSE; |
|
421 } |
|
422 } |
|
423 |
|
424 static gboolean saveImageFilter(GstElement *element, |
|
425 GstBuffer *buffer, |
|
426 GstPad *pad, |
|
427 void *appdata) |
|
428 { |
|
429 Q_UNUSED(element); |
|
430 Q_UNUSED(pad); |
|
431 QGstreamerCaptureSession *session = (QGstreamerCaptureSession *)appdata; |
|
432 |
|
433 QString fileName = session->m_imageFileName; |
|
434 |
|
435 if (!fileName.isEmpty()) { |
|
436 QFile f(fileName); |
|
437 if (f.open(QFile::WriteOnly)) { |
|
438 f.write((const char *)buffer->data, buffer->size); |
|
439 f.close(); |
|
440 |
|
441 static int signalIndex = session->metaObject()->indexOfSignal("imageSaved(QString)"); |
|
442 session->metaObject()->method(signalIndex).invoke(session, |
|
443 Qt::QueuedConnection, |
|
444 Q_ARG(QString,fileName)); |
|
445 } |
|
446 } |
|
447 |
|
448 return TRUE; |
|
449 } |
|
450 |
|
451 GstElement *QGstreamerCaptureSession::buildImageCapture() |
|
452 { |
|
453 GstElement *bin = gst_bin_new("image-capture-bin"); |
|
454 GstElement *queue = gst_element_factory_make("queue", "queue-image-capture"); |
|
455 GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-image-capture"); |
|
456 GstElement *encoder = gst_element_factory_make("jpegenc", "image-encoder"); |
|
457 GstElement *sink = gst_element_factory_make("fakesink","sink-image-capture"); |
|
458 |
|
459 GstPad *pad = gst_element_get_static_pad(queue, "src"); |
|
460 Q_ASSERT(pad); |
|
461 gst_pad_add_buffer_probe(pad, G_CALLBACK(passImageFilter), this); |
|
462 |
|
463 g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL); |
|
464 g_signal_connect(G_OBJECT(sink), "handoff", |
|
465 G_CALLBACK(saveImageFilter), this); |
|
466 |
|
467 gst_bin_add_many(GST_BIN(bin), queue, colorspace, encoder, sink, NULL); |
|
468 gst_element_link_many(queue, colorspace, encoder, sink, NULL); |
|
469 |
|
470 // add ghostpads |
|
471 pad = gst_element_get_static_pad(queue, "sink"); |
|
472 Q_ASSERT(pad); |
|
473 gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("imagesink", pad)); |
|
474 gst_object_unref(GST_OBJECT(pad)); |
|
475 |
|
476 m_passImage = false; |
|
477 m_passPrerollImage = true; |
|
478 m_imageFileName = QString(); |
|
479 |
|
480 return bin; |
|
481 } |
|
482 |
|
483 void QGstreamerCaptureSession::captureImage(const QString &fileName) |
|
484 { |
|
485 m_imageFileName = fileName; |
|
486 m_passImage = true; |
|
487 } |
|
488 |
|
489 |
|
// Removes `element` from the capture pipeline (dropping the bin's reference
// to it) and nulls the pointer.  Wrapped in do { } while (0) so the macro
// expands to exactly one statement: unlike the previous bare-brace form it
// is safe in unbraced if/else bodies and still takes the usual trailing
// semicolon at every existing call site.
#define REMOVE_ELEMENT(element) do { if (element) { gst_bin_remove(GST_BIN(m_pipeline), element); element = 0; } } while (0)
491 |
331 |
492 bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMode newMode) |
332 bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMode newMode) |
493 { |
333 { |
494 REMOVE_ELEMENT(m_audioSrc); |
334 REMOVE_ELEMENT(m_audioSrc); |
522 } |
361 } |
523 if (m_captureMode & Video) { |
362 if (m_captureMode & Video) { |
524 m_videoSrc = buildVideoSrc(); |
363 m_videoSrc = buildVideoSrc(); |
525 m_videoTee = gst_element_factory_make("tee", "video-preview-tee"); |
364 m_videoTee = gst_element_factory_make("tee", "video-preview-tee"); |
526 m_videoPreviewQueue = gst_element_factory_make("queue", "video-preview-queue"); |
365 m_videoPreviewQueue = gst_element_factory_make("queue", "video-preview-queue"); |
527 m_videoPreview = buildVideoPreview(); |
366 m_videoPreview = buildVideoPreview(); |
528 m_imageCaptureBin = buildImageCapture(); |
367 |
529 |
368 ok &= m_videoSrc && m_videoTee && m_videoPreviewQueue && m_videoPreview; |
530 ok &= m_videoSrc && m_videoTee && m_videoPreviewQueue && m_videoPreview && m_imageCaptureBin; |
|
531 |
369 |
532 if (ok) { |
370 if (ok) { |
533 gst_bin_add_many(GST_BIN(m_pipeline), m_videoSrc, m_videoTee, |
371 gst_bin_add_many(GST_BIN(m_pipeline), m_videoSrc, m_videoTee, |
534 m_videoPreviewQueue, m_videoPreview, |
372 m_videoPreviewQueue, m_videoPreview, NULL); |
535 m_imageCaptureBin, NULL); |
|
536 |
373 |
537 ok &= gst_element_link(m_videoSrc, m_videoTee); |
374 ok &= gst_element_link(m_videoSrc, m_videoTee); |
538 ok &= gst_element_link(m_videoTee, m_videoPreviewQueue); |
375 ok &= gst_element_link(m_videoTee, m_videoPreviewQueue); |
539 ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview); |
376 ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview); |
540 ok &= gst_element_link(m_videoTee, m_imageCaptureBin); |
|
541 } |
377 } |
542 } |
378 } |
543 break; |
379 break; |
544 case RecordingPipeline: |
380 case RecordingPipeline: |
545 m_encodeBin = buildEncodeBin(); |
381 m_encodeBin = buildEncodeBin(); |
712 newMode = EmptyPipeline; |
548 newMode = EmptyPipeline; |
713 break; |
549 break; |
714 } |
550 } |
715 |
551 |
716 if (newMode != m_pipelineMode) { |
552 if (newMode != m_pipelineMode) { |
|
553 if (m_pipelineMode == PreviewAndRecordingPipeline) { |
|
554 if (!m_waitingForEos) { |
|
555 m_waitingForEos = true; |
|
556 //qDebug() << "Waiting for EOS"; |
|
557 //with live sources it's necessary to send EOS even to pipeline |
|
558 //before going to STOPPED state |
|
559 gst_element_send_event(m_pipeline, gst_event_new_eos()); |
|
560 return; |
|
561 } else { |
|
562 m_waitingForEos = false; |
|
563 //qDebug() << "EOS received"; |
|
564 } |
|
565 } |
|
566 |
|
567 //select suitable default codecs/containers, if necessary |
|
568 m_recorderControl->applySettings(); |
|
569 |
717 gst_element_set_state(m_pipeline, GST_STATE_NULL); |
570 gst_element_set_state(m_pipeline, GST_STATE_NULL); |
718 |
571 |
719 //It would be better to do this async. but |
572 //It would be better to do this async. but |
720 //gstreamer doesn't notify about pipeline went to NULL state |
573 //gstreamer doesn't notify about pipeline went to NULL state |
721 waitForStopped(); |
574 waitForStopped(); |