|
1 /**************************************************************************** |
|
2 ** |
|
3 ** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies). |
|
4 ** All rights reserved. |
|
5 ** Contact: Nokia Corporation (qt-info@nokia.com) |
|
6 ** |
|
7 ** This file is part of the Qt Mobility Components. |
|
8 ** |
|
9 ** $QT_BEGIN_LICENSE:LGPL$ |
|
10 ** No Commercial Usage |
|
11 ** This file contains pre-release code and may not be distributed. |
|
12 ** You may use this file in accordance with the terms and conditions |
|
13 ** contained in the Technology Preview License Agreement accompanying |
|
14 ** this package. |
|
15 ** |
|
16 ** GNU Lesser General Public License Usage |
|
17 ** Alternatively, this file may be used under the terms of the GNU Lesser |
|
18 ** General Public License version 2.1 as published by the Free Software |
|
19 ** Foundation and appearing in the file LICENSE.LGPL included in the |
|
20 ** packaging of this file. Please review the following information to |
|
21 ** ensure the GNU Lesser General Public License version 2.1 requirements |
|
22 ** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. |
|
23 ** |
|
24 ** In addition, as a special exception, Nokia gives you certain additional |
|
25 ** rights. These rights are described in the Nokia Qt LGPL Exception |
|
26 ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. |
|
27 ** |
|
28 ** If you have questions regarding the use of this file, please contact |
|
29 ** Nokia at qt-info@nokia.com. |
|
30 ** |
|
31 ** |
|
32 ** |
|
33 ** |
|
34 ** |
|
35 ** |
|
36 ** |
|
37 ** |
|
38 ** $QT_END_LICENSE$ |
|
39 ** |
|
40 ****************************************************************************/ |
|
41 |
|
42 #include "qgstreamercapturesession.h" |
|
43 #include "qgstreamerrecordercontrol.h" |
|
44 #include "qgstreamermediacontainercontrol.h" |
|
45 #include "qgstreameraudioencode.h" |
|
46 #include "qgstreamervideoencode.h" |
|
47 #include "qgstreamerbushelper.h" |
|
48 #include <qmediarecorder.h> |
|
49 |
|
50 #include <gst/gsttagsetter.h> |
|
51 #include <gst/gstversion.h> |
|
52 |
|
53 #include <QtCore/qdebug.h> |
|
54 #include <QtCore/qurl.h> |
|
55 #include <QtCore/qset.h> |
|
56 #include <QCoreApplication> |
|
57 #include <QtCore/qmetaobject.h> |
|
58 #include <QtCore/qfile.h> |
|
59 |
|
60 #include <QtGui/qimage.h> |
|
61 |
|
62 #define gstRef(element) { gst_object_ref(GST_OBJECT(element)); gst_object_sink(GST_OBJECT(element)); } |
|
63 #define gstUnref(element) { if (element) { gst_object_unref(GST_OBJECT(element)); element = 0; } } |
|
64 |
|
/*!
    Creates a capture session for \a captureMode (audio, video or both).

    The session owns a single top-level GStreamer pipeline; the actual
    capture graph inside it is built lazily by rebuildGraph() whenever
    setState() requires a different pipeline mode.
*/
QGstreamerCaptureSession::QGstreamerCaptureSession(QGstreamerCaptureSession::CaptureMode captureMode, QObject *parent)
    :QObject(parent),
     m_state(StoppedState),
     m_pendingState(StoppedState),
     m_waitingForEos(false),
     m_pipelineMode(EmptyPipeline),
     m_captureMode(captureMode),
     m_audioInputFactory(0),
     m_audioPreviewFactory(0),
     m_videoInputFactory(0),
     m_videoPreviewFactory(0),
     m_audioSrc(0),
     m_audioTee(0),
     m_audioPreviewQueue(0),
     m_audioPreview(0),
     m_videoSrc(0),
     m_videoTee(0),
     m_videoPreviewQueue(0),
     m_videoPreview(0),
     m_imageCaptureBin(0),
     m_encodeBin(0),
     m_passImage(false),
     m_passPrerollImage(false)
{
    // Top-level pipeline hosting the dynamically (re)built capture graphs.
    m_pipeline = gst_pipeline_new("media-capture-pipeline");
    gstRef(m_pipeline);

    // Route bus traffic through Qt: asynchronous messages arrive via the
    // busMessage() slot, synchronous ones (e.g. prepare-xwindow-id) via
    // processSyncMessage() on the streaming thread.
    m_bus = gst_element_get_bus(m_pipeline);
    m_busHelper = new QGstreamerBusHelper(m_bus, this);
    m_busHelper->installSyncEventFilter(this);
    connect(m_busHelper, SIGNAL(message(QGstreamerMessage)), SLOT(busMessage(QGstreamerMessage)));
    m_audioEncodeControl = new QGstreamerAudioEncode(this);
    m_videoEncodeControl = new QGstreamerVideoEncode(this);
    m_recorderControl = new QGstreamerRecorderControl(this);
    m_mediaContainerControl = new QGstreamerMediaContainerControl(this);

    setState(StoppedState);
}
|
103 |
|
QGstreamerCaptureSession::~QGstreamerCaptureSession()
{
    // Stop the session, force the pipeline to NULL so all elements release
    // their resources, then drop the reference taken with gstRef() in the
    // constructor.
    setState(StoppedState);
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(m_pipeline));
}
|
110 |
|
111 GstElement *QGstreamerCaptureSession::buildEncodeBin() |
|
112 { |
|
113 bool ok = true; |
|
114 |
|
115 GstElement *encodeBin = gst_bin_new("encode-bin"); |
|
116 |
|
117 GstElement *muxer = gst_element_factory_make( m_mediaContainerControl->formatElementName().constData(), "muxer"); |
|
118 if (!muxer) { |
|
119 gst_object_unref(encodeBin); |
|
120 encodeBin = 0; |
|
121 return 0; |
|
122 } |
|
123 |
|
124 GstElement *fileSink = gst_element_factory_make("filesink", "filesink"); |
|
125 |
|
126 g_object_set(G_OBJECT(fileSink), "location", m_sink.toString().toLocal8Bit().constData(), NULL); |
|
127 |
|
128 gst_bin_add_many(GST_BIN(encodeBin), muxer, fileSink, NULL); |
|
129 ok &= gst_element_link(muxer, fileSink); |
|
130 |
|
131 if (m_captureMode & Audio) { |
|
132 GstElement *audioConvert = gst_element_factory_make("audioconvert", "audioconvert"); |
|
133 GstElement *audioQueue = gst_element_factory_make("queue", "audio-encode-queue"); |
|
134 GstElement *volume = gst_element_factory_make("volume", "volume"); |
|
135 GstElement *audioEncoder = m_audioEncodeControl->createEncoder(); |
|
136 |
|
137 ok &= audioEncoder != 0; |
|
138 |
|
139 gst_bin_add_many(GST_BIN(encodeBin), audioConvert, audioQueue, volume, audioEncoder, NULL); |
|
140 |
|
141 ok &= gst_element_link_many(audioConvert, audioQueue, volume, audioEncoder, muxer, NULL); |
|
142 //g_object_set(G_OBJECT(volume), "volume", 10.0, NULL); |
|
143 |
|
144 // add ghostpads |
|
145 GstPad *pad = gst_element_get_static_pad(audioConvert, "sink"); |
|
146 Q_ASSERT(pad); |
|
147 gst_element_add_pad(GST_ELEMENT(encodeBin), gst_ghost_pad_new("audiosink", pad)); |
|
148 gst_object_unref(GST_OBJECT(pad)); |
|
149 } |
|
150 |
|
151 if (m_captureMode & Video) { |
|
152 GstElement *videoQueue = gst_element_factory_make("queue", "video-encode-queue"); |
|
153 GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-encoder"); |
|
154 GstElement *videoscale = gst_element_factory_make("videoscale","videoscale-encoder"); |
|
155 |
|
156 GstElement *videoEncoder = m_videoEncodeControl->createEncoder(); |
|
157 |
|
158 ok &= videoEncoder != 0; |
|
159 |
|
160 gst_bin_add_many(GST_BIN(encodeBin), videoQueue, colorspace, videoscale, videoEncoder, NULL); |
|
161 ok &= gst_element_link_many(videoQueue, colorspace, videoscale, videoEncoder, muxer, NULL); |
|
162 |
|
163 // add ghostpads |
|
164 GstPad *pad = gst_element_get_static_pad(videoQueue, "sink"); |
|
165 Q_ASSERT(pad); |
|
166 gst_element_add_pad(GST_ELEMENT(encodeBin), gst_ghost_pad_new("videosink", pad)); |
|
167 gst_object_unref(GST_OBJECT(pad)); |
|
168 } |
|
169 |
|
170 if (!ok) { |
|
171 gst_object_unref(encodeBin); |
|
172 encodeBin = 0; |
|
173 } |
|
174 |
|
175 return encodeBin; |
|
176 } |
|
177 |
|
/*!
    Creates the audio capture source element.

    An injected input factory takes precedence.  Otherwise a platform
    specific default is used; on generic platforms the element type and
    device are selected from the "alsa:"/"oss:"/"pulseaudio:" prefix of
    m_captureDevice, falling back to autoaudiosrc.
*/
GstElement *QGstreamerCaptureSession::buildAudioSrc()
{
    GstElement *audioSrc = 0;
    if (m_audioInputFactory)
        audioSrc = m_audioInputFactory->buildElement();
    else {

#if defined(Q_WS_MAEMO_5) || defined(Q_WS_MAEMO_6)
        audioSrc = gst_element_factory_make("pulsesrc", "audio_src");
#elif defined(QT_QWS_N810)
        audioSrc = gst_element_factory_make("dsppcmsrc", "audio_src");
#else
        QString elementName = "alsasrc";
        QString device;

        if (m_captureDevice.startsWith("alsa:")) {
            device = m_captureDevice.mid(QString("alsa:").length());
        } else if (m_captureDevice.startsWith("oss:")) {
            elementName = "osssrc";
            device = m_captureDevice.mid(QString("oss:").length());
        } else if (m_captureDevice.startsWith("pulseaudio:")) {
            // NOTE(review): anything after "pulseaudio:" is discarded, so
            // pulsesrc always opens the default device — confirm whether
            // per-device selection was intended here.
            elementName = "pulsesrc";
        } else {
            elementName = "autoaudiosrc";
        }

        audioSrc = gst_element_factory_make(elementName.toAscii().constData(), "audio_src");
        if (audioSrc && !device.isEmpty())
            g_object_set(G_OBJECT(audioSrc), "device", device.toLocal8Bit().constData(), NULL);
#endif
    }

    // Fall back to a fakesrc so the rest of the graph can still be linked,
    // but report the failure to the recorder first.
    if (!audioSrc) {
        emit error(int(QMediaRecorder::ResourceError), tr("Could not create an audio source element"));
        audioSrc = gst_element_factory_make("fakesrc", NULL);
    }

    return audioSrc;
}
|
217 |
|
/*!
    Creates the sink element for the audio preview branch.

    Without an injected preview factory the samples are simply discarded by
    a fakesink; the #else branch below keeps a disabled libvisual scope
    visualisation experiment around for reference.
*/
GstElement *QGstreamerCaptureSession::buildAudioPreview()
{
    GstElement *previewElement = 0;

    if (m_audioPreviewFactory) {
        previewElement = m_audioPreviewFactory->buildElement();
    } else {


#if 1
        previewElement = gst_element_factory_make("fakesink", "audio-preview");
#else
        GstElement *bin = gst_bin_new("audio-preview-bin");
        GstElement *visual = gst_element_factory_make("libvisual_lv_scope", "audio-preview");
        GstElement *sink = gst_element_factory_make("ximagesink", NULL);
        gst_bin_add_many(GST_BIN(bin), visual, sink, NULL);
        gst_element_link_many(visual,sink, NULL);


        // add ghostpads
        GstPad *pad = gst_element_get_static_pad(visual, "sink");
        Q_ASSERT(pad);
        gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("audiosink", pad));
        gst_object_unref(GST_OBJECT(pad));

        previewElement = bin;
#endif
    }

    return previewElement;
}
|
249 |
|
250 GstElement *QGstreamerCaptureSession::buildVideoSrc() |
|
251 { |
|
252 GstElement *videoSrc = 0; |
|
253 if (m_videoInputFactory) { |
|
254 videoSrc = m_videoInputFactory->buildElement(); |
|
255 } else { |
|
256 videoSrc = gst_element_factory_make("videotestsrc", "video_test_src"); |
|
257 //videoSrc = gst_element_factory_make("v4l2src", "video_test_src"); |
|
258 } |
|
259 |
|
260 return videoSrc; |
|
261 } |
|
262 |
|
/*!
    Creates the sink branch for the video preview.

    With an injected preview factory, the returned bin converts the stream
    (ffmpegcolorspace) and constrains it with a capsfilter built from the
    encoder's resolution/framerate settings, so the preview matches what is
    recorded.  Without a factory the frames are dropped by a fakesink; a
    disabled ximagesink variant is kept in the #else branch for reference.
*/
GstElement *QGstreamerCaptureSession::buildVideoPreview()
{
    GstElement *previewElement = 0;

    if (m_videoPreviewFactory) {
        GstElement *bin = gst_bin_new("video-preview-bin");
        GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview");
        GstElement *capsFilter = gst_element_factory_make("capsfilter", "capsfilter-video-preview");
        GstElement *preview = m_videoPreviewFactory->buildElement();

        gst_bin_add_many(GST_BIN(bin), colorspace, capsFilter, preview, NULL);
        gst_element_link(colorspace,capsFilter);
        gst_element_link(capsFilter,preview);

        // Only restrict the preview caps when the encoder settings actually
        // specify a resolution or frame rate.
        QVideoEncoderSettings videoSettings = m_videoEncodeControl->videoSettings();
        if (!videoSettings.resolution().isEmpty() || videoSettings.frameRate() > 0.001) {
            QSize resolution = videoSettings.resolution();
            qreal frameRate = videoSettings.frameRate();

            // Build one structure per raw video format (0.10 caps names) so
            // either YUV or RGB previews can satisfy the filter.
            GstCaps *caps = gst_caps_new_empty();
            QStringList structureTypes;
            structureTypes << "video/x-raw-yuv" << "video/x-raw-rgb";

            foreach(const QString &structureType, structureTypes) {
                GstStructure *structure = gst_structure_new(structureType.toAscii().constData(), NULL);

                if (!resolution.isEmpty()) {
                    gst_structure_set(structure, "width", G_TYPE_INT, resolution.width(), NULL);
                    gst_structure_set(structure, "height", G_TYPE_INT, resolution.height(), NULL);
                }

                if (frameRate > 0.001) {
                    // Frame rate must be expressed as a rational for caps.
                    QPair<int,int> rate = m_videoEncodeControl->rateAsRational();

                    //qDebug() << "frame rate:" << num << denum;

                    gst_structure_set(structure, "framerate", GST_TYPE_FRACTION, rate.first, rate.second, NULL);
                }

                gst_caps_append_structure(caps,structure);
            }

            //qDebug() << "set video preview caps filter:" << gst_caps_to_string(caps);

            g_object_set(G_OBJECT(capsFilter), "caps", caps, NULL);

        }

        // Expose the head of the chain so the preview tee can link to the bin.
        // add ghostpads
        GstPad *pad = gst_element_get_static_pad(colorspace, "sink");
        Q_ASSERT(pad);
        gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("videosink", pad));
        gst_object_unref(GST_OBJECT(pad));

        previewElement = bin;
    } else {
#if 1
        previewElement = gst_element_factory_make("fakesink", "video-preview");
#else
        GstElement *bin = gst_bin_new("video-preview-bin");
        GstElement *colorspace = gst_element_factory_make("ffmpegcolorspace", "ffmpegcolorspace-preview");
        GstElement *preview = gst_element_factory_make("ximagesink", "video-preview");
        gst_bin_add_many(GST_BIN(bin), colorspace, preview, NULL);
        gst_element_link(colorspace,preview);

        // add ghostpads
        GstPad *pad = gst_element_get_static_pad(colorspace, "sink");
        Q_ASSERT(pad);
        gst_element_add_pad(GST_ELEMENT(bin), gst_ghost_pad_new("videosink", pad));
        gst_object_unref(GST_OBJECT(pad));

        previewElement = bin;
#endif
    }

    return previewElement;
}
|
340 |
|
341 #define REMOVE_ELEMENT(element) { if (element) {gst_bin_remove(GST_BIN(m_pipeline), element); element = 0;} } |
|
342 |
|
/*!
    Tears down the current capture graph and assembles the one matching
    \a newMode inside m_pipeline.  Callers are expected to have put the
    pipeline into the NULL state first (see setState()).

    On failure an error() is emitted, the pipeline is emptied again and
    false is returned; on success m_pipelineMode is updated.
*/
bool QGstreamerCaptureSession::rebuildGraph(QGstreamerCaptureSession::PipelineMode newMode)
{
    // Removing an element from the bin drops the bin's reference to it.
    REMOVE_ELEMENT(m_audioSrc);
    REMOVE_ELEMENT(m_audioPreview);
    REMOVE_ELEMENT(m_audioPreviewQueue);
    REMOVE_ELEMENT(m_audioTee);
    REMOVE_ELEMENT(m_videoSrc);
    REMOVE_ELEMENT(m_videoPreview);
    REMOVE_ELEMENT(m_videoPreviewQueue);
    REMOVE_ELEMENT(m_videoTee);
    REMOVE_ELEMENT(m_encodeBin);

    bool ok = true;

    switch (newMode) {
        case EmptyPipeline:
            break;
        case PreviewPipeline:
            // Preview only: src [-> tee -> queue] -> preview per media type.
            // Note the audio path links src directly to the preview element.
            if (m_captureMode & Audio) {
                m_audioSrc = buildAudioSrc();
                m_audioPreview = buildAudioPreview();

                ok &= m_audioSrc && m_audioPreview;

                if (ok) {
                    gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioPreview, NULL);
                    ok &= gst_element_link(m_audioSrc, m_audioPreview);
                }
            }
            if (m_captureMode & Video) {
                m_videoSrc = buildVideoSrc();
                m_videoTee = gst_element_factory_make("tee", "video-preview-tee");
                m_videoPreviewQueue = gst_element_factory_make("queue", "video-preview-queue");
                m_videoPreview = buildVideoPreview();

                ok &= m_videoSrc && m_videoTee && m_videoPreviewQueue && m_videoPreview;

                if (ok) {
                    gst_bin_add_many(GST_BIN(m_pipeline), m_videoSrc, m_videoTee,
                                     m_videoPreviewQueue, m_videoPreview, NULL);

                    ok &= gst_element_link(m_videoSrc, m_videoTee);
                    ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
                    ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
                }
            }
            break;
        case RecordingPipeline:
            // Recording without preview: sources feed the encode bin directly
            // through its "audiosink"/"videosink" ghost pads.
            m_encodeBin = buildEncodeBin();
            gst_bin_add(GST_BIN(m_pipeline), m_encodeBin);

            if (m_captureMode & Audio) {
                m_audioSrc = buildAudioSrc();
                ok &= m_audioSrc != 0;

                gst_bin_add(GST_BIN(m_pipeline), m_audioSrc);
                ok &= gst_element_link(m_audioSrc, m_encodeBin);
            }

            if (m_captureMode & Video) {
                m_videoSrc = buildVideoSrc();
                ok &= m_videoSrc != 0;

                gst_bin_add(GST_BIN(m_pipeline), m_videoSrc);
                ok &= gst_element_link(m_videoSrc, m_encodeBin);
            }

            // Tags must be (re)applied to the freshly created encode bin.
            if (!m_metaData.isEmpty())
                setMetaData(m_metaData);

            break;
        case PreviewAndRecordingPipeline:
            // Preview + recording: each source goes through a tee that feeds
            // both the preview queue and the encode bin.
            m_encodeBin = buildEncodeBin();
            if (m_encodeBin)
                gst_bin_add(GST_BIN(m_pipeline), m_encodeBin);

            ok &= m_encodeBin != 0;

            if (ok && m_captureMode & Audio) {
                m_audioSrc = buildAudioSrc();
                m_audioPreview = buildAudioPreview();
                m_audioTee = gst_element_factory_make("tee", NULL);
                m_audioPreviewQueue = gst_element_factory_make("queue", NULL);

                ok &= m_audioSrc && m_audioPreview && m_audioTee && m_audioPreviewQueue;

                if (ok) {
                    gst_bin_add_many(GST_BIN(m_pipeline), m_audioSrc, m_audioTee,
                                     m_audioPreviewQueue, m_audioPreview, NULL);
                    ok &= gst_element_link(m_audioSrc, m_audioTee);
                    ok &= gst_element_link(m_audioTee, m_audioPreviewQueue);
                    ok &= gst_element_link(m_audioPreviewQueue, m_audioPreview);
                    ok &= gst_element_link(m_audioTee, m_encodeBin);
                }
            }

            if (ok && (m_captureMode & Video)) {
                m_videoSrc = buildVideoSrc();
                m_videoPreview = buildVideoPreview();
                m_videoTee = gst_element_factory_make("tee", NULL);
                m_videoPreviewQueue = gst_element_factory_make("queue", NULL);

                ok &= m_videoSrc && m_videoPreview && m_videoTee && m_videoPreviewQueue;

                if (ok) {
                    gst_bin_add_many(GST_BIN(m_pipeline), m_videoSrc, m_videoTee,
                                     m_videoPreviewQueue, m_videoPreview, NULL);
                    ok &= gst_element_link(m_videoSrc, m_videoTee);
                    ok &= gst_element_link(m_videoTee, m_videoPreviewQueue);
                    ok &= gst_element_link(m_videoPreviewQueue, m_videoPreview);
                    ok &= gst_element_link(m_videoTee, m_encodeBin);
                }
            }

            if (!m_metaData.isEmpty())
                setMetaData(m_metaData);

            break;
    }

    if (!ok) {
        emit error(int(QMediaRecorder::FormatError),tr("Failed to build media capture pipeline."));
    }

    // Debug dumps of the resulting graph (no-ops unless gst debug enabled).
    dumpGraph( QString("rebuild_graph_%1_%2").arg(m_pipelineMode).arg(newMode) );
    if (m_encodeBin) {
        QString fileName = QString("rebuild_graph_encode_%1_%2").arg(m_pipelineMode).arg(newMode);
#if !(GST_DISABLE_GST_DEBUG) && (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
        _gst_debug_bin_to_dot_file(GST_BIN(m_encodeBin), GST_DEBUG_GRAPH_SHOW_ALL, fileName.toAscii());
#endif
    }

    if (ok) {
        m_pipelineMode = newMode;
    } else {
        // Failed half-built graph: strip everything back out again.
        m_pipelineMode = EmptyPipeline;

        REMOVE_ELEMENT(m_audioSrc);
        REMOVE_ELEMENT(m_audioPreview);
        REMOVE_ELEMENT(m_audioPreviewQueue);
        REMOVE_ELEMENT(m_audioTee);
        REMOVE_ELEMENT(m_videoSrc);
        REMOVE_ELEMENT(m_videoPreview);
        REMOVE_ELEMENT(m_videoPreviewQueue);
        REMOVE_ELEMENT(m_videoTee);
        REMOVE_ELEMENT(m_encodeBin);
    }

    return ok;
}
|
493 |
|
/*!
    Writes a graphviz .dot dump of the whole pipeline under \a fileName
    (output directory controlled by gstreamer's GST_DEBUG_DUMP_DOT_DIR).
    Compiled to a no-op unless gstreamer debugging is available (>= 0.10.19).
*/
void QGstreamerCaptureSession::dumpGraph(const QString &fileName)
{
#if !(GST_DISABLE_GST_DEBUG) && (GST_VERSION_MAJOR >= 0) && (GST_VERSION_MINOR >= 10) && (GST_VERSION_MICRO >= 19)
    _gst_debug_bin_to_dot_file(GST_BIN(m_pipeline),
                               GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL |*/ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES),
                               fileName.toAscii());
#endif
}
|
502 |
|
503 QUrl QGstreamerCaptureSession::outputLocation() const |
|
504 { |
|
505 return m_sink; |
|
506 } |
|
507 |
|
508 bool QGstreamerCaptureSession::setOutputLocation(const QUrl& sink) |
|
509 { |
|
510 m_sink = sink; |
|
511 return true; |
|
512 } |
|
513 |
|
514 void QGstreamerCaptureSession::setAudioInput(QGstreamerElementFactory *audioInput) |
|
515 { |
|
516 m_audioInputFactory = audioInput; |
|
517 } |
|
518 |
|
519 void QGstreamerCaptureSession::setAudioPreview(QGstreamerElementFactory *audioPreview) |
|
520 { |
|
521 m_audioPreviewFactory = audioPreview; |
|
522 } |
|
523 |
|
524 void QGstreamerCaptureSession::setVideoInput(QGstreamerVideoInput *videoInput) |
|
525 { |
|
526 m_videoInputFactory = videoInput; |
|
527 } |
|
528 |
|
529 void QGstreamerCaptureSession::setVideoPreview(QGstreamerElementFactory *videoPreview) |
|
530 { |
|
531 m_videoPreviewFactory = videoPreview; |
|
532 } |
|
533 |
|
534 QGstreamerCaptureSession::State QGstreamerCaptureSession::state() const |
|
535 { |
|
536 return m_state; |
|
537 } |
|
538 |
|
539 void QGstreamerCaptureSession::waitForStopped() |
|
540 { |
|
541 GstState state = GST_STATE_PLAYING; |
|
542 gst_element_get_state(m_pipeline, &state, 0, 0); |
|
543 |
|
544 while (state != GST_STATE_NULL) { |
|
545 qApp->processEvents(); |
|
546 gst_element_get_state(m_pipeline, &state, 0, 0); |
|
547 } |
|
548 } |
|
549 |
|
/*!
    Drives the session state machine towards \a newState.

    If the target state needs a different pipeline mode, the pipeline is
    stopped and the graph rebuilt.  A recording pipeline is first drained
    with an EOS event so muxers can finalize the file; busMessage() calls
    back into this method with m_waitingForEos set once EOS arrives.
*/
void QGstreamerCaptureSession::setState(QGstreamerCaptureSession::State newState)
{
    // Already heading there (and not in the middle of an EOS drain): no-op.
    if (newState == m_pendingState && !m_waitingForEos)
        return;

    m_pendingState = newState;

    // Map the requested session state onto the pipeline layout it needs.
    PipelineMode newMode = EmptyPipeline;

    switch (newState) {
        case PausedState:
        case RecordingState:
            newMode = PreviewAndRecordingPipeline;
            break;
        case PreviewState:
            newMode = PreviewPipeline;
            break;
        case StoppedState:
            newMode = EmptyPipeline;
            break;
    }

    if (newMode != m_pipelineMode) {
        if (m_pipelineMode == PreviewAndRecordingPipeline) {
            if (!m_waitingForEos) {
                m_waitingForEos = true;
                //qDebug() << "Waiting for EOS";
                //with live sources it's necessary to send EOS even to pipeline
                //before going to STOPPED state
                gst_element_send_event(m_pipeline, gst_event_new_eos());
                // Resumed from busMessage() when the EOS message arrives.
                return;
            } else {
                m_waitingForEos = false;
                //qDebug() << "EOS received";
            }
        }

        //select suitable default codecs/containers, if necessary
        m_recorderControl->applySettings();

        gst_element_set_state(m_pipeline, GST_STATE_NULL);

        //It would be better to do this async. but
        //gstreamer doesn't notify about pipeline went to NULL state
        waitForStopped();
        if (!rebuildGraph(newMode)) {
            // Graph construction failed: fall back to a stopped session.
            m_pendingState = StoppedState;
            m_state = StoppedState;
            emit stateChanged(StoppedState);

            return;
        }
    }

    // Kick the pipeline towards the gstreamer state matching newState.
    switch (newState) {
        case PausedState:
            gst_element_set_state(m_pipeline, GST_STATE_PAUSED);
            break;
        case RecordingState:
        case PreviewState:
            gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
            break;
        case StoppedState:
            gst_element_set_state(m_pipeline, GST_STATE_NULL);
    }

    //we have to do it here, since gstreamer will not emit bus messages any more
    if (newState == StoppedState) {
        m_state = StoppedState;
        emit stateChanged(StoppedState);
    }
}
|
622 |
|
623 |
|
624 qint64 QGstreamerCaptureSession::duration() const |
|
625 { |
|
626 GstFormat format = GST_FORMAT_TIME; |
|
627 gint64 duration = 0; |
|
628 |
|
629 if ( m_encodeBin && gst_element_query_position(m_encodeBin, &format, &duration)) |
|
630 return duration / 1000000; |
|
631 else |
|
632 return 0; |
|
633 } |
|
634 |
|
635 void QGstreamerCaptureSession::setCaptureDevice(const QString &deviceName) |
|
636 { |
|
637 m_captureDevice = deviceName; |
|
638 } |
|
639 |
|
640 void QGstreamerCaptureSession::setMetaData(const QMap<QByteArray, QVariant> &data) |
|
641 { |
|
642 //qDebug() << "QGstreamerCaptureSession::setMetaData" << data; |
|
643 m_metaData = data; |
|
644 |
|
645 if (m_encodeBin) { |
|
646 GstIterator *elements = gst_bin_iterate_all_by_interface(GST_BIN(m_encodeBin), GST_TYPE_TAG_SETTER); |
|
647 GstElement *element = 0; |
|
648 while (gst_iterator_next(elements, (void**)&element) == GST_ITERATOR_OK) { |
|
649 //qDebug() << "found element with tag setter interface:" << gst_element_get_name(element); |
|
650 QMapIterator<QByteArray, QVariant> it(data); |
|
651 while (it.hasNext()) { |
|
652 it.next(); |
|
653 const QString tagName = it.key(); |
|
654 const QVariant tagValue = it.value(); |
|
655 |
|
656 |
|
657 switch(tagValue.type()) { |
|
658 case QVariant::String: |
|
659 gst_tag_setter_add_tags(GST_TAG_SETTER(element), |
|
660 GST_TAG_MERGE_REPLACE_ALL, |
|
661 tagName.toUtf8().constData(), |
|
662 tagValue.toString().toUtf8().constData(), |
|
663 NULL); |
|
664 break; |
|
665 case QVariant::Int: |
|
666 case QVariant::LongLong: |
|
667 gst_tag_setter_add_tags(GST_TAG_SETTER(element), |
|
668 GST_TAG_MERGE_REPLACE_ALL, |
|
669 tagName.toUtf8().constData(), |
|
670 tagValue.toInt(), |
|
671 NULL); |
|
672 break; |
|
673 case QVariant::Double: |
|
674 gst_tag_setter_add_tags(GST_TAG_SETTER(element), |
|
675 GST_TAG_MERGE_REPLACE_ALL, |
|
676 tagName.toUtf8().constData(), |
|
677 tagValue.toDouble(), |
|
678 NULL); |
|
679 break; |
|
680 default: |
|
681 break; |
|
682 } |
|
683 |
|
684 } |
|
685 |
|
686 } |
|
687 } |
|
688 } |
|
689 |
|
690 bool QGstreamerCaptureSession::processSyncMessage(const QGstreamerMessage &message) |
|
691 { |
|
692 GstMessage* gm = message.rawMessage(); |
|
693 |
|
694 if (gm && GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ELEMENT && |
|
695 gst_structure_has_name(gm->structure, "prepare-xwindow-id")) |
|
696 { |
|
697 if (m_audioPreviewFactory) |
|
698 m_audioPreviewFactory->prepareWinId(); |
|
699 |
|
700 if (m_videoPreviewFactory) |
|
701 m_videoPreviewFactory->prepareWinId(); |
|
702 |
|
703 return true; |
|
704 } |
|
705 |
|
706 return false; |
|
707 } |
|
708 |
|
/*!
    Handles asynchronous gstreamer bus messages: forwards errors, resumes a
    pending state change when the EOS drain completes, and translates
    pipeline state-changed notifications into session stateChanged() signals.
*/
void QGstreamerCaptureSession::busMessage(const QGstreamerMessage &message)
{
    GstMessage* gm = message.rawMessage();

    if (gm) {
        // Errors can originate from any element, not just the pipeline.
        if (GST_MESSAGE_TYPE(gm) == GST_MESSAGE_ERROR) {
            GError *err;
            gchar *debug;
            gst_message_parse_error (gm, &err, &debug);
            emit error(int(QMediaRecorder::ResourceError),QString::fromUtf8(err->message));
            g_error_free (err);
            g_free (debug);
        }

        // State bookkeeping only reacts to messages from the top pipeline.
        if (GST_MESSAGE_SRC(gm) == GST_OBJECT_CAST(m_pipeline)) {
            switch (GST_MESSAGE_TYPE(gm)) {
            case GST_MESSAGE_DURATION:
                break;

            case GST_MESSAGE_EOS:
                // Recording drain finished: re-enter setState() to perform
                // the deferred pipeline rebuild (see setState()).
                if (m_waitingForEos)
                    setState(m_pendingState);
                break;

            case GST_MESSAGE_STATE_CHANGED:
                {

                    GstState oldState;
                    GstState newState;
                    GstState pending;

                    gst_message_parse_state_changed(gm, &oldState, &newState, &pending);

                    QStringList states;
                    states << "GST_STATE_VOID_PENDING" << "GST_STATE_NULL" << "GST_STATE_READY" << "GST_STATE_PAUSED" << "GST_STATE_PLAYING";

                    /*
                    qDebug() << QString("state changed: old: %1 new: %2 pending: %3") \
                            .arg(states[oldState]) \
                            .arg(states[newState]) \
                            .arg(states[pending]);

                    #define ENUM_NAME(c,e,v) (c::staticMetaObject.enumerator(c::staticMetaObject.indexOfEnumerator(e)).valueToKey((v)))

                    qDebug() << "Current session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_state);
                    qDebug() << "Pending session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_pendingState);
                    */

                    // Confirm the pending session state once the pipeline
                    // actually reaches the corresponding gstreamer state.
                    switch (newState) {
                    case GST_STATE_VOID_PENDING:
                    case GST_STATE_NULL:
                    case GST_STATE_READY:
                        if (m_state != StoppedState && m_pendingState == StoppedState) {
                            emit stateChanged(m_state = StoppedState);
                            dumpGraph("stopped");
                        }
                        break;
                    case GST_STATE_PAUSED:
                        if (m_state != PausedState && m_pendingState == PausedState)
                            emit stateChanged(m_state = PausedState);
                        dumpGraph("paused");

                        if (m_pipelineMode == RecordingPipeline && !m_metaData.isEmpty())
                            setMetaData(m_metaData);
                        break;
                    case GST_STATE_PLAYING:
                        {
                            if ((m_pendingState == PreviewState || m_pendingState == RecordingState) &&
                                m_state != m_pendingState)
                            {
                                m_state = m_pendingState;
                                emit stateChanged(m_state);
                            }

                            if (m_pipelineMode == PreviewPipeline)
                                dumpGraph("preview");
                            else
                                dumpGraph("recording");
                        }
                        break;
                    }
                }
                break;
            default:
                break;
            }
            //qDebug() << "New session state:" << ENUM_NAME(QGstreamerCaptureSession,"State",m_state);
        }
    }
}
|
799 |