/*
* Copyright (c) 2002 Nokia Corporation and/or its subsidiary(-ies).
* All rights reserved.
* This component and the accompanying materials are made available
* under the terms of the License "Symbian Foundation License v1.0"
* which accompanies this distribution, and is available
* at the URL "http://www.symbianfoundation.org/legal/sfl-v10.html".
*
* Initial Contributors:
* Nokia Corporation - initial contribution.
*
* Contributors:
*
* Description: Implementation for movie processor
*
*/
|
17 |
|
18 |
|
19 // Include Files |
|
20 |
|
21 #include "movieprocessorimpl.h" |
|
22 #include "statusmonitor.h" |
|
23 #include "activequeue.h" |
|
24 #include "mp4parser.h" |
|
25 #include "mp4composer.h" |
|
26 #include "videoencoder.h" |
|
27 #include "yuv2rgb12.h" |
|
28 #include "yuv2rgb16.h" |
|
29 #include "yuv2rgb24.h" |
|
30 #include "VideoProcessorAudioData.h" |
|
31 #include "DisplayChain.h" |
|
32 #include "VedRgb2YuvConverter.h" |
|
33 #include "vedaudiosettings.h" |
|
34 #include "vedvideosettings.h" |
|
35 #include "AudSong.h" |
|
36 #include "audioprocessor.h" |
|
37 #include "SizeEstimate.h" |
|
38 #include "vedavcedit.h" |
|
39 |
|
// Local Constants

const TUint KReadBufInitSize = 512; // stream start buffer initial size
//const TInt KVideoProcessorPriority = CActive::EPriorityHigh;
const TUint KVideoQueueBlocks = 16;          // block count for the video data queue
const TUint KVideoQueueBlockSize = 256;      // bytes per video queue block
const TInt KDemuxPriority = CActive::EPriorityHigh;  // active-object priority for the demultiplexer
const TUint KInitialAudioBufferSize = 1024;  // initial size of the output audio buffer (bytes)
const TUint KInitialVideoBufferSize = 10240; // initial size of the output video buffer (bytes)
const TUint KMaxVideoSpeed = 1000;           // maximum playback speed value (used as the default iSpeed)
const TUint KVideoTimeScale = 5000;          // for both normal & generated clips
const TUint KAMRAudioTimeScale = 8000;       // time scale used for AMR audio tracks

const TUint KDiskSafetyLimit = 400000; // Amount of free disk space to leave unused

_LIT(KTempFilePath ,"c:\\system\\temp\\"); // path for temp file used in image insertion

// An assertion macro wrapper to clean up the code a bit
#define VPASSERT(x) __ASSERT_DEBUG(x, User::Panic(_L("CMovieProcessorImpl"), EInvalidInternalState))

// Debug trace macro; expands to nothing in release builds
#ifdef _DEBUG
#include <e32svr.h>
#define PRINT(x) RDebug::Print x;
#else
#define PRINT(x)
#endif
|
66 |
|
67 // ================= MEMBER FUNCTIONS ======================= |
|
68 |
|
69 // ----------------------------------------------------------------------------- |
|
70 // CMovieProcessorImpl::NewL |
|
71 // Two-phased constructor. |
|
72 // ----------------------------------------------------------------------------- |
|
73 // |
|
74 CMovieProcessorImpl* CMovieProcessorImpl::NewL() |
|
75 { |
|
76 CMovieProcessorImpl* self = NewLC(); |
|
77 CleanupStack::Pop(self); |
|
78 return self; |
|
79 } |
|
80 |
|
81 CMovieProcessorImpl* CMovieProcessorImpl::NewLC() |
|
82 { |
|
83 CMovieProcessorImpl* self = new (ELeave) CMovieProcessorImpl(); |
|
84 CleanupStack::PushL(self); |
|
85 self->ConstructL(); |
|
86 return self; |
|
87 } |
|
88 |
|
89 // ----------------------------------------------------------------------------- |
|
90 // CMovieProcessorImpl::CMovieProcessorImpl |
|
91 // C++ default constructor can NOT contain any code, that |
|
92 // might leave. |
|
93 // ----------------------------------------------------------------------------- |
|
94 // |
|
95 CMovieProcessorImpl::CMovieProcessorImpl() |
|
96 : CActive(EPriorityNormal), iReadImageDes(0,0), iReadDes(0, 0) |
|
97 { |
|
98 // Reset state |
|
99 iState = EStateIdle; |
|
100 iDataFormat = EDataAutoDetect; |
|
101 iAudioFramesInSample = KVedAudioFramesInSample; |
|
102 |
|
103 iStartTransitionEffect = EVedStartTransitionEffectNone; |
|
104 iMiddleTransitionEffect = EVedMiddleTransitionEffectNone; |
|
105 iPreviousMiddleTransitionEffect = EVedMiddleTransitionEffectNone; |
|
106 iEndTransitionEffect = EVedEndTransitionEffectNone; |
|
107 iSpeed = KMaxVideoSpeed; |
|
108 iColorEffect = EVedColorEffectNone; |
|
109 iNumberOfVideoClips=1; |
|
110 iTr.iTrPrevNew = -1; |
|
111 iTr.iTrPrevOrig = -1; |
|
112 iStartFrameIndex = -1; |
|
113 iMovieSizeLimit = 0; |
|
114 iFrameParametersSize = 0; |
|
115 |
|
116 // We are now properly initialized |
|
117 iState = EStateIdle; |
|
118 |
|
119 } |
|
120 |
|
121 // ----------------------------------------------------------------------------- |
|
122 // CMovieProcessorImpl::~CMovieProcessorImpl |
|
123 // Destructor. |
|
124 // ----------------------------------------------------------------------------- |
|
125 // |
|
126 CMovieProcessorImpl::~CMovieProcessorImpl() |
|
127 { |
|
128 |
|
129 Cancel(); |
|
130 |
|
131 TInt error = KErrNone; |
|
132 |
|
133 TRAP(error, DoCloseVideoL()); |
|
134 |
|
135 DeleteClipStructures(); |
|
136 |
|
137 if (iFrameParameters) |
|
138 { |
|
139 User::Free(iFrameParameters); |
|
140 iFrameParameters = 0; |
|
141 } |
|
142 |
|
143 if (iVideoClipParameters) |
|
144 { |
|
145 User::Free(iVideoClipParameters); |
|
146 iVideoClipParameters = 0; |
|
147 } |
|
148 |
|
149 if (iOutAudioBuffer) { |
|
150 delete iOutAudioBuffer; |
|
151 iOutAudioBuffer=0; |
|
152 } |
|
153 |
|
154 if (iOutVideoBuffer) { |
|
155 delete iOutVideoBuffer; |
|
156 iOutVideoBuffer=0; |
|
157 } |
|
158 |
|
159 // although this should be released by VideoEditorEngine, |
|
160 // the following is still needed in case of leave |
|
161 |
|
162 if(iDemux) |
|
163 { |
|
164 delete iDemux; |
|
165 iDemux = 0; |
|
166 } |
|
167 |
|
168 if(iVideoProcessor) |
|
169 { |
|
170 delete iVideoProcessor; |
|
171 iVideoProcessor = 0; |
|
172 } |
|
173 |
|
174 if (iComposer) { |
|
175 delete iComposer; |
|
176 iComposer = 0; |
|
177 } |
|
178 |
|
179 if (iAudioProcessor) |
|
180 { |
|
181 delete iAudioProcessor; |
|
182 iAudioProcessor = 0; |
|
183 } |
|
184 |
|
185 if(iImageComposer) |
|
186 { |
|
187 delete iImageComposer; |
|
188 iImageComposer=0; |
|
189 } |
|
190 |
|
191 if (iImageAvcEdit) |
|
192 { |
|
193 delete iImageAvcEdit; |
|
194 iImageAvcEdit = 0; |
|
195 } |
|
196 |
|
197 if (iYuvImageBuf) |
|
198 { |
|
199 User::Free(iYuvImageBuf); |
|
200 iYuvImageBuf=0; |
|
201 } |
|
202 |
|
203 if(iVideoEncoder) |
|
204 { |
|
205 delete iVideoEncoder; |
|
206 iVideoEncoder = 0; |
|
207 } |
|
208 |
|
209 if(iParser) |
|
210 { |
|
211 delete iParser; |
|
212 iParser = 0; |
|
213 } |
|
214 if(iVideoQueue) |
|
215 { |
|
216 delete iVideoQueue; |
|
217 iVideoQueue = 0; |
|
218 } |
|
219 |
|
220 if (iWaitScheduler) |
|
221 { |
|
222 delete iWaitScheduler; |
|
223 iWaitScheduler = 0; |
|
224 } |
|
225 |
|
226 if(iMonitor) |
|
227 { |
|
228 delete iMonitor; |
|
229 iMonitor = 0; |
|
230 } |
|
231 |
|
232 if (iReadBuf) |
|
233 User::Free(iReadBuf); |
|
234 |
|
235 if (iRgbBuf) |
|
236 { |
|
237 delete iRgbBuf; |
|
238 iRgbBuf = 0; |
|
239 } |
|
240 |
|
241 if (iOutBitmap) |
|
242 { |
|
243 delete iOutBitmap; |
|
244 iOutBitmap = 0; |
|
245 } |
|
246 |
|
247 if (iSizeEstimate) |
|
248 { |
|
249 delete iSizeEstimate; |
|
250 iSizeEstimate = 0; |
|
251 } |
|
252 |
|
253 if (iAvcEdit) |
|
254 { |
|
255 delete iAvcEdit; |
|
256 iAvcEdit = 0; |
|
257 } |
|
258 |
|
259 if (iImageYuvConverter) |
|
260 { |
|
261 delete iImageYuvConverter; |
|
262 iImageYuvConverter = 0; |
|
263 } |
|
264 |
|
265 // for transition effect |
|
266 if ( iFsConnected ) |
|
267 { |
|
268 TRAP(error, CloseTransitionInfoL()); |
|
269 iFs.Close(); |
|
270 iFsConnected = EFalse; |
|
271 } |
|
272 } |
|
273 |
|
274 |
|
275 // ----------------------------------------------------------------------------- |
|
276 // CMovieProcessorImpl::ConstructL |
|
277 // Symbian 2nd phase constructor can leave. |
|
278 // ----------------------------------------------------------------------------- |
|
279 // |
|
// -----------------------------------------------------------------------------
// CMovieProcessorImpl::ConstructL
// Symbian 2nd phase constructor can leave. Allocates the stream reading
// buffer, registers this active object with the scheduler and creates the
// size estimator helper.
// -----------------------------------------------------------------------------
//
void CMovieProcessorImpl::ConstructL()
    {

    // Allocate stream reading buffer (grown later on demand)
    iReadBuf = (TUint8*) User::AllocL(KReadBufInitSize);
    iBufLength = KReadBufInitSize;

    iClipFileName.Zero();
    iOutputMovieFileName.Zero();

    // Nested wait loop used for synchronous waits during processing
    iWaitScheduler = new (ELeave) CActiveSchedulerWait;

    // Add to active scheduler
    CActiveScheduler::Add(this);

    // Helper that estimates the size of the output movie
    iSizeEstimate = CSizeEstimate::NewL(this);

    iState = EStateIdle;
    }
|
299 |
|
300 |
|
301 // ----------------------------------------------------------------------------- |
|
302 // CMovieProcessorImpl::StartMovieL |
|
303 // Prepares the processor for processing a movie and starts processing |
|
304 // (other items were commented in a header). |
|
305 // ----------------------------------------------------------------------------- |
|
306 // |
|
// Prepares the processor for processing a movie and starts processing.
// @param aMovie      the movie to process (mandatory)
// @param aFileName   output file name; used when aFileHandle is NULL
// @param aFileHandle optional open handle to the output file
// @param aObserver   processing observer (mandatory)
// Leaves with KErrArgument if the movie or observer is missing, or with a
// system-wide error code on resource failure.
void CMovieProcessorImpl::StartMovieL(CVedMovieImp* aMovie, const TDesC& aFileName, RFile* aFileHandle,
    MVedMovieProcessingObserver* aObserver)
    {

    PRINT((_L("CMovieProcessorImpl::StartMovieL() begin")))

    // reset member variables
    ResetL();

    // get arguments; both the movie and the observer are mandatory
    iMovie = aMovie;
    if (!iMovie)
        User::Leave(KErrArgument);

    iOutputMovieFileName = aFileName;
    iOutputFileHandle = aFileHandle;

    CVedMovieImp* movie = (iMovie);
    iObserver = aObserver;

    if (!iObserver)
        User::Leave(KErrArgument);

    // Replace any monitor left over from a previous run
    if (iMonitor)
        {
        delete iMonitor;
        iMonitor = 0;
        }

    // Create a status monitor object:
    iMonitor = new (ELeave) CStatusMonitor(iObserver, this, aMovie);
    iMonitor->ConstructL();

    // update movie properties
    iFramesProcessed=0;
    iNumberOfVideoClips = iMovie->VideoClipCount();
    iNumberOfAudioClips = iMovie->AudioClipCount();

    // calculate total movie duration for progress bar: video & audio tracks.
    // in milliseconds (x2 because audio and video passes are both counted)
    iTotalMovieDuration = TInt64(2) * ( movie->Duration().Int64()/1000 );

    for (TInt i = 0; i < iNumberOfVideoClips; i++)
        {
        CVedVideoClip* currentClip = movie->VideoClip(i);
        CVedVideoClipInfo* currentInfo = currentClip->Info();

        // Take time to generate a clip into account
        if (currentInfo->Class() == EVedVideoClipClassGenerated)
            {
            iTotalMovieDuration += currentInfo->Duration().Int64()/1000;
            }
        }

    // set media types
    SetOutputMediaTypesL();

    // get transcode factors: bitstream mode & time inc. resolution
    GetTranscodeFactorsL();

    // set video transcoding parameters
    SetupTranscodingL();

#ifdef VIDEOEDITORENGINE_AVC_EDITING
    // check if AVC editing is involved: either the output is AVC or at
    // least one file-based input clip is AVC baseline
    TBool avcEditing = ( iOutputVideoType == EVideoAVCProfileBaseline );
    for(TInt i = 0; i < movie->VideoClipCount() && avcEditing == EFalse; i++)
        {
        CVedVideoClip* currentClip = movie->VideoClip(i);
        CVedVideoClipInfo* currentInfo = currentClip->Info();

        if( currentInfo->Class() == EVedVideoClipClassFile &&
            (currentInfo->VideoType() == EVedVideoTypeAVCBaselineProfile) )
            {
            avcEditing = ETrue;
            break;
            }
        }

    if (avcEditing)
        {
        // create AVC editing instance
        iAvcEdit = CVedAVCEdit::NewL();
        }

    if (iOutputVideoType == EVideoAVCProfileBaseline)
        {
        // set level (iAvcEdit exists here since avcEditing was ETrue above)
        iAvcEdit->SetOutputLevel( GetOutputAVCLevel() );
        }

#endif

    // Check
    //iOutputAudioType = EAudioAMR; // default is Amr if all are generated

    // File server session is needed for transition info temp files
    if ( iFsConnected == EFalse )
        {
        User::LeaveIfError( iFs.Connect() );
        iFsConnected = ETrue;
        }

    if (iNumberOfVideoClips)
        {
        iStartTransitionEffect = aMovie->StartTransitionEffect();
        iEndTransitionEffect = aMovie->EndTransitionEffect();
        CloseTransitionInfoL();
        }

    iImageEncodeProcFinished = 0; // to indicate whether encode images process is finished
    iImageEncodedFlag = 0; // has an image been encoded
    iFirstTimeProcessing = 0;
    iTotalImagesProcessed = 0;
    iImageClipCreated = 0; // has an image clip been created

    iImageVideoTimeScale = KVideoTimeScale; // Initializing to standard value
    iGetFrameInProgress = 0;
    iOutputAudioTimeSet = 0;
    iOutputVideoTimeSet = 0;
    iAllGeneratedClips = 0; // Indicates if all the clips in the movie are generated clips - 0 indicates false
    iEncodeInProgress = 0; // Indicates encoding is not in progress
    iFirstClipHasNoDecInfo = EFalse;

    // Allocate memory for frame parameters array
    // (iFrameParametersSize was accumulated in GetTranscodeFactorsL)
    if (iFrameParameters)
        User::Free(iFrameParameters);

    iFrameParameters = 0;
    iFrameParameters = (struct TFrameParameters *)User::AllocL(iFrameParametersSize * sizeof(struct TFrameParameters));
    Mem::Fill(iFrameParameters, iFrameParametersSize * sizeof(TFrameParameters), 0);

    InitializeClipStructuresL();

    // second pass starts here
    // load first clip properties
    iVideoClipNumber=0;
    iAudioClipNumber=0;

    if (iNumberOfVideoClips)
        iVideoClip = movie->VideoClip(iVideoClipNumber);

    // if clip is not file-based, then call some other initializer
    TInt firstClipIsGenerated = 0;
    iFirstClipIsGen = EFalse;

    // NOTE(review): if iNumberOfVideoClips == 0 the else-branch still runs
    // InitializeClipL(); presumably that handles the audio-only case — confirm
    if(iNumberOfVideoClips && iVideoClip->Info()->Class() == EVedVideoClipClassGenerated)
        {
        // since frame parameters are not available here, use temporary instantiation for composer
        TemporaryInitializeGeneratedClipL();
        // note that we may need to create a parser temporarily
        firstClipIsGenerated = 1;
        iFirstClipIsGen = ETrue;
        }
    else
        {
        InitializeClipL(); // check details inside initialization
        }

    if(iOutputAudioType == EAudioAMR)
        {
        if( iAudioType == EAudioNone)
            iAudioType = EAudioAMR;
        iAudioFramesInSample = KVedAudioFramesInSample;
        }

    else if(iOutputAudioType == EAudioAAC)
        {
        if( iAudioType == EAudioNone)
            {
            iAudioType = EAudioAAC;
            iFirstClipHasNoDecInfo = ETrue; // because it has no audio so it will have no decoder specific Info
            }
        iAudioFramesInSample = 1;
        }


    VPASSERT(!iComposer);
    // initialize composer; prefer the caller-supplied file handle over
    // the file name when one was given
    if (iOutputFileHandle)
        iComposer = CMP4Composer::NewL(iOutputFileHandle, (CParser::TVideoFormat)iOutputVideoType, (CParser::TAudioFormat)iOutputAudioType, iAvcEdit);
    else
        iComposer = CMP4Composer::NewL(iOutputMovieFileName, (CParser::TVideoFormat)iOutputVideoType, (CParser::TAudioFormat)iOutputAudioType, iAvcEdit);

    iFramesProcessed = 0;
    iStartingProcessing = ETrue;

    VPASSERT(iOutputVideoTimeScale);
    VPASSERT(iOutputAudioTimeScale);
    VPASSERT(iAudioFramesInSample);

    // write video & audio descriptions
    CComposer::TStreamParameters streamParameters;

    if(iAllGeneratedClips == 0) // if all were generated initialize to default
        {
        if (iNumberOfVideoClips)
            {
            streamParameters = (CComposer::TStreamParameters &)iParser->iStreamParameters;
            TSize tmpSize = iMovie->Resolution();
            streamParameters.iVideoWidth = tmpSize.iWidth; /* iVideoParameters.iWidth; */
            streamParameters.iVideoHeight = tmpSize.iHeight; /* iVideoParameters.iHeight; */
            streamParameters.iHaveAudio = ETrue; //because u always insert silent amr frames atleast//aac frames
            if(iOutputAudioType == EAudioAMR)
                streamParameters.iAudioFormat = (CComposer::TAudioFormat)CComposer::EAudioFormatAMR;

            else if(iOutputAudioType == EAudioAAC)
                streamParameters.iAudioFormat = (CComposer::TAudioFormat)CComposer::EAudioFormatAAC; //if amr out is amr else aac if none it will be none

            }
        else
            {
            // No video, only audio; generate black frames. Can't use iParser->iStreamParameters since iParser doesn't exist
            // SetHeaderDefaults() checked the resolution from movie to iVideoParameters
            streamParameters.iVideoWidth = iVideoParameters.iWidth;
            streamParameters.iVideoHeight = iVideoParameters.iHeight;
            }
        }
    else
        {

        /* since all clips inserted are generated */
        iVideoType = iOutputVideoType;

        VPASSERT( (iVideoType == EVideoH263Profile0Level10) ||
            (iVideoType == EVideoH263Profile0Level45) ||
            (iVideoType == EVideoMPEG4) ||
            (iVideoType == EVideoAVCProfileBaseline) );

        streamParameters.iCanSeek = ETrue;
        streamParameters.iNumDemuxChannels = 1; /* Because video will be there */
        TTimeIntervalMicroSeconds movduration = TTimeIntervalMicroSeconds(movie->Duration());
        TInt64 alllength = movduration.Int64();
        streamParameters.iAudioLength = I64INT(alllength);
        streamParameters.iVideoLength = I64INT(alllength);
        streamParameters.iStreamLength = I64INT(alllength);
        streamParameters.iVideoWidth = iVideoParameters.iWidth;
        streamParameters.iVideoHeight = iVideoParameters.iHeight;

        if (iOutputAudioType == EAudioAMR)
            {
            streamParameters.iAudioFormat = (CComposer::TAudioFormat) CComposer::EAudioFormatAMR;
            streamParameters.iHaveVideo = ETrue;
            streamParameters.iNumDemuxChannels++;
            streamParameters.iAudioFramesInSample = KVedAudioFramesInSample;
            iAudioFramesInSample = KVedAudioFramesInSample;
            }

        else if (iOutputAudioType == EAudioAAC)
            {
            streamParameters.iAudioFormat = (CComposer::TAudioFormat) CComposer::EAudioFormatAAC;
            streamParameters.iHaveVideo = ETrue;
            streamParameters.iNumDemuxChannels++;
            streamParameters.iAudioFramesInSample = 1;
            iAudioFramesInSample = 1; // Same as above
            }

        else
            {

            streamParameters.iAudioFormat = (CComposer::TAudioFormat) CComposer::EAudioFormatNone;
            streamParameters.iHaveAudio =EFalse;
            streamParameters.iAudioLength =0; /* reset audio length as it was set to videolength */
            streamParameters.iAudioFramesInSample = KVedAudioFramesInSample;
            iAudioFramesInSample = KVedAudioFramesInSample;
            }

        streamParameters.iAudioTimeScale = KAMRAudioTimeScale;
        streamParameters.iVideoTimeScale = iImageVideoTimeScale;
        iOutputVideoTimeScale = iImageVideoTimeScale;
        iOutputAudioTimeScale = KAMRAudioTimeScale;

        }

    // AAC output uses the song's sampling rate as the audio time scale
    TAudFileProperties outProp = iMovie->Song()->OutputFileProperties();

    if (iMovie->Song()->ClipCount(KAllTrackIndices) > 0)
        {
        if ( outProp.iAudioType == EAudAAC_MPEG4)
            {
            iOutputAudioTimeScale = outProp.iSamplingRate;

            }
        }

    streamParameters.iVideoFormat = (CComposer::TVideoFormat)iOutputVideoType;
    streamParameters.iStreamBitrate = iMovie->VideoStandardBitrate();
    iComposer->ComposeHeaderL(streamParameters, iOutputVideoTimeScale, iOutputAudioTimeScale, iAudioFramesInSample);

    if( firstClipIsGenerated == 1 )
        {
        // first clip is generated: processing starts immediately
        iMonitor->PrepareComplete();
        iMonitor->ProcessingStarted(iStartingProcessing);
        iStartingProcessing = EFalse;
        iState = EStateProcessing;

        // since first clip is generated, destroy parser
        if(iAllGeneratedClips == 1)
            {
            if(iParser)
                {
                delete iParser;
                iParser =0;
                }
            }
        }

    PRINT((_L("CMovieProcessorImpl::StartMovieL() end")))

    }
|
618 |
|
619 // ----------------------------------------------------------------------------- |
|
620 // CMovieProcessorImpl::SetOutputMediaTypesL |
|
621 // Set output audio/video types |
|
622 // (other items were commented in a header). |
|
623 // ----------------------------------------------------------------------------- |
|
624 // |
|
625 void CMovieProcessorImpl::SetOutputMediaTypesL() |
|
626 { |
|
627 |
|
628 CVedMovieImp* movie = (iMovie); |
|
629 |
|
630 if( movie->VideoType() == EVedVideoTypeH263Profile0Level10 ) |
|
631 iOutputVideoType = EVideoH263Profile0Level10; |
|
632 |
|
633 else if ( movie->VideoType() == EVedVideoTypeH263Profile0Level45 ) |
|
634 iOutputVideoType = EVideoH263Profile0Level45; |
|
635 |
|
636 else if ( movie->VideoType() == EVedVideoTypeMPEG4SimpleProfile ) |
|
637 iOutputVideoType = EVideoMPEG4; |
|
638 |
|
639 #ifdef VIDEOEDITORENGINE_AVC_EDITING |
|
640 else if ( movie->VideoType() == EVedVideoTypeAVCBaselineProfile ) |
|
641 iOutputVideoType = EVideoAVCProfileBaseline; |
|
642 #endif |
|
643 |
|
644 else |
|
645 User::Leave(KErrArgument); |
|
646 |
|
647 CAudSong* songPointer = movie->Song(); |
|
648 if ( songPointer->ClipCount(KAllTrackIndices) > 0 ) |
|
649 { |
|
650 if( movie->AudioType() == EVedAudioTypeAMR ) |
|
651 iOutputAudioType = EAudioAMR; |
|
652 |
|
653 else if ( movie->AudioType() == EVedAudioTypeAAC_LC ) |
|
654 iOutputAudioType = EAudioAAC; |
|
655 else |
|
656 User::Leave(KErrArgument); |
|
657 } |
|
658 else |
|
659 { |
|
660 // no audio |
|
661 iOutputAudioType = EAudioNone; |
|
662 } |
|
663 |
|
664 } |
|
665 |
|
666 // ----------------------------------------------------------------------------- |
|
667 // CMovieProcessorImpl::GetTranscodeFactorsL |
|
668 // Retrieve bitstream modes for input clips |
|
669 // (other items were commented in a header). |
|
670 // ----------------------------------------------------------------------------- |
|
671 // |
|
// Retrieves the bitstream mode / transcode factor for every file-based input
// clip by briefly opening each clip (parser + decoder), probing it, and
// closing it again. Also accumulates iFrameParametersSize, the total number
// of video frames across all clips. No-op when the movie has no video clips.
void CMovieProcessorImpl::GetTranscodeFactorsL()
    {

    if (!iNumberOfVideoClips)
        return;

    CVedMovieImp* movie = (iMovie);

    // Flag makes the clip-open path run in lightweight "probe" mode
    iThumbnailInProgress = ETrue;

    InitializeClipStructuresL();

    for(TInt i = 0; i < movie->VideoClipCount(); i++)
        {
        CVedVideoClip* currentClip = movie->VideoClip(i);
        CVedVideoClipInfo* currentInfo = currentClip->Info();

        if( currentInfo->Class() == EVedVideoClipClassFile )
            {
            TVedTranscodeFactor factor;

            // Prefer an already-open file handle over the file name
            if ( currentInfo->FileHandle() )
                {
                iClipFileName.Zero();
                iClipFileHandle = currentInfo->FileHandle();
                }
            else
                {
                iClipFileHandle = NULL;
                iClipFileName = currentInfo->FileName();
                }

            InitializeClipL(); // opens the file & parses header
            // open demux & decoder

            // Calculate the number of frames in the output clip
            iFrameParametersSize += iParser->GetNumberOfVideoFrames();

            iState = EStateProcessing;

            TInt error = iVideoProcessor->GetTranscodeFactorL(factor);

            if (error != KErrNone)
                User::Leave(error);

            if ( ((factor.iStreamType == EVedVideoBitstreamModeMPEG4Resyn)
                || (factor.iStreamType == EVedVideoBitstreamModeMPEG4DP)
                || (factor.iStreamType == EVedVideoBitstreamModeMPEG4DP_RVLC)
                || (factor.iStreamType == EVedVideoBitstreamModeMPEG4Resyn_DP)
                || (factor.iStreamType == EVedVideoBitstreamModeMPEG4Resyn_DP_RVLC))
                && ( currentClip->Info()->Resolution() == movie->Resolution() )
                && ( iOutputVideoType != EVideoAVCProfileBaseline ) )
                {
                // we do compressed domain transcoding for this clip, and it has
                // other mpeg4 modes than the regular; we need to ensure the VOS/VOL header
                // has the resync marked flag enabled
                iMpeg4ModeTranscoded = ETrue;
                }

            currentInfo->SetTranscodeFactor(factor);

            DoCloseVideoL(); // close all

            // Any AVC edit helper created during the probe is per-clip;
            // drop it before the next iteration
            if (iAvcEdit)
                {
                delete iAvcEdit;
                iAvcEdit = 0;
                }
            }
        else
            {
            // Generated clip: frame count comes straight from the clip info
            // Calculate the number of frames in the output clip
            iFrameParametersSize += (TInt) currentInfo->VideoFrameCount();
            }
        }

    iThumbnailInProgress = EFalse;
    iState = EStateIdle;
    }
|
751 |
|
752 |
|
753 // ----------------------------------------------------------------------------- |
|
754 // CMovieProcessorImpl::SetupTranscodingL |
|
755 // Set video transcoding parameters |
|
756 // (other items were commented in a header). |
|
757 // ----------------------------------------------------------------------------- |
|
758 // |
|
759 void CMovieProcessorImpl::SetupTranscodingL() |
|
760 { |
|
761 |
|
762 if (!iNumberOfVideoClips) |
|
763 return; |
|
764 |
|
765 CVedMovieImp* tmovie = (iMovie); |
|
766 TBool transitionExists = EFalse;// Assume there are no middle transitions in any clip |
|
767 TBool cutExists = EFalse; // Assuming there is no cut in any clip |
|
768 TBool blackFrames = EFalse; // Assume there is no need to encode black frames in the end |
|
769 TBool firstClipIsGen = EFalse; |
|
770 TBool firstClipIsFullTranscoded = EFalse; |
|
771 TBool clipFullTranscodingExists = EFalse; |
|
772 |
|
773 // Vos issues if first clip uses encoder |
|
774 TBool atleastOneH263 = EFalse; |
|
775 TBool atleastOneMPEG4 = EFalse; |
|
776 // TBool atleastOneAVC = EFalse; |
|
777 TBool differentModesExist = EFalse; |
|
778 |
|
779 // initially assume no mpeg4 files so no streammode |
|
780 TVedVideoBitstreamMode streamMode = EVedVideoBitstreamModeUnknown; |
|
781 TBool atleastOneGenerated = EFalse; // assuming that there are no generated clips |
|
782 TBool allGeneratedClips = ETrue; // asssuming all are generated |
|
783 iModeTranslationRequired = EFalse; // no need for translation in all generated |
|
784 |
|
785 TSize outputVideoResolution = tmovie->Resolution(); // since movie resolution is minimum resolution |
|
786 |
|
787 for(TInt i = 0; i < tmovie->VideoClipCount(); i++) |
|
788 { |
|
789 CVedVideoClip* currentClip = tmovie->VideoClip(i); |
|
790 CVedVideoClipInfo* currentInfo = currentClip->Info(); |
|
791 if( currentInfo->Class() == EVedVideoClipClassFile ) |
|
792 { |
|
793 allGeneratedClips = EFalse; // there is a file based clip |
|
794 |
|
795 if( (currentInfo->VideoType() == EVedVideoTypeH263Profile0Level10) || |
|
796 (currentInfo->VideoType() == EVedVideoTypeH263Profile0Level45) ) |
|
797 { |
|
798 // if there is even one H263 output is H263 |
|
799 atleastOneH263 = ETrue; |
|
800 } |
|
801 else if(currentInfo->VideoType() == EVedVideoTypeAVCBaselineProfile) |
|
802 { |
|
803 // atleastOneAVC = ETrue; |
|
804 } |
|
805 else |
|
806 { |
|
807 if(currentInfo->VideoType() == EVedVideoTypeMPEG4SimpleProfile) // if there is even one H263 output is H263 |
|
808 { |
|
809 atleastOneMPEG4 = ETrue; |
|
810 if(streamMode == EVedVideoBitstreamModeUnknown) |
|
811 { |
|
812 // since previously no mpeg4 was found to set streammode |
|
813 streamMode = currentInfo->TranscodeFactor().iStreamType; |
|
814 } |
|
815 else |
|
816 { |
|
817 if(streamMode != currentInfo->TranscodeFactor().iStreamType) // different modes in Mpeg4 |
|
818 differentModesExist = ETrue; |
|
819 } |
|
820 } |
|
821 else |
|
822 { |
|
823 //Error - improper or unsupported type file |
|
824 User::Leave(KErrNotSupported); |
|
825 } |
|
826 } |
|
827 |
|
828 // Here check if any clip is cut and also if first clip is cut |
|
829 TTimeIntervalMicroSeconds cutinTime = TTimeIntervalMicroSeconds(currentClip->CutInTime()); |
|
830 if(cutinTime != TTimeIntervalMicroSeconds(0)) |
|
831 { |
|
832 // cut does exist so encoder will be used in at least one clip |
|
833 if(i==0) // which means cut exists in first clip itself |
|
834 { |
|
835 iFirstClipIsCut = ETrue; |
|
836 } |
|
837 cutExists = ETrue; |
|
838 } |
|
839 |
|
840 // check if the clip will be full transcoded also as then we can |
|
841 // decide whether to change VOS bit and to set ModeTranslation as all would be encoded again |
|
842 TSize currClipRes = currentInfo->Resolution(); |
|
843 if( ( (outputVideoResolution.iWidth != currClipRes.iWidth) && |
|
844 (outputVideoResolution.iHeight != currClipRes.iHeight) ) || |
|
845 ( iOutputVideoType == EVideoMPEG4 && currentInfo->VideoType() == EVedVideoTypeAVCBaselineProfile ) ) |
|
846 { |
|
847 if(i == 0) |
|
848 { |
|
849 firstClipIsFullTranscoded = ETrue; //for VOS bit change |
|
850 } |
|
851 clipFullTranscodingExists = ETrue; |
|
852 } |
|
853 } |
|
854 else |
|
855 { |
|
856 atleastOneGenerated = ETrue; |
|
857 if(i == 0) // then first clip is generated |
|
858 { |
|
859 firstClipIsGen = ETrue; |
|
860 } |
|
861 } |
|
862 |
|
863 if( i != tmovie->VideoClipCount()-1 ) |
|
864 { |
|
865 if(tmovie->MiddleTransitionEffect(i) != EVedMiddleTransitionEffectNone ) |
|
866 { |
|
867 // this is required to check if any clips have |
|
868 // middle transitions so mode translation will be required |
|
869 transitionExists = ETrue; // even if all clips are of same mode |
|
870 } |
|
871 } |
|
872 } |
|
873 |
|
874 if( (TVedStartTransitionEffect)tmovie->StartTransitionEffect() != EVedStartTransitionEffectNone ) |
|
875 { |
|
876 // even if all clips are of same mode but if it is different than what encoder uses transcoding is needed |
|
877 transitionExists = ETrue; |
|
878 } |
|
879 |
|
880 if( (TVedEndTransitionEffect)tmovie->EndTransitionEffect() != EVedEndTransitionEffectNone) |
|
881 { |
|
882 // even if all clips are of same mode but if it is different than what encoder uses transcoding is needed |
|
883 transitionExists = ETrue; |
|
884 } |
|
885 if ( tmovie->Duration().Int64()/1000 > (tmovie->VideoClip(iNumberOfVideoClips-1)->EndTime().Int64()/1000) ) |
|
886 { |
|
887 // movie is longer than video track => black frames are encoded in the end => even if all clips are of same mode but if it is different than what encoder uses transcoding is needed |
|
888 blackFrames = ETrue; |
|
889 } |
|
890 |
|
891 if(iOutputVideoType == EVideoMPEG4) // if different modes dont exist then if output is Mpeg4 |
|
892 { |
|
893 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), output type = MPEG-4"))) |
|
894 |
|
895 if(differentModesExist) |
|
896 { |
|
897 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), different modes exist"))) |
|
898 |
|
899 // if there are differnet mode Mpeg4's then mode translation is required |
|
900 iModeTranslationRequired = ETrue; // regardless of there being generated clips |
|
901 } |
|
902 else |
|
903 { |
|
904 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), different modes don't exist"))) |
|
905 |
|
906 if(atleastOneGenerated) // if there are any generated clips |
|
907 { |
|
908 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), at least one generated"))) |
|
909 |
|
910 if(atleastOneMPEG4) // if there is one Mpeg4 clip atleast |
|
911 { |
|
912 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), at least one MPEG-4"))) |
|
913 |
|
914 if( (streamMode == EVedVideoBitstreamModeMPEG4Regular) ) //generated clips mode will be regular so u need to convert others |
|
915 { |
|
916 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), streamMode is regular"))) |
|
917 |
|
918 // if all are regular mode no need to change mode |
|
919 iModeTranslationRequired = EFalse; |
|
920 } |
|
921 else |
|
922 { |
|
923 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), streamMode != regular"))) |
|
924 |
|
925 // to regular (with resync) |
|
926 iModeTranslationRequired = ETrue; |
|
927 } |
|
928 } |
|
929 else |
|
930 { |
|
931 if(atleastOneH263) |
|
932 { |
|
933 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), at least one H.263"))) |
|
934 iModeTranslationRequired = ETrue; |
|
935 } |
|
936 else |
|
937 { |
|
938 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), all are generated"))) |
|
939 // all are generated no h263 or mpeg4 |
|
940 iModeTranslationRequired = EFalse; |
|
941 } |
|
942 } |
|
943 } |
|
944 else |
|
945 { |
|
946 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), no generated clips"))) |
|
947 |
|
948 if(atleastOneH263) |
|
949 { |
|
950 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), at least one H.263"))) |
|
951 iModeTranslationRequired = ETrue; |
|
952 } |
|
953 else |
|
954 { |
|
955 PRINT((_L("CMovieProcessorImpl::SetupTranscodingL(), no H.263"))) |
|
956 // still open if we have MPEG-4 nonregular + encoding |
|
957 iModeTranslationRequired = EFalse; |
|
958 } |
|
959 } |
|
960 } |
|
961 } |
|
962 else |
|
963 { |
|
964 // output is H263 or AVC so mode translation not required |
|
965 iModeTranslationRequired = EFalse; |
|
966 } |
|
967 |
|
968 // make decision of mode translation based on whether there |
|
969 // was a cut or transition or resolution transcoding in any clip |
|
970 |
|
971 if(iOutputVideoType == EVideoMPEG4) // if output is Mpeg4 |
|
972 { |
|
973 if ((!allGeneratedClips) && (streamMode != EVedVideoBitstreamModeMPEG4Regular)) // in case of differentModesExist, iModeTranslationRequired is already ETrue |
|
974 { |
|
975 // If we need to encode smth but not all (if all generated => no mix), encoding results in regular stream. |
|
976 // However, if input has nonregular, we need to translate the mode |
|
977 if(transitionExists || clipFullTranscodingExists || cutExists || blackFrames) |
|
978 { |
|
979 iModeTranslationRequired = ETrue; |
|
980 } |
|
981 } |
|
982 // make Decision for changing the Vos of the output movie based on whether the first frame would be encoded |
|
983 if(firstClipIsGen || firstClipIsFullTranscoded || ((TVedStartTransitionEffect)tmovie->StartTransitionEffect() != EVedStartTransitionEffectNone) || iFirstClipIsCut ) |
|
984 { |
|
985 iFirstClipUsesEncoder = ETrue; // this indicates that you may need to change Vos bit but final decision is |
|
986 } // done based on whether it was due to cut on the fly |
|
987 } |
|
988 |
|
989 } |
|
990 |
|
991 |
|
992 |
|
993 |
|
994 // ----------------------------------------------------------------------------- |
|
995 // CMovieProcessorImpl::CreateImage3GPFilesL |
|
996 // creates the necessary 3gp files from the given frames |
|
997 // (other items were commented in a header). |
|
998 // ----------------------------------------------------------------------------- |
|
999 // |
|
1000 TInt CMovieProcessorImpl::CreateImage3GPFilesL(TVideoOperation aCreateMode) |
|
1001 { |
|
1002 |
|
1003 if (iProcessingCancelled) |
|
1004 return KErrNone; |
|
1005 |
|
1006 if(aCreateMode == EVideoEncodeFrame) |
|
1007 { |
|
1008 // encode frame |
|
1009 TTimeIntervalMicroSeconds inMicroSeconds = TTimeIntervalMicroSeconds(iVideoClip->Info()->Generator()->VideoFrameStartTime(iTotalImagesProcessed)); |
|
1010 |
|
1011 if (!IsActive()) |
|
1012 { |
|
1013 SetActive(); |
|
1014 iStatus = KRequestPending; |
|
1015 } |
|
1016 iVideoEncoder->EncodeFrameL(iReadImageDes, iStatus, inMicroSeconds); |
|
1017 |
|
1018 // EncodeGiven |
|
1019 iImageEncodedFlag = 1; |
|
1020 iEncodeInProgress = 1; |
|
1021 |
|
1022 return KErrNone; |
|
1023 } |
|
1024 else |
|
1025 { |
|
1026 // a frame has been encoded, write it to output 3gp buffer |
|
1027 |
|
1028 // composer composing the movie |
|
1029 if(iStatus == KErrNone) |
|
1030 { |
|
1031 TBool isKeyFrame = 0; |
|
1032 TPtrC8 buf(iVideoEncoder->GetBufferL(isKeyFrame)); |
|
1033 |
|
1034 TBool modeChanged = EFalse; |
|
1035 |
|
1036 if ( iTotalImagesProcessed == 1 && iVideoType == EVideoMPEG4 ) |
|
1037 { |
|
1038 modeChanged = ETrue; |
|
1039 } |
|
1040 |
|
1041 /* composing is based on isIntra only */ |
|
1042 TBool firstFrame = EFalse; |
|
1043 if(iTotalImagesProcessed == 1) |
|
1044 { |
|
1045 firstFrame = ETrue; |
|
1046 |
|
1047 // VOS header size is parsed in composer for MPEG-4 |
|
1048 iMP4SpecificSize = 0; |
|
1049 } |
|
1050 |
|
1051 TInt64 durntTest = iVideoClip->Info()->Generator()->VideoFrameDuration(iTotalImagesProcessed-1).Int64(); |
|
1052 TInt64 durntsix = TInt64(((I64REAL(durntTest)/(TReal)1000)*(TReal) iImageVideoTimeScale) +0.5); |
|
1053 TInt durnt = I64INT(durntsix); |
|
1054 /* converting to ticks */ |
|
1055 durnt = (durnt)/1000; |
|
1056 |
|
1057 iGeneratedProcessed += durntTest/1000; |
|
1058 IncProgressBar(); /* indicate to gui about progress */ |
|
1059 |
|
1060 iImageComposer->WriteFrames((TDesC8&)buf, buf.Size(), durnt, isKeyFrame , |
|
1061 1/*numberOfFrames*/, CMP4Composer::EFrameTypeVideo, iMP4SpecificSize,modeChanged, |
|
1062 firstFrame,iVideoClip->Info()->TranscodeFactor().iStreamType, ETrue); |
|
1063 |
|
1064 iVideoEncoder->ReturnBuffer(); |
|
1065 } |
|
1066 /* end composing */ |
|
1067 |
|
1068 return KErrNone; |
|
1069 } |
|
1070 } |
|
1071 |
|
1072 // ----------------------------------------------------------------------------- |
|
1073 // CMovieProcessorImpl::ProcessImageSetsL |
|
1074 // Prepares for creating the necessary 3gp files from the given frames |
|
1075 // (other items were commented in a header). |
|
1076 // ----------------------------------------------------------------------------- |
|
1077 // |
|
// Prepares for and drives the creation of a temporary 3gp file from a
// generated (image) clip. On the very first call it creates the MP4
// composer, fills in default stream parameters and writes the file header;
// subsequent calls delegate to CreateImage3GPFilesL() for per-frame
// encoding / composing, based on aCreateMode and the internal counters
// iTotalImagesProcessed / iImageEncodeProcFinished.
// @param aCreateMode EVideoEncodeFrame or EVideoWriteFrameToFile
// @return KErrNone on success, KErrGeneral for an unexpected state,
//         1 if the video encoder unexpectedly does not exist.
TInt CMovieProcessorImpl::ProcessImageSetsL(TVideoOperation aCreateMode)
    {

    if (iProcessingCancelled)
        return KErrNone;

    // First call for this image set: set up composer + header, then fall
    // through to processing the first frame.
    if(iTotalImagesProcessed == 0 && iImageEncodeProcFinished == 0)
        {
        // Temp output path: c:\system\temp\Im_nokia_vpi.tmp
        TFileName outputFileName = TPtrC(KTempFilePath);
        iCurrentMovieName = outputFileName;
        iCurrentMovieName.Append( _L( "Im" ) );
        iCurrentMovieName.Append( _L( "_nokia_vpi.tmp" ) );

#ifdef _DEBUG
        // AVC output requires the AVC editing module to have been created
        if (iOutputVideoType == EVideoAVCProfileBaseline)
            VPASSERT(iImageAvcEdit);
#endif

        // Video-only composer for the generated clip (no audio track)
        iImageComposer = CMP4Composer::NewL(iCurrentMovieName, (CParser::TVideoFormat)iOutputVideoType,
                                            CParser::EAudioFormatNone,
                                            iImageAvcEdit);

        CComposer::TStreamParameters innerStreamParameters;
        /* there will be no video clips initially, so initialize to default parameters of movie */
        TSize innerRes =(TSize)iVideoClip->Info()->Resolution(); /* resolution from generator */
        innerStreamParameters.iVideoWidth = innerRes.iWidth; /* iVideoParameters.iWidth */
        innerStreamParameters.iVideoHeight = innerRes.iHeight; /* iVideoParameters.iHeight */;
        /* width and height are initialised to the proper values from the clip */
        innerStreamParameters.iStreamBitrate = 25000 /* iStreamBitrate */;

        /* set the duration of the video clip */
        // NOTE: despite the names, iTimeInMicro holds milliseconds
        // (clip duration in microseconds divided by 1000)
        TTimeIntervalMicroSeconds iTempVideoLength= TTimeIntervalMicroSeconds(iVideoClip->Info()->Duration());
        TInt64 iTimeInMicro = (iTempVideoLength.Int64()/1000);
        innerStreamParameters.iVideoLength= I64INT(iTimeInMicro); /* set the video length properly */
        innerStreamParameters.iStreamLength= I64INT(iTimeInMicro);
        innerStreamParameters.iAudioLength = 0;
        innerStreamParameters.iAudioFormat = (CComposer::TAudioFormat)0;
        CVedMovieImp* tempm = (iMovie);

        // Set the clip's transcode factor to the defaults for the chosen
        // output codec; the composer/encoder read it back later.
        if(iOutputVideoType == EVideoMPEG4)
            {
            /* initialize to default constants for generated clips in case of MPEG-4 */
            innerStreamParameters.iVideoFormat = (CComposer::TVideoFormat) CComposer::EVideoFormatMPEG4;
            TVedTranscodeFactor tempFact;
            tempFact.iStreamType = EVedVideoBitstreamModeMPEG4Regular;
            tempFact.iTRes = 29;
            CVedVideoClip* currentClip = tempm->VideoClip(iVideoClipNumber);
            CVedVideoClipInfo* currentInfo = currentClip->Info();
            currentInfo->SetTranscodeFactor(tempFact); /* set to default values, as initialized above */
            }
        else if ( (iOutputVideoType == EVideoH263Profile0Level10) ||
                  (iOutputVideoType == EVideoH263Profile0Level45) )
            {

            innerStreamParameters.iVideoFormat = (CComposer::TVideoFormat)iOutputVideoType;
            /* initialize to default constants for generated clips in case of H.263 */
            TVedTranscodeFactor tempFact;
            tempFact.iStreamType = EVedVideoBitstreamModeH263;
            tempFact.iTRes = 0;
            CVedVideoClip* currentClip = tempm->VideoClip(iVideoClipNumber);
            CVedVideoClipInfo* currentInfo = currentClip->Info();
            currentInfo->SetTranscodeFactor(tempFact); /* set to default values, as initialized above */
            }

#ifdef VIDEOEDITORENGINE_AVC_EDITING
        else if (iOutputVideoType == EVideoAVCProfileBaseline)
            {
            /* initialize to default constants for generated clips in case of AVC */
            innerStreamParameters.iVideoFormat = (CComposer::TVideoFormat) CComposer::EVideoFormatAVCProfileBaseline;
            TVedTranscodeFactor tempFact;
            tempFact.iStreamType = EVedVideoBitstreamModeAVC;
            tempFact.iTRes = 30;
            CVedVideoClip* currentClip = tempm->VideoClip(iVideoClipNumber);
            CVedVideoClipInfo* currentInfo = currentClip->Info();
            currentInfo->SetTranscodeFactor(tempFact); /* set to default values, as initialized above */
            }
#endif
        else
            User::Leave(KErrNotSupported);


        // All-generated movies use the movie-level resolution instead of
        // the generator's own resolution.
        if(iAllGeneratedClips == 1)
            {
            innerStreamParameters.iVideoWidth = iVideoParameters.iWidth;
            innerStreamParameters.iVideoHeight = iVideoParameters.iHeight;
            }
        innerStreamParameters.iCanSeek = ETrue;
        innerStreamParameters.iHaveVideo = ETrue;
        innerStreamParameters.iHaveAudio =EFalse;
        innerStreamParameters.iAudioFramesInSample =0;
        innerStreamParameters.iAudioTimeScale =KAMRAudioTimeScale; /* 8000 */
        innerStreamParameters.iVideoTimeScale =iImageVideoTimeScale;
        iImageComposer->ComposeHeaderL(innerStreamParameters,iImageVideoTimeScale /*iOutputVideoTimeScale*/,
                                       iOutputAudioTimeScale, iAudioFramesInSample);

        if(!iVideoEncoder)
            {
            /* It should never come here as iVideoEncoder is created before in hand */
            PRINT(_L("ERROR I VIDEOENCODER DOES NOT EXIST"));
            return 1; /* Indicating error */
            }
        else
            {
            //iVideoEncoder->Start(); /* make sure it is started only once */
            }

        // Process the first frame with whatever mode the caller requested
        TInt erInitialize = CreateImage3GPFilesL(aCreateMode);
        iTotalImagesProcessed++;
        return erInitialize;
        }
    else if (aCreateMode == EVideoEncodeFrame && iImageEncodeProcFinished == 0)
        {
        /* for encoding, you will read from file, so increment the number of images processed */
        TInt er = CreateImage3GPFilesL(aCreateMode);
        /* before incrementing the number of images processed, check if it has actually been encoded */

        iTotalImagesProcessed++;

        return er;
        }
    else if(aCreateMode == EVideoWriteFrameToFile && iImageEncodeProcFinished == 0)
        {
        // a frame has been encoded, write it to output 3gp buffer
        TInt er2 = CreateImage3GPFilesL(aCreateMode);
        return er2;
        }
    else if(aCreateMode == EVideoWriteFrameToFile && iImageEncodeProcFinished == 1)
        {
        return KErrGeneral; /* This should not happen */
        }
    else
        {
        /* This should not happen */
        return KErrGeneral;
        }
    }
|
1214 |
|
1215 |
|
1216 // ----------------------------------------------------------------------------- |
|
1217 // CMovieProcessorImpl::EncodeImageFrameL |
|
1218 // Encodes raw frames for 3gp generated clips |
|
1219 // The frame is in buffer pointed to by iReadImageDes. |
|
1220 // (other items were commented in a header). |
|
1221 // ----------------------------------------------------------------------------- |
|
1222 // |
|
1223 TInt CMovieProcessorImpl::EncodeImageFrameL() |
|
1224 { |
|
1225 |
|
1226 if (iProcessingCancelled) |
|
1227 return KErrNone; |
|
1228 |
|
1229 iVideoEncoder->Stop(); |
|
1230 |
|
1231 TTimeIntervalMicroSeconds inMSeconds = TTimeIntervalMicroSeconds(iVideoClip->Info()->Generator()->VideoFrameStartTime(iTotalImagesProcessed)); |
|
1232 |
|
1233 if (!IsActive()) |
|
1234 { |
|
1235 SetActive(); |
|
1236 iStatus = KRequestPending; |
|
1237 } |
|
1238 iVideoEncoder->EncodeFrameL(iReadImageDes, iStatus,inMSeconds); |
|
1239 |
|
1240 iTotalImagesProcessed++; /* Now we have encoded, and previously we had not, so increment now */ |
|
1241 iImageEncodedFlag = 1; |
|
1242 iEncodeInProgress = 1; /* set to indicate encoding in progress */ |
|
1243 |
|
1244 return KErrNone; |
|
1245 } |
|
1246 |
|
1247 // ----------------------------------------------------------------------------- |
|
// CMovieProcessorImpl::DoImageSetProcessL
|
1249 // Decides whether to encode or request for a frame from generator, |
|
1250 // and in what mode to call the CreateImageFiles function |
|
1251 // The encoding is done calling the GetFrame, so it goes through the frame generator |
|
1252 // (other items were commented in a header). |
|
1253 // ----------------------------------------------------------------------------- |
|
1254 // |
|
// State-machine step for generated-image clip processing, entered from
// RunL(). Depending on iImageEncodedFlag it either requests the next raw
// frame from the clip generator (GetFrameL, completes asynchronously) or,
// after an encode has completed, composes the frame to file and either
// finishes the image clip (tearing down composer/parser/encoder and
// re-initializing via InitializeGeneratedClipL) or requests the next frame.
void CMovieProcessorImpl::DoImageSetProcessL()
    {

    if (iProcessingCancelled)
        return;

    // we come here from RunL

    if(iImageEncodedFlag == 0 && iImageEncodeProcFinished == 0)
        {
        // Starting to process an image => issue GetFrame() -request

        if(iFirstTimeProcessing == 0)
            {
            // this will be known from a bit or flag from the video clip
            iFirstTimeProcessing = 1; // this indicates that this is the first time we are getting info about this video clip
            iTotalImagesProcessed = 0;
            iNumOfImages = (TInt)iVideoClip->Info()->VideoFrameCount();
            }
        TSize tempres = iMovie->Resolution();
        if(iTotalImagesProcessed < iNumOfImages)
            {
            iGetFrameInProgress = 1; // indicates that the GetFrame was called and will be in progress
            // Async request; the generator delivers the frame via callback
            iVideoClip->Info()->Generator()->GetFrameL(*this, /*0*/ iTotalImagesProcessed,&tempres, EColor4K, EFalse, 1);
            }
        else
            {
            // It should never come here though
            iImageClipCreated = 1;
            }
        }
    else if(iImageEncodedFlag == 1 && iImageEncodeProcFinished == 0)
        {
        // image has been encoded

        // tell the function to compose and return
        iEncodeInProgress = 0;
        iGetFrameInProgress = 0; // finished getting the frame, so if there's a cancel, no need to delete bitmap etc
        ProcessImageSetsL(EVideoWriteFrameToFile); // composing of VedVideoClipgenerator, though its not used inside
        if(iNumOfImages == iTotalImagesProcessed) /// if all images are over
            {
            // The whole image set has been composed: finalize the temp
            // 3gp file and tear down the modules used to create it.
            iImageClipCreated = 1;
            iImageEncodeProcFinished = 1; // finished creating the image 3GP file, so go ahead */
            iImageComposer->Close();

            delete iImageComposer;
            iImageComposer = 0;

            if(iParser)
                {
                delete iParser;
                iParser = 0;
                }

            /* set constant file name used as buffer */
            iClipFileName = TPtrC(KTempFilePath);
            iClipFileName.Append( _L("Im_nokia_vpi.tmp") );

            if (iImageAvcEdit)
                {
                delete iImageAvcEdit;
                iImageAvcEdit = 0;
                }

            if (iVideoEncoder)
                {
                iVideoEncoder->Stop();
                delete iVideoEncoder;
                iVideoEncoder = 0;
                }

            // Re-open the freshly written temp file as an ordinary clip
            InitializeGeneratedClipL();
            /* reset the number of images for the next image set */
            iImageEncodedFlag = 0;

            if(!IsActive())
                {
                SetActive(); // wait till the video encoder finishes initialising
                iStatus = KRequestPending;
                }
            // Release the raw YUV frame buffer used during encoding
            User::Free(iYuvImageBuf);
            iYuvImageBuf = 0;
            }
        else
            {
            // request for a new frame

            User::Free(iYuvImageBuf);
            iYuvImageBuf = 0;
            TSize tempres = iMovie->Resolution();
            iGetFrameInProgress = 1;
            iVideoClip->Info()->Generator()->GetFrameL(*this,iTotalImagesProcessed,&tempres,EColor4K,EFalse,1);
            }
        }
    }
|
1350 |
|
1351 |
|
1352 |
|
1353 |
|
1354 // ----------------------------------------------------------------------------- |
|
1355 // CMovieProcessorImpl::Reset |
|
1356 // Resets the processor for processing a new movie |
|
1357 // (other items were commented in a header). |
|
1358 // ----------------------------------------------------------------------------- |
|
1359 // |
|
1360 void CMovieProcessorImpl::ResetL() |
|
1361 { |
|
1362 |
|
1363 // delete clip structures |
|
1364 DeleteClipStructures(); |
|
1365 |
|
1366 // delete video processing modules |
|
1367 iEncoderInitPending = EFalse; |
|
1368 iState = EStatePreparing; |
|
1369 DoCloseVideoL(); |
|
1370 iState = EStateIdle; |
|
1371 |
|
1372 VPASSERT(!iEncoderInitPending); |
|
1373 VPASSERT(!iVideoEncoder); |
|
1374 |
|
1375 if (iComposer) |
|
1376 { |
|
1377 delete iComposer; |
|
1378 iComposer = 0; |
|
1379 } |
|
1380 |
|
1381 if(iImageComposer) |
|
1382 { |
|
1383 delete iImageComposer; |
|
1384 iImageComposer=0; |
|
1385 } |
|
1386 |
|
1387 if (iImageAvcEdit) |
|
1388 { |
|
1389 delete iImageAvcEdit; |
|
1390 iImageAvcEdit = 0; |
|
1391 } |
|
1392 |
|
1393 if (iYuvImageBuf) |
|
1394 { |
|
1395 User::Free(iYuvImageBuf); |
|
1396 iYuvImageBuf=0; |
|
1397 } |
|
1398 |
|
1399 // for transition effect |
|
1400 if ( iFsConnected ) |
|
1401 { |
|
1402 CloseTransitionInfoL(); |
|
1403 iFs.Close(); |
|
1404 iFsConnected = EFalse; |
|
1405 } |
|
1406 |
|
1407 iDataFormat = EDataAutoDetect; |
|
1408 iStreamBitrate = 0; |
|
1409 iNumDemuxChannels = 0; |
|
1410 iOutputNumberOfFrames = 0; |
|
1411 iVideoType = EVideoH263Profile0Level10; |
|
1412 iAudioType = EAudioAMR; |
|
1413 iFirstFrameOfClip = EFalse; |
|
1414 iFirstFrameFlagSet = EFalse; |
|
1415 |
|
1416 iFirstClipUsesEncoder = EFalse; |
|
1417 iMpeg4ModeTranscoded = EFalse; |
|
1418 |
|
1419 iOutputVideoTimeScale = KVideoTimeScale; |
|
1420 iOutputAudioTimeScale = KAMRAudioTimeScale; |
|
1421 iOutputVideoType = EVideoNone; |
|
1422 iOutputAudioType = EAudioAMR; |
|
1423 |
|
1424 iProcessingCancelled = EFalse; |
|
1425 |
|
1426 iStartTransitionEffect = EVedStartTransitionEffectNone; |
|
1427 iMiddleTransitionEffect = EVedMiddleTransitionEffectNone; |
|
1428 iPreviousMiddleTransitionEffect = EVedMiddleTransitionEffectNone; |
|
1429 iEndTransitionEffect = EVedEndTransitionEffectNone; |
|
1430 iWriting1stColorTransitionFrame = EFalse; |
|
1431 i1stColorTransitionFrameTS = 0; |
|
1432 |
|
1433 iApplySlowMotion = ETrue; |
|
1434 iCurrentVideoTimeInTicks = 0.0; |
|
1435 iInitialClipStartTimeStamp = 0; |
|
1436 iStartingProcessing = EFalse; |
|
1437 iFramesProcessed = 0; |
|
1438 iProgress = 0; |
|
1439 iGeneratedProcessed = 0; |
|
1440 iAudioProcessingCompleted = EFalse; |
|
1441 iEncodingBlackFrames = EFalse; |
|
1442 |
|
1443 iTotalDurationInSample = 0; |
|
1444 iAudioProcessingCancelled = EFalse; |
|
1445 iWaitSchedulerStarted = EFalse; |
|
1446 |
|
1447 iCurrentMovieName.Zero(); |
|
1448 iAudioClipWritten = 0; |
|
1449 iVideoClipWritten = 0; |
|
1450 iDiskFull = EFalse; |
|
1451 iAudioFrameNumber = 0; |
|
1452 iVideoFrameNumber = 0; |
|
1453 iFrameBuffered = EFalse; |
|
1454 iVideoIntraFrameNumber = 0; |
|
1455 iVideoClipNumber=0; |
|
1456 iStartFrameIndex = 0; |
|
1457 iVideoClip=0; |
|
1458 iMovie=0; |
|
1459 iSpeed = KMaxVideoSpeed; |
|
1460 iColorEffect = EVedColorEffectNone; |
|
1461 iNumberOfVideoClips=0; |
|
1462 iEncoderBuffer = 0; |
|
1463 iEncodePending = 0; |
|
1464 iVideoClipDuration = 0; |
|
1465 iLeftOverDuration = 0; |
|
1466 iTimeStamp = 0; |
|
1467 |
|
1468 iThumbnailInProgress=EFalse; |
|
1469 iTotalMovieDuration = 0; |
|
1470 iFramesProcessed=0; |
|
1471 iStartCutTime = TTimeIntervalMicroSeconds(0); |
|
1472 iEndCutTime = TTimeIntervalMicroSeconds(0); |
|
1473 |
|
1474 iTr.iTrPrevNew = -1; |
|
1475 iTr.iTrPrevOrig = -1; |
|
1476 |
|
1477 iAudioFramesInSample = KVedAudioFramesInSample; |
|
1478 iAudioFramesInBuffer = 0; |
|
1479 iOutAudioBuffer=0; |
|
1480 iNumberOfAudioClipsCreated = 0; |
|
1481 iCurrentAudioTimeInMs = 0; |
|
1482 iTotalAudioTimeWrittenMs = 0; |
|
1483 iNumberOfAudioClips=0; |
|
1484 iAudioClipNumber=0; |
|
1485 iTimeStampListScaled = EFalse; |
|
1486 |
|
1487 iCurrentVideoSize = 0; |
|
1488 iCurrentAudioSize = 0; |
|
1489 iMovieSizeLimitExceeded = EFalse; |
|
1490 |
|
1491 iAllVideoProcessed = EFalse; |
|
1492 |
|
1493 // We are now properly initialized |
|
1494 iState = EStateIdle; |
|
1495 } |
|
1496 |
|
1497 |
|
1498 // ----------------------------------------------------------------------------- |
|
1499 // CMovieProcessorImpl::CancelProcessingL |
|
1500 // Stops processing a movie |
|
1501 // (other items were commented in a header). |
|
1502 // ----------------------------------------------------------------------------- |
|
1503 // |
|
// Stops processing a movie: halts the demux/video/audio modules, tears
// down the processing pipeline and composers, cancels any pending frame
// request from a clip generator, and notifies the status monitor. Safe to
// call more than once — a repeated call only re-issues the cancelled
// callback. If an encoder init is still pending (iEncoderInitPending),
// the cancelled callback is deferred until RunL() handles that completion.
void CMovieProcessorImpl::CancelProcessingL()
    {

    PRINT((_L("CancelProcessingL begin, iEncoderInitPending %d, iEncodePending %d, iEncodeInProgress %d"),
           iEncoderInitPending, iEncodePending, iEncodeInProgress ));

#ifdef _DEBUG
    if (iVideoEncoder)
        PRINT((_L("CancelProcessingL() - iEncodePending in encoder %d"), iVideoEncoder->IsEncodePending()));
#endif

    // Idempotence guard: already cancelled, just re-notify the monitor
    if (iProcessingCancelled)
        {
        PRINT((_L("CancelProcessingL() - Already cancelled!")));

        if (iMonitor)
            iMonitor->ProcessingCancelled();

        return;
        }

    iProcessingCancelled = ETrue;

    // Stop the pipeline modules before deleting anything
    if (iDemux)
        iDemux->Stop();

    if (iVideoProcessor)
        iVideoProcessor->Stop();

    if (iAudioProcessor)
        iAudioProcessor->StopL();

    // delete all objects except status monitor
    DoCloseVideoL();

    if ( iVideoEncoder && iVideoEncoder->IsEncodePending() == 0 &&
         (iEncodePending || iEncodeInProgress) )
        {
        // encoder has completed encoding request, but scheduler has
        // not called RunL() yet. Reset flags so that the request will
        // be handled as init complete in RunL
        PRINT((_L("CancelProcessingL() - resetting encoding flags")));
        iEncoderInitPending = ETrue;
        iEncodePending = iEncodeInProgress = 0;
        }

    // close the rest

    // for transition effect
    if ( iFsConnected )
        {
        CloseTransitionInfoL();
        iFs.Close();
        iFsConnected = EFalse;
        }

    // A GetFrameL() request to a clip generator may still be outstanding;
    // cancel it so no callback arrives after teardown.
    if(iGetFrameInProgress == 1)
        {
        //VPASSERT(iEncodeInProgress == 0);
        iGetFrameInProgress = 0;

        if(iVideoClip->Info()->Class() == EVedVideoClipClassGenerated)
            {
            iVideoClip->Info()->Generator()->CancelFrame();
            }
        }

    if (iComposer)
        {
        iComposer->Close(); // creates the output file
        delete iComposer;
        iComposer = 0;
        }

    if(iImageComposer)
        {
        delete iImageComposer;
        iImageComposer=0;
        }

    if (iImageAvcEdit)
        {
        delete iImageAvcEdit;
        iImageAvcEdit = 0;
        }


    DeleteClipStructures();

    // If an encoder init completion is still pending, RunL() will deliver
    // the cancelled callback later; otherwise deliver it now.
    if (!iEncoderInitPending)
        {

        PRINT((_L("CMovieProcessorImpl::CancelProcessingL - calling cancelled callback")));
        // if StartMovieL() has not been called at this point,
        // there is no status monitor or observer to call
        if (iMonitor)
            iMonitor->ProcessingCancelled();

        iState = EStateIdle;
        }

    PRINT((_L("CMovieProcessorImpl::CancelProcessingL end")))

    }
|
1608 |
|
1609 |
|
1610 // ----------------------------------------------------------------------------- |
|
1611 // CMovieProcessorImpl::GetClipPropertiesL |
|
1612 // Retrieves parameters for a clip |
|
1613 // (other items were commented in a header). |
|
1614 // ----------------------------------------------------------------------------- |
|
1615 // |
|
1616 void CMovieProcessorImpl::GetClipPropertiesL(const TDesC& aFileName, |
|
1617 RFile* aFileHandle, |
|
1618 TVedVideoFormat& aFormat, |
|
1619 TVedVideoType& aVideoType, |
|
1620 TSize& aResolution, |
|
1621 TVedAudioType& aAudioType, |
|
1622 TTimeIntervalMicroSeconds& aDuration, |
|
1623 TInt& aVideoFrameCount, |
|
1624 TInt& aSamplingRate, |
|
1625 TVedAudioChannelMode& aChannelMode) |
|
1626 { |
|
1627 |
|
1628 PRINT((_L("CMovieProcessorImpl::GetClipPropertiesL() begin"))) |
|
1629 |
|
1630 TInt error = KErrNone; |
|
1631 if (!aFileHandle) |
|
1632 { |
|
1633 // Check that 3gp file exists. |
|
1634 |
|
1635 RFs fs; |
|
1636 User::LeaveIfError(fs.Connect()); |
|
1637 |
|
1638 RFile file; |
|
1639 error = file.Open(fs, aFileName, EFileShareReadersOnly | EFileStream | EFileRead); |
|
1640 if ( error != KErrNone ) |
|
1641 { |
|
1642 error = file.Open(fs, aFileName, EFileShareAny | EFileStream | EFileRead); |
|
1643 } |
|
1644 if (error == KErrNone) |
|
1645 { |
|
1646 file.Close(); |
|
1647 } |
|
1648 fs.Close(); |
|
1649 User::LeaveIfError(error); |
|
1650 |
|
1651 // get filename |
|
1652 iClipFileName = aFileName; |
|
1653 iClipFileHandle = NULL; |
|
1654 } |
|
1655 else |
|
1656 { |
|
1657 iClipFileHandle = aFileHandle; |
|
1658 iClipFileName.Zero(); |
|
1659 } |
|
1660 |
|
1661 CParser::TStreamParameters iStreamParams; |
|
1662 |
|
1663 // parse header |
|
1664 TRAP(error, ParseHeaderOnlyL(iStreamParams, iClipFileName, iClipFileHandle)); |
|
1665 |
|
1666 if (error != KErrNone && error != KErrNotSupported) |
|
1667 User::Leave(error); |
|
1668 |
|
1669 /* pass back clip properties */ |
|
1670 |
|
1671 // video format (file format actually) |
|
1672 if (iStreamParams.iFileFormat == CParser::EFileFormat3GP) |
|
1673 aFormat = EVedVideoFormat3GPP; |
|
1674 else if (iStreamParams.iFileFormat == CParser::EFileFormatMP4) |
|
1675 aFormat = EVedVideoFormatMP4; |
|
1676 else |
|
1677 aFormat = EVedVideoFormatUnrecognized; |
|
1678 |
|
1679 // video type |
|
1680 if(iStreamParams.iVideoFormat == CParser::EVideoFormatNone) |
|
1681 aVideoType = EVedVideoTypeNoVideo; |
|
1682 else if (iStreamParams.iVideoFormat == CParser::EVideoFormatH263Profile0Level10) |
|
1683 aVideoType = EVedVideoTypeH263Profile0Level10; |
|
1684 else if (iStreamParams.iVideoFormat == CParser::EVideoFormatH263Profile0Level45) |
|
1685 aVideoType = EVedVideoTypeH263Profile0Level45; |
|
1686 else if(iStreamParams.iVideoFormat == CParser::EVideoFormatMPEG4) |
|
1687 aVideoType = EVedVideoTypeMPEG4SimpleProfile; |
|
1688 else if(iStreamParams.iVideoFormat == CParser::EVideoFormatAVCProfileBaseline) |
|
1689 aVideoType = EVedVideoTypeAVCBaselineProfile; |
|
1690 else |
|
1691 aVideoType = EVedVideoTypeUnrecognized; |
|
1692 |
|
1693 // audio type |
|
1694 if(!iStreamParams.iHaveAudio/*iStreamParams.iAudioFormat == CParser::EAudioFormatNone*/) |
|
1695 aAudioType=EVedAudioTypeNoAudio; |
|
1696 else if(iStreamParams.iAudioFormat == CParser::EAudioFormatAMR) |
|
1697 aAudioType=EVedAudioTypeAMR; |
|
1698 else if (iStreamParams.iAudioFormat == CParser::EAudioFormatAAC) |
|
1699 aAudioType=EVedAudioTypeAAC_LC; // what about EVedAudioTypeAAC_LTP ??? |
|
1700 else |
|
1701 aAudioType=EVedAudioTypeUnrecognized; |
|
1702 |
|
1703 // Dummy values, update when AAC support is there |
|
1704 aSamplingRate = KVedAudioSamplingRate8k; |
|
1705 aChannelMode = EVedAudioChannelModeSingleChannel; |
|
1706 |
|
1707 // resolution |
|
1708 aResolution.iWidth = iStreamParams.iVideoWidth; |
|
1709 aResolution.iHeight = iStreamParams.iVideoHeight; |
|
1710 |
|
1711 // common |
|
1712 TUint duration = (iStreamParams.iVideoLength > iStreamParams.iAudioLength ? |
|
1713 iStreamParams.iVideoLength : iStreamParams.iAudioLength); |
|
1714 aDuration = TTimeIntervalMicroSeconds( TInt64(duration) * TInt64(1000) ); |
|
1715 |
|
1716 // get total number of video frames |
|
1717 aVideoFrameCount = iParser->GetNumberOfVideoFrames(); |
|
1718 |
|
1719 /***************IF Audio Type is AAC get the audio properties************************/ |
|
1720 |
|
1721 if(iStreamParams.iAudioFormat == CParser::EAudioFormatAAC) |
|
1722 { |
|
1723 //temporarily initialize iOutputAudioType and iAudioType as AudioProcessor uses it |
|
1724 iOutputAudioType = EAudioAAC; |
|
1725 iAudioType = EAudioAAC; |
|
1726 } |
|
1727 |
|
1728 PRINT((_L("CMovieProcessorImpl::GetClipPropertiesL() end"))) |
|
1729 } |
|
1730 |
|
1731 |
|
1732 |
|
1733 |
|
1734 // ----------------------------------------------------------------------------- |
|
1735 // CMovieProcessorImpl::GenerateVideoFrameInfoArray |
|
1736 // Retrieves frames parameters for a clip to array |
|
1737 // (other items were commented in a header). |
|
1738 // ----------------------------------------------------------------------------- |
|
1739 // |
|
1740 void CMovieProcessorImpl::GenerateVideoFrameInfoArrayL(const TDesC& aFileName, RFile* aFileHandle, |
|
1741 TVedVideoFrameInfo*& aVideoFrameInfoArray) |
|
1742 { |
|
1743 |
|
1744 PRINT((_L("CMovieProcessorImpl::GenerateVideoFrameInfoArray() begin"))) |
|
1745 |
|
1746 TInt error; |
|
1747 |
|
1748 if (!aFileHandle) |
|
1749 { |
|
1750 // Check that 3gp file exists. |
|
1751 RFs fs; |
|
1752 User::LeaveIfError(fs.Connect()); |
|
1753 |
|
1754 RFile file; |
|
1755 error = file.Open(fs, aFileName, EFileShareReadersOnly | EFileStream | EFileRead); |
|
1756 if ( error != KErrNone ) |
|
1757 { |
|
1758 error = file.Open(fs, aFileName, EFileShareAny | EFileStream | EFileRead); |
|
1759 } |
|
1760 if (error == KErrNone) |
|
1761 { |
|
1762 file.Close(); |
|
1763 } |
|
1764 fs.Close(); |
|
1765 User::LeaveIfError(error); |
|
1766 } |
|
1767 |
|
1768 // parse clip header |
|
1769 CParser::TStreamParameters streamParams; |
|
1770 |
|
1771 // get filename |
|
1772 if (aFileHandle) |
|
1773 { |
|
1774 iClipFileName.Zero(); |
|
1775 iClipFileHandle = aFileHandle; |
|
1776 } |
|
1777 else |
|
1778 { |
|
1779 iClipFileHandle = NULL; |
|
1780 iClipFileName = aFileName; |
|
1781 } |
|
1782 |
|
1783 // parse header |
|
1784 TRAP(error, ParseHeaderOnlyL(streamParams, iClipFileName, iClipFileHandle)); |
|
1785 |
|
1786 if (error != KErrNone && error != KErrNotSupported) |
|
1787 { |
|
1788 User::Leave( error ); |
|
1789 } |
|
1790 |
|
1791 // video type |
|
1792 TVedVideoType videoType = EVedVideoTypeNoVideo; |
|
1793 if(streamParams.iVideoFormat == CParser::EVideoFormatNone) |
|
1794 videoType = EVedVideoTypeNoVideo; |
|
1795 else if(streamParams.iVideoFormat == CParser::EVideoFormatH263Profile0Level10) |
|
1796 videoType = EVedVideoTypeH263Profile0Level10; |
|
1797 else if(streamParams.iVideoFormat == CParser::EVideoFormatH263Profile0Level45) |
|
1798 videoType = EVedVideoTypeH263Profile0Level45; |
|
1799 else if(streamParams.iVideoFormat == CParser::EVideoFormatMPEG4) |
|
1800 videoType = EVedVideoTypeMPEG4SimpleProfile; |
|
1801 else if(streamParams.iVideoFormat == CParser::EVideoFormatAVCProfileBaseline) |
|
1802 videoType = EVedVideoTypeAVCBaselineProfile; |
|
1803 else |
|
1804 { |
|
1805 User::Leave(KErrNotSupported); |
|
1806 } |
|
1807 |
|
1808 // frame parameters |
|
1809 if( (videoType == EVedVideoTypeH263Profile0Level10) || |
|
1810 (videoType == EVedVideoTypeH263Profile0Level45) || |
|
1811 (videoType == EVedVideoTypeMPEG4SimpleProfile) || |
|
1812 (videoType == EVedVideoTypeAVCBaselineProfile) ) |
|
1813 { |
|
1814 TInt frameCount = 0; |
|
1815 FillVideoFrameInfoArrayL(frameCount, (TVedVideoFrameInfo*&)aVideoFrameInfoArray); |
|
1816 } |
|
1817 |
|
1818 PRINT((_L("CMovieProcessorImpl::GenerateVideoFrameInfoArray() end"))) |
|
1819 } |
|
1820 |
|
1821 // ----------------------------------------------------------------------------- |
|
1822 // CMovieProcessorImpl::FillVideoFrameInfoArray |
|
1823 // Fills an array containing video frame parameters: size, start time & type |
|
1824 // (other items were commented in a header). |
|
1825 // ----------------------------------------------------------------------------- |
|
1826 // |
|
1827 |
|
1828 void CMovieProcessorImpl::FillVideoFrameInfoArrayL(TInt& aVideoFrameCount, |
|
1829 TVedVideoFrameInfo*& aVideoFrameInfoArray) |
|
1830 { |
|
1831 |
|
1832 PRINT((_L("CMovieProcessorImpl::FillVideoFrameInfoArrayL() begin"))) |
|
1833 |
|
1834 // get total number of video frames |
|
1835 aVideoFrameCount = iParser->GetNumberOfVideoFrames(); |
|
1836 VPASSERT(aVideoFrameCount); |
|
1837 // create memory for frame parameters - DO NOT delete it in MediaProcessorImpl |
|
1838 if(aVideoFrameInfoArray) |
|
1839 { |
|
1840 delete aVideoFrameInfoArray; |
|
1841 aVideoFrameInfoArray=0; |
|
1842 } |
|
1843 |
|
1844 // get individual frame parameters |
|
1845 aVideoFrameInfoArray = (TVedVideoFrameInfo*)User::AllocL(aVideoFrameCount * sizeof(class TVedVideoFrameInfo)); |
|
1846 |
|
1847 TFrameInfoParameters* frameInfoArray = (TFrameInfoParameters*)User::AllocZL((aVideoFrameCount) * sizeof(struct TFrameInfoParameters)); |
|
1848 |
|
1849 TInt i; |
|
1850 // Get all the frame parameters using the new function |
|
1851 CMP4Parser* parser = (CMP4Parser*)iParser; |
|
1852 TInt startIndex =0; |
|
1853 TInt err = parser->GetVideoFrameProperties(frameInfoArray,startIndex,aVideoFrameCount); |
|
1854 if(err !=0) |
|
1855 User::Leave(KErrAbort); |
|
1856 |
|
1857 for(i=0; i<aVideoFrameCount; i++) |
|
1858 { |
|
1859 aVideoFrameInfoArray[i].iSize = (TInt)frameInfoArray[i].iSize; |
|
1860 aVideoFrameInfoArray[i].iStartTime = (TInt)frameInfoArray[i].iStartTime; |
|
1861 if(frameInfoArray[i].iType) |
|
1862 { |
|
1863 PRINT((_L("CMovieProcessorImpl::FillVideoFrameInfoArrayL() iType of %d nonzero, time %d"), i, aVideoFrameInfoArray[i].iStartTime)) |
|
1864 aVideoFrameInfoArray[i].iFlags = 1; |
|
1865 } |
|
1866 else |
|
1867 { |
|
1868 PRINT((_L("CMovieProcessorImpl::FillVideoFrameInfoArrayL() iType of %d zero, time %d"), i, aVideoFrameInfoArray[i].iStartTime)) |
|
1869 aVideoFrameInfoArray[i].iFlags = 0; |
|
1870 } |
|
1871 } |
|
1872 User::Free(frameInfoArray); |
|
1873 frameInfoArray=0; |
|
1874 |
|
1875 PRINT((_L("CMovieProcessorImpl::FillVideoFrameInfoArrayL() end"))) |
|
1876 |
|
1877 } |
|
1878 |
|
1879 // ----------------------------------------------------------------------------- |
|
1880 // CMovieProcessorImpl::FillFrameParameters |
|
1881 // Fills an internal array containing parameters for each video frame : |
|
1882 // (other items were commented in a header). |
|
1883 // ----------------------------------------------------------------------------- |
|
1884 // |
|
1885 void CMovieProcessorImpl::FillFrameParametersL(TInt aCurrentFrameIndex) |
|
1886 { |
|
1887 PRINT((_L("CMovieProcessorImpl::FillFrameParameters() begin, current index %d"),aCurrentFrameIndex)) |
|
1888 |
|
1889 // get total number of video frames |
|
1890 TInt numberOfFrames = iParser->GetNumberOfFrames(); |
|
1891 TInt frameNumber = 0; |
|
1892 |
|
1893 // get start frame index in the input clip |
|
1894 iStartFrameIndex = iParser->GetStartFrameIndex(); |
|
1895 |
|
1896 TInt cutOutTime = 0; |
|
1897 if (!iThumbnailInProgress) |
|
1898 cutOutTime = I64INT( iVideoClip->CutOutTime().Int64() / TInt64(1000) ); |
|
1899 |
|
1900 TFrameInfoParameters* frameInfoArray = |
|
1901 (TFrameInfoParameters*)User::AllocZL((numberOfFrames) * sizeof(struct TFrameInfoParameters)); |
|
1902 |
|
1903 CleanupStack::PushL(frameInfoArray); |
|
1904 |
|
1905 // get info array from parser |
|
1906 CMP4Parser* parser = (CMP4Parser*)iParser; |
|
1907 TInt error = parser->GetVideoFrameProperties(frameInfoArray, iStartFrameIndex, numberOfFrames); |
|
1908 if (error != 0) |
|
1909 User::Leave(KErrAbort); |
|
1910 |
|
1911 while ( frameNumber < numberOfFrames ) |
|
1912 { |
|
1913 iFrameParameters[aCurrentFrameIndex].iTimeStamp = |
|
1914 GetVideoTimeInTicksFromMs( TInt64(frameInfoArray[frameNumber].iStartTime), EFalse ); |
|
1915 iFrameParameters[aCurrentFrameIndex].iType = TUint8( frameInfoArray[frameNumber].iType ); |
|
1916 |
|
1917 if (!iThumbnailInProgress && frameInfoArray[frameNumber].iStartTime > cutOutTime) |
|
1918 { |
|
1919 break; |
|
1920 } |
|
1921 |
|
1922 frameNumber++; |
|
1923 aCurrentFrameIndex++; |
|
1924 } |
|
1925 |
|
1926 CleanupStack::PopAndDestroy(frameInfoArray); |
|
1927 |
|
1928 PRINT((_L("CMovieProcessorImpl::FillFrameParameters() end"))) |
|
1929 } |
|
1930 |
|
1931 |
|
1932 // ----------------------------------------------------------------------------- |
|
1933 // CMovieProcessorImpl::IncProgressBar |
|
1934 // Report progress to observer |
|
1935 // (other items were commented in a header). |
|
1936 // ----------------------------------------------------------------------------- |
|
1937 //- |
|
1938 void CMovieProcessorImpl::IncProgressBar() |
|
1939 { |
|
1940 VPASSERT(iTotalMovieDuration > 0); |
|
1941 |
|
1942 TInt64 msProcessed = iGeneratedProcessed + iTotalAudioTimeWrittenMs + |
|
1943 GetVideoTimeInMsFromTicks(iCurrentVideoTimeInTicks, ETrue); |
|
1944 |
|
1945 TInt percentage = TInt( ( (I64REAL(msProcessed) / I64REAL(iTotalMovieDuration)) * 100.0) + 0.5 ); |
|
1946 |
|
1947 //VPASSERT( percentage <= 100 ); |
|
1948 |
|
1949 if (percentage > iProgress && percentage <= 100) |
|
1950 { |
|
1951 iProgress = percentage; |
|
1952 iMonitor->Progress(iProgress); |
|
1953 } |
|
1954 } |
|
1955 |
|
1956 // ----------------------------------------------------------------------------- |
|
1957 // CMovieProcessorImpl::GetMovieSizeEstimateL |
|
1958 // Calculates an estimate for resulting movie size |
|
1959 // (other items were commented in a header). |
|
1960 // ----------------------------------------------------------------------------- |
|
1961 // |
|
1962 TInt CMovieProcessorImpl::GetMovieSizeEstimateL(const CVedMovie* aMovie) |
|
1963 { |
|
1964 TInt fileSize=0; |
|
1965 iSizeEstimate->GetMovieSizeEstimateL(aMovie, (TInt&)fileSize); |
|
1966 return fileSize; |
|
1967 } |
|
1968 |
|
1969 |
|
1970 // ----------------------------------------------------------------------------- |
|
1971 // CMovieProcessorImpl::GetMovieSizeEstimateForMMSL |
|
1972 // Calculates file size estimate for MMS use |
|
1973 // (other items were commented in a header). |
|
1974 // ----------------------------------------------------------------------------- |
|
1975 // |
|
1976 TInt CMovieProcessorImpl::GetMovieSizeEstimateForMMSL(const CVedMovie* aMovie, |
|
1977 TInt aTargetSize, |
|
1978 TTimeIntervalMicroSeconds aStartTime, |
|
1979 TTimeIntervalMicroSeconds& aEndTime) |
|
1980 { |
|
1981 return iSizeEstimate->GetMovieSizeEstimateForMMSL(aMovie, aTargetSize, aStartTime, aEndTime); |
|
1982 } |
|
1983 |
|
1984 |
|
1985 |
|
1986 // ----------------------------------------------------------------------------- |
|
1987 // CMovieProcessorImpl::StartThumbL |
|
1988 // Initiates thumbnail extraction from clip (full resolution raw is reutrned) |
|
1989 // (other items were commented in a header). |
|
1990 // ----------------------------------------------------------------------------- |
|
1991 // |
|
1992 void CMovieProcessorImpl::StartThumbL(const TDesC& aFileName, |
|
1993 RFile* aFileHandle, |
|
1994 TInt aIndex, |
|
1995 TSize aResolution, |
|
1996 TDisplayMode aDisplayMode, |
|
1997 TBool aEnhance) |
|
1998 { |
|
1999 |
|
2000 PRINT((_L("CMovieProcessorImpl::StartThumbL() begin, aIndex = %d, enhance = %d"), aIndex, aEnhance)) |
|
2001 |
|
2002 if (!aFileHandle) |
|
2003 { |
|
2004 //Check that 3gp file exists. |
|
2005 RFs fs; |
|
2006 User::LeaveIfError(fs.Connect()); |
|
2007 RFile file; |
|
2008 TInt error = file.Open(fs, aFileName, EFileShareReadersOnly | EFileStream | EFileRead); |
|
2009 if ( error != KErrNone ) |
|
2010 { |
|
2011 error = file.Open(fs, aFileName, EFileShareAny | EFileStream | EFileRead); |
|
2012 } |
|
2013 if (error == KErrNone) |
|
2014 { |
|
2015 file.Close(); |
|
2016 } |
|
2017 fs.Close(); |
|
2018 User::LeaveIfError(error); |
|
2019 } |
|
2020 |
|
2021 // get thumbnail parameters |
|
2022 if (aFileHandle) |
|
2023 { |
|
2024 iClipFileName.Zero(); |
|
2025 iClipFileHandle = aFileHandle; |
|
2026 } |
|
2027 else |
|
2028 { |
|
2029 iClipFileHandle = NULL; |
|
2030 iClipFileName = aFileName; |
|
2031 } |
|
2032 |
|
2033 iOutputThumbResolution.SetSize(aResolution.iWidth, aResolution.iHeight); |
|
2034 iThumbIndex = aIndex; |
|
2035 iThumbDisplayMode = aDisplayMode; |
|
2036 iThumbEnhance = aEnhance; |
|
2037 |
|
2038 iThumbnailInProgress = ETrue; |
|
2039 |
|
2040 // initialization |
|
2041 InitializeClipStructuresL(); |
|
2042 |
|
2043 InitializeClipL(); // opens the file & parses header |
|
2044 |
|
2045 // update number of frames |
|
2046 SetOutputNumberOfFrames(iParser->iOutputNumberOfFrames); |
|
2047 |
|
2048 PRINT((_L("CMovieProcessorImpl::StartThumbL() end"))) |
|
2049 |
|
2050 } |
|
2051 |
|
2052 // ----------------------------------------------------------------------------- |
|
2053 // CMovieProcessorImpl::ProcessThumbL |
|
2054 // Generates thumbnail from clip (actually, full resolution raw is returned) |
|
2055 // (other items were commented in a header). |
|
2056 // ----------------------------------------------------------------------------- |
|
2057 // |
|
2058 void CMovieProcessorImpl::ProcessThumbL(TRequestStatus &aStatus, TVedTranscodeFactor* aFactor) |
|
2059 { |
|
2060 |
|
2061 PRINT((_L("CMovieProcessorImpl::ProcessThumbL() begin"))) |
|
2062 |
|
2063 iState = EStateProcessing; |
|
2064 iThumbnailRequestStatus = &aStatus; |
|
2065 |
|
2066 // seek to the last intra frame before desired frame |
|
2067 TTimeIntervalMicroSeconds startTime(0); |
|
2068 if ( iThumbIndex > 0 ) |
|
2069 { |
|
2070 TInt time = 0; |
|
2071 TUint inMs = TUint( iParser->GetVideoFrameStartTime(iThumbIndex, &time) ); |
|
2072 TInt64 inMicroS = TInt64( inMs ) * TInt64( 1000 ); |
|
2073 startTime = TTimeIntervalMicroSeconds( inMicroS ); |
|
2074 } |
|
2075 |
|
2076 // iOutputNumberOFrames contains the total amount of frames in clip |
|
2077 // without cutting |
|
2078 SetOutputNumberOfFrames(iParser->iOutputNumberOfFrames); |
|
2079 |
|
2080 TInt error = iParser->SeekOptimalIntraFrame(startTime, iThumbIndex, ETrue); |
|
2081 if (error != KErrNone) |
|
2082 { |
|
2083 iThumbnailRequestStatus = 0; |
|
2084 User::Leave(KErrGeneral); |
|
2085 } |
|
2086 iStartFrameIndex = iParser->GetStartFrameIndex(); |
|
2087 VPASSERT(iStartFrameIndex >= 0); |
|
2088 |
|
2089 error = iVideoProcessor->ProcessThumb(this, iThumbIndex, iStartFrameIndex, aFactor); |
|
2090 if (error != KErrNone) |
|
2091 { |
|
2092 iThumbnailRequestStatus = 0; |
|
2093 User::Leave(error); |
|
2094 } |
|
2095 } |
|
2096 |
|
2097 |
|
2098 // ----------------------------------------------------------------------------- |
|
2099 // CMovieProcessorImpl::NotifyThumbnailReady |
|
2100 // Called by thumbnail generator when thumbnail is ready |
|
2101 // for retrieval |
|
2102 // (other items were commented in a header). |
|
2103 // ----------------------------------------------------------------------------- |
|
2104 // |
|
2105 void CMovieProcessorImpl::NotifyThumbnailReady(TInt aError) |
|
2106 { |
|
2107 |
|
2108 PRINT((_L("CMovieProcessorImpl::NotifyThumbnailReady() begin"))) |
|
2109 |
|
2110 if (HandleThumbnailError(aError)) |
|
2111 return; |
|
2112 |
|
2113 // get YUV thumb |
|
2114 iVideoProcessor->FetchThumb(&iYuvBuf); |
|
2115 |
|
2116 // check validity of thumbnail and associated operation |
|
2117 if(iThumbEnhance) // for saving to file |
|
2118 { |
|
2119 if(iThumbDisplayMode == ENone) // if no preference |
|
2120 iThumbDisplayMode = EColor16M; // 24-bit color image for enhancement |
|
2121 else if(iThumbDisplayMode != EColor16M) // invalid combination |
|
2122 { |
|
2123 HandleThumbnailError(KErrNotSupported); |
|
2124 return; |
|
2125 } |
|
2126 } |
|
2127 else // for screen display |
|
2128 { |
|
2129 if(iThumbDisplayMode == ENone) // if no preference |
|
2130 iThumbDisplayMode = EColor64K; // 16-bit image for some products |
|
2131 } |
|
2132 |
|
2133 TInt bytesPerPixel = 0; |
|
2134 // determine proper bit depth for the bitmap |
|
2135 if(iThumbDisplayMode == EColor16M) |
|
2136 bytesPerPixel = 3; // 24-bit rgb takes 3 bytes, stored as bbbbbbbb gggggggg rrrrrrrr |
|
2137 else if(iThumbDisplayMode == EColor64K || iThumbDisplayMode == EColor4K) |
|
2138 bytesPerPixel = 2; // 12-bit rgb takes 2 bytes, stored as ggggbbbb xxxxrrrr |
|
2139 else |
|
2140 { |
|
2141 HandleThumbnailError(KErrNotSupported); |
|
2142 return; // support for 12-, 16- and 24-bit color images only |
|
2143 } |
|
2144 |
|
2145 TInt error; |
|
2146 if ( !iRgbBuf ) |
|
2147 { |
|
2148 TSize inputFrameResolution(iParser->iStreamParameters.iVideoWidth,iParser->iStreamParameters.iVideoHeight); |
|
2149 |
|
2150 // rgb specs |
|
2151 TUint thumbLength = inputFrameResolution.iWidth * inputFrameResolution.iHeight; |
|
2152 TUint thumbUVLength = thumbLength>>2; |
|
2153 |
|
2154 VPASSERT(iYuvBuf); |
|
2155 // assign yuv pointers |
|
2156 TUint8* yBuf = iYuvBuf; |
|
2157 TUint8* uBuf = yBuf + thumbLength; |
|
2158 TUint8* vBuf = uBuf + thumbUVLength; |
|
2159 |
|
2160 // create output rgb buffer |
|
2161 TRAP(error, iRgbBuf = (TUint8*) User::AllocL(thumbLength * bytesPerPixel)); |
|
2162 if (HandleThumbnailError(error)) |
|
2163 return; |
|
2164 |
|
2165 TInt scanLineLength; |
|
2166 |
|
2167 // convert yuv to rgb |
|
2168 switch (iThumbDisplayMode) |
|
2169 { |
|
2170 |
|
2171 case EColor4K: |
|
2172 { |
|
2173 TInt error; |
|
2174 CYuv2Rgb12* yuvConverter = NULL; |
|
2175 TRAP(error, yuvConverter = new(ELeave) CYuv2Rgb12); |
|
2176 if (HandleThumbnailError(error)) |
|
2177 return; |
|
2178 scanLineLength = inputFrameResolution.iWidth * bytesPerPixel; |
|
2179 VPASSERT(yuvConverter); |
|
2180 TRAP(error, yuvConverter->ConstructL(inputFrameResolution.iWidth, inputFrameResolution.iHeight, inputFrameResolution.iWidth, inputFrameResolution.iHeight)); |
|
2181 if (HandleThumbnailError(error)) |
|
2182 return; |
|
2183 yuvConverter->Convert(yBuf, uBuf, vBuf, inputFrameResolution.iWidth, inputFrameResolution.iHeight, iRgbBuf, scanLineLength); |
|
2184 delete yuvConverter; |
|
2185 yuvConverter=0; |
|
2186 } |
|
2187 break; |
|
2188 |
|
2189 default: |
|
2190 case EColor64K: |
|
2191 { |
|
2192 TInt error; |
|
2193 CYuv2Rgb16* yuvConverter = NULL; |
|
2194 TRAP(error, yuvConverter = new(ELeave) CYuv2Rgb16); |
|
2195 if (HandleThumbnailError(error)) |
|
2196 return; |
|
2197 scanLineLength = inputFrameResolution.iWidth * bytesPerPixel; |
|
2198 VPASSERT(yuvConverter); |
|
2199 TRAP(error, yuvConverter->ConstructL(inputFrameResolution.iWidth, inputFrameResolution.iHeight, inputFrameResolution.iWidth, inputFrameResolution.iHeight);) |
|
2200 if (HandleThumbnailError(error)) |
|
2201 return; |
|
2202 yuvConverter->Convert(yBuf, uBuf, vBuf, inputFrameResolution.iWidth, inputFrameResolution.iHeight, iRgbBuf, scanLineLength); |
|
2203 delete yuvConverter; |
|
2204 yuvConverter=0; |
|
2205 } |
|
2206 break; |
|
2207 |
|
2208 case EColor16M: |
|
2209 { |
|
2210 TInt error; |
|
2211 CYuv2Rgb24* yuvConverter = NULL; |
|
2212 TRAP(error, yuvConverter = new(ELeave) CYuv2Rgb24); |
|
2213 if (HandleThumbnailError(error)) |
|
2214 return; |
|
2215 scanLineLength = inputFrameResolution.iWidth * bytesPerPixel; |
|
2216 VPASSERT(yuvConverter); |
|
2217 TRAP(error, yuvConverter->ConstructL(inputFrameResolution.iWidth, inputFrameResolution.iHeight, inputFrameResolution.iWidth, inputFrameResolution.iHeight)) |
|
2218 if (HandleThumbnailError(error)) |
|
2219 return; |
|
2220 yuvConverter->Convert(yBuf, uBuf, vBuf, inputFrameResolution.iWidth, inputFrameResolution.iHeight, iRgbBuf, scanLineLength); |
|
2221 delete yuvConverter; |
|
2222 yuvConverter=0; |
|
2223 } |
|
2224 break; |
|
2225 } |
|
2226 } |
|
2227 |
|
2228 //CFbsBitmap* iOutBitmap = 0; |
|
2229 |
|
2230 if(!iThumbEnhance) |
|
2231 { |
|
2232 const TSize inputFrameResolution(iParser->iStreamParameters.iVideoWidth,iParser->iStreamParameters.iVideoHeight); |
|
2233 |
|
2234 /* Pre-calculate pixel indices for horizontal scaling. */ |
|
2235 // inputFrameResolution is the resolution of the image read from video clip. |
|
2236 // iOutputThumbResolution is the final resolution desired by the caller. |
|
2237 |
|
2238 const TInt xIncrement = inputFrameResolution.iWidth * iOutputThumbResolution.iWidth; |
|
2239 const TInt xBoundary = iOutputThumbResolution.iWidth * iOutputThumbResolution.iWidth; |
|
2240 |
|
2241 TInt* xIndices = 0; |
|
2242 TRAPD(xIndicesErr, xIndices = new (ELeave) TInt[iOutputThumbResolution.iWidth]); |
|
2243 if (xIndicesErr == KErrNone) |
|
2244 { |
|
2245 TInt xDecision = xIncrement / bytesPerPixel; |
|
2246 TInt sourceIndex = 0; |
|
2247 for (TInt x = 0; x < iOutputThumbResolution.iWidth; x++) |
|
2248 { |
|
2249 while (xDecision > xBoundary) |
|
2250 { |
|
2251 xDecision -= xBoundary; |
|
2252 sourceIndex += bytesPerPixel; |
|
2253 } |
|
2254 |
|
2255 xIndices[x] = sourceIndex; |
|
2256 xDecision += xIncrement; |
|
2257 } |
|
2258 } |
|
2259 else |
|
2260 { |
|
2261 HandleThumbnailError(xIndicesErr); |
|
2262 return; |
|
2263 } |
|
2264 |
|
2265 /* Initialize bitmap. */ |
|
2266 TRAPD(bitmapErr, iOutBitmap = new (ELeave) CFbsBitmap); |
|
2267 if ((xIndicesErr == KErrNone) && (bitmapErr == KErrNone)) |
|
2268 { |
|
2269 bitmapErr = iOutBitmap->Create(iOutputThumbResolution, iThumbDisplayMode /*EColor64K*/); |
|
2270 if (bitmapErr == KErrNone) |
|
2271 { |
|
2272 // Lock the heap to prevent the FBS server from invalidating the address |
|
2273 iOutBitmap->LockHeap(); |
|
2274 |
|
2275 /* Scale to desired iOutputThumbResolution and copy to bitmap. */ |
|
2276 TUint8* dataAddress = (TUint8*)iOutBitmap->DataAddress(); |
|
2277 const TInt yIncrement = inputFrameResolution.iHeight * iOutputThumbResolution.iHeight; |
|
2278 const TInt yBoundary = iOutputThumbResolution.iHeight * iOutputThumbResolution.iHeight; |
|
2279 |
|
2280 TInt targetIndex = 0; |
|
2281 TInt sourceRowIndex = 0; |
|
2282 TInt yDecision = yIncrement / 2; |
|
2283 for (TInt y = 0; y < iOutputThumbResolution.iHeight; y++) |
|
2284 { |
|
2285 while (yDecision > yBoundary) |
|
2286 { |
|
2287 yDecision -= yBoundary; |
|
2288 sourceRowIndex += (inputFrameResolution.iWidth * bytesPerPixel); |
|
2289 } |
|
2290 yDecision += yIncrement; |
|
2291 |
|
2292 for (TInt x = 0; x < iOutputThumbResolution.iWidth; x++) |
|
2293 { |
|
2294 for (TInt i = 0; i < bytesPerPixel; ++i) |
|
2295 { |
|
2296 const TInt firstPixelSourceIndex = sourceRowIndex + xIndices[x] + i; |
|
2297 dataAddress[targetIndex] = iRgbBuf[firstPixelSourceIndex]; |
|
2298 targetIndex++; |
|
2299 } |
|
2300 } |
|
2301 } |
|
2302 iOutBitmap->UnlockHeap(); |
|
2303 } |
|
2304 else |
|
2305 { |
|
2306 delete iOutBitmap; iOutBitmap = 0; |
|
2307 HandleThumbnailError(bitmapErr); |
|
2308 return; |
|
2309 } |
|
2310 } |
|
2311 else |
|
2312 { |
|
2313 HandleThumbnailError(bitmapErr); |
|
2314 delete[] xIndices; xIndices = 0; |
|
2315 return; |
|
2316 } |
|
2317 |
|
2318 delete[] xIndices; |
|
2319 xIndices = 0; |
|
2320 } |
|
2321 else // enhance |
|
2322 { |
|
2323 TInt i,j; |
|
2324 // create input bitmap and buffer |
|
2325 CFbsBitmap* inBitmap = 0; |
|
2326 TRAPD(inBitmapErr, inBitmap = new (ELeave) CFbsBitmap); |
|
2327 if( inBitmapErr == KErrNone ) |
|
2328 { |
|
2329 // create bitmaps |
|
2330 TSize originalResolution(iParser->iStreamParameters.iVideoWidth, iParser->iStreamParameters.iVideoHeight); |
|
2331 inBitmapErr = inBitmap->Create(originalResolution, iThumbDisplayMode/*EColor16M*/); |
|
2332 |
|
2333 if( inBitmapErr == KErrNone ) |
|
2334 { |
|
2335 // fill image from rgb buffer to input bitmap buffer |
|
2336 TPtr8 linePtr(0,0); |
|
2337 TInt lineLength = inBitmap->ScanLineLength(originalResolution.iWidth, iThumbDisplayMode); |
|
2338 for(j=0, i=0; j<originalResolution.iHeight; j++, i+=lineLength) |
|
2339 { |
|
2340 linePtr.Set(iRgbBuf+i, lineLength, lineLength); |
|
2341 inBitmap->SetScanLine((TDes8&)linePtr,j); |
|
2342 } |
|
2343 |
|
2344 // create output bitmap |
|
2345 TRAPD(outBitmapErr, iOutBitmap = new (ELeave) CFbsBitmap); |
|
2346 if( outBitmapErr == KErrNone ) |
|
2347 { |
|
2348 outBitmapErr = iOutBitmap->Create(iOutputThumbResolution, iThumbDisplayMode/*EColor16M*/); // same size as input frame |
|
2349 |
|
2350 if( outBitmapErr == KErrNone ) |
|
2351 { |
|
2352 // post-processing enhancement |
|
2353 TRAP(outBitmapErr, EnhanceThumbnailL((const CFbsBitmap*)inBitmap, (CFbsBitmap*)iOutBitmap)); |
|
2354 |
|
2355 } |
|
2356 else |
|
2357 { |
|
2358 delete inBitmap; inBitmap = 0; |
|
2359 delete iOutBitmap; iOutBitmap = 0; |
|
2360 HandleThumbnailError(outBitmapErr); |
|
2361 return; |
|
2362 } |
|
2363 } |
|
2364 else |
|
2365 { |
|
2366 delete inBitmap; inBitmap = 0; |
|
2367 HandleThumbnailError(outBitmapErr); |
|
2368 return; |
|
2369 } |
|
2370 } |
|
2371 else |
|
2372 { |
|
2373 delete inBitmap; inBitmap = 0; |
|
2374 HandleThumbnailError(inBitmapErr); |
|
2375 return; |
|
2376 } |
|
2377 |
|
2378 // delete input bitmap |
|
2379 delete inBitmap; |
|
2380 inBitmap = 0; |
|
2381 } |
|
2382 else |
|
2383 { |
|
2384 HandleThumbnailError(inBitmapErr); |
|
2385 return; |
|
2386 } |
|
2387 } |
|
2388 |
|
2389 // return enhanced bitmap |
|
2390 //aThumb = outBitmap; |
|
2391 //iState = EStateReadyToProcess; |
|
2392 |
|
2393 iYuvBuf = 0; |
|
2394 delete iRgbBuf; |
|
2395 iRgbBuf = 0; |
|
2396 |
|
2397 VPASSERT(iThumbnailRequestStatus); |
|
2398 User::RequestComplete(iThumbnailRequestStatus, KErrNone); |
|
2399 iThumbnailRequestStatus = 0; |
|
2400 |
|
2401 PRINT((_L("CMovieProcessorImpl::NotifyThumbnailReady() end"))) |
|
2402 } |
|
2403 |
|
2404 |
|
2405 // ----------------------------------------------------------------------------- |
|
2406 // CMovieProcessorImpl::HandleThumbnailError |
|
2407 // Handle error in thumbnail generation |
|
2408 // (other items were commented in a header). |
|
2409 // ----------------------------------------------------------------------------- |
|
2410 // |
|
2411 TBool CMovieProcessorImpl::HandleThumbnailError(TInt aError) |
|
2412 { |
|
2413 if (aError != KErrNone) |
|
2414 { |
|
2415 TInt error = aError; |
|
2416 |
|
2417 #ifndef _DEBUG |
|
2418 if (error < KErrHardwareNotAvailable) |
|
2419 error = KErrGeneral; |
|
2420 #endif |
|
2421 |
|
2422 VPASSERT(iThumbnailRequestStatus); |
|
2423 User::RequestComplete(iThumbnailRequestStatus, error); |
|
2424 iThumbnailRequestStatus = 0; |
|
2425 return ETrue; |
|
2426 } |
|
2427 return EFalse; |
|
2428 } |
|
2429 |
|
2430 // ----------------------------------------------------------------------------- |
|
2431 // CMovieProcessorImpl::FetchThumb |
|
2432 // Returns a pointer to completed thumbnail bitmap |
|
2433 // (other items were commented in a header). |
|
2434 // ----------------------------------------------------------------------------- |
|
2435 // |
|
2436 void CMovieProcessorImpl::FetchThumb(CFbsBitmap*& aThumb) |
|
2437 { |
|
2438 aThumb = iOutBitmap; |
|
2439 iOutBitmap = 0; |
|
2440 |
|
2441 iState = EStateReadyToProcess; |
|
2442 } |
|
2443 |
|
2444 // ----------------------------------------------------------------------------- |
|
2445 // CMovieProcessorImpl::InitializeClipStructuresL |
|
2446 // Initializes internal structures for movie processing |
|
2447 // (other items were commented in a header). |
|
2448 // ----------------------------------------------------------------------------- |
|
2449 // |
|
2450 void CMovieProcessorImpl::InitializeClipStructuresL() |
|
2451 { |
|
2452 |
|
2453 TTimeIntervalMicroSeconds time; |
|
2454 TInt i; |
|
2455 |
|
2456 // create memory for structures |
|
2457 // VIDEO |
|
2458 if(!iVideoClipParameters) |
|
2459 { |
|
2460 iVideoClipParameters = (struct TVideoClipParameters *)User::AllocL(iNumberOfVideoClips * |
|
2461 sizeof(struct TVideoClipParameters)); |
|
2462 Mem::Fill(iVideoClipParameters, iNumberOfVideoClips*sizeof(TVideoClipParameters), 0); |
|
2463 } |
|
2464 |
|
2465 if(!iThumbnailInProgress) |
|
2466 { |
|
2467 // create audio buffer |
|
2468 iOutAudioBuffer = (HBufC8*) HBufC8::NewL(KInitialAudioBufferSize); |
|
2469 |
|
2470 // create video buffer |
|
2471 iOutVideoBuffer = (HBufC8*) HBufC8::NewL(KInitialVideoBufferSize); |
|
2472 |
|
2473 CVedMovieImp* movie = (iMovie); |
|
2474 // initialize video clip parameters |
|
2475 for(i=0; i<iNumberOfVideoClips; i++) |
|
2476 { |
|
2477 // convert start & end times to milliseconds |
|
2478 iVideoClip = movie->VideoClip(i); |
|
2479 time = TTimeIntervalMicroSeconds(iVideoClip->StartTime()); |
|
2480 iVideoClipParameters[i].iStartTime = time.Int64()/1000; |
|
2481 time = TTimeIntervalMicroSeconds(iVideoClip->EndTime()); |
|
2482 iVideoClipParameters[i].iEndTime = time.Int64()/1000; |
|
2483 } |
|
2484 |
|
2485 } |
|
2486 |
|
2487 return; |
|
2488 } |
|
2489 |
|
2490 |
|
2491 // ----------------------------------------------------------------------------- |
|
2492 // CMovieProcessorImpl::DeleteClipStructures |
|
2493 // Frees memory allocated for internal structures |
|
2494 // (other items were commented in a header). |
|
2495 // ----------------------------------------------------------------------------- |
|
2496 // |
|
2497 void CMovieProcessorImpl::DeleteClipStructures() |
|
2498 { |
|
2499 |
|
2500 if (iFrameParameters) |
|
2501 User::Free(iFrameParameters); |
|
2502 iFrameParameters = 0; |
|
2503 iFrameParametersSize = 0; |
|
2504 |
|
2505 if (iVideoClipParameters) |
|
2506 User::Free(iVideoClipParameters); |
|
2507 iVideoClipParameters = 0; |
|
2508 |
|
2509 if (iEncoderBuffer) |
|
2510 User::Free(iEncoderBuffer); |
|
2511 iEncoderBuffer = 0; |
|
2512 |
|
2513 if (iOutAudioBuffer) |
|
2514 delete iOutAudioBuffer; |
|
2515 iOutAudioBuffer=0; |
|
2516 |
|
2517 if (iOutVideoBuffer) |
|
2518 delete iOutVideoBuffer; |
|
2519 iOutVideoBuffer = 0; |
|
2520 |
|
2521 } |
|
2522 |
|
2523 |
|
2524 // ----------------------------------------------------------------------------- |
|
2525 // CMovieProcessorImpl::InitializeClipL |
|
2526 // Initializes the processor for processing a clip |
|
2527 // (other items were commented in a header). |
|
2528 // ----------------------------------------------------------------------------- |
|
2529 // |
|
2530 void CMovieProcessorImpl::InitializeClipL() |
|
2531 { |
|
2532 |
|
2533 PRINT((_L("CMovieProcessorImpl::InitializeClipL() begin"))); |
|
2534 |
|
2535 iFirstFrameOfClip = ETrue; |
|
2536 iFirstFrameFlagSet = EFalse; |
|
2537 iModeChanged = EFalse; /* assuming that this clip's mode has not been changed */ |
|
2538 iVideoClipWritten = 0; |
|
2539 iWriting1stColorTransitionFrame = EFalse; |
|
2540 |
|
2541 // update clip properties |
|
2542 if (iThumbnailInProgress) |
|
2543 { |
|
2544 // just get the first frame in normal mode |
|
2545 iSpeed = KMaxVideoSpeed; |
|
2546 iColorEffect = (TVedColorEffect)EVedColorEffectNone; |
|
2547 iStartCutTime = 0; |
|
2548 iEndCutTime = 10; |
|
2549 } |
|
2550 else |
|
2551 { |
|
2552 if (iNumberOfVideoClips) |
|
2553 { |
|
2554 if ( iVideoClip->Info()->FileHandle() ) |
|
2555 { |
|
2556 iClipFileName.Zero(); |
|
2557 iClipFileHandle = iVideoClip->Info()->FileHandle(); |
|
2558 } |
|
2559 else |
|
2560 { |
|
2561 iClipFileHandle = NULL; |
|
2562 iClipFileName = (TPtrC)iVideoClip->Info()->FileName(); |
|
2563 } |
|
2564 |
|
2565 iSpeed = (TInt)iVideoClip->Speed(); |
|
2566 iColorEffect = (TVedColorEffect)iVideoClip->ColorEffect(); |
|
2567 iStartCutTime = TTimeIntervalMicroSeconds(iVideoClip->CutInTime()); |
|
2568 iEndCutTime = TTimeIntervalMicroSeconds(iVideoClip->CutOutTime()); |
|
2569 |
|
2570 iColorToneRgb = iVideoClip->ColorTone(); |
|
2571 ConvertColorToneRGBToYUV(iColorEffect,iColorToneRgb); |
|
2572 // store previous middle transition, if there is more than one middle transition |
|
2573 if(iVideoClipNumber > 0) |
|
2574 iPreviousMiddleTransitionEffect = iMiddleTransitionEffect; |
|
2575 // check if there is a position for middle transition for this clip |
|
2576 if(iMovie->MiddleTransitionEffectCount() > iVideoClipNumber) |
|
2577 { |
|
2578 iMiddleTransitionEffect = iMovie->MiddleTransitionEffect(iVideoClipNumber); |
|
2579 |
|
2580 if( ( iMiddleTransitionEffect == EVedMiddleTransitionEffectCrossfade || |
|
2581 iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeLeftToRight || |
|
2582 iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeRightToLeft || |
|
2583 iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeTopToBottom || |
|
2584 iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeBottomToTop ) && |
|
2585 ( iVideoClipNumber != iNumberOfVideoClips - 1 ) ) |
|
2586 { |
|
2587 TParse filename, filepath; |
|
2588 CVedMovieImp* movie = (iMovie); |
|
2589 |
|
2590 if( movie->VideoClip( iVideoClipNumber + 1 )->Info()->Class() == EVedVideoClipClassGenerated) |
|
2591 { |
|
2592 /************************************************************************/ |
|
2593 TFileName ImageMovieName(KTempFilePath); |
|
2594 ImageMovieName.Append( _L( "Im_" ) ); |
|
2595 ImageMovieName.Append( _L( "nokia_vpi.tmp" ) ); |
|
2596 /************************************************************************/ |
|
2597 |
|
2598 filename.Set( ImageMovieName, NULL, NULL ); |
|
2599 |
|
2600 if (iOutputFileHandle) |
|
2601 { |
|
2602 RFile* file = iOutputFileHandle; |
|
2603 TFileName fullName; |
|
2604 TInt error = file->FullName(fullName); |
|
2605 filepath.Set(fullName, NULL, NULL); |
|
2606 } |
|
2607 else |
|
2608 filepath.Set( iOutputMovieFileName, NULL, NULL ); |
|
2609 } |
|
2610 else{ |
|
2611 |
|
2612 if ( movie->VideoClip( iVideoClipNumber + 1 )->Info()->FileHandle() != NULL ) |
|
2613 { |
|
2614 |
|
2615 RFile* file = movie->VideoClip( iVideoClipNumber + 1 )->Info()->FileHandle(); |
|
2616 |
|
2617 TFileName origName; |
|
2618 TInt error = file->Name(origName); |
|
2619 filename.Set(origName, NULL, NULL); |
|
2620 |
|
2621 TFileName fullName; |
|
2622 error = file->FullName(fullName); |
|
2623 filepath.Set(fullName, NULL, NULL); |
|
2624 |
|
2625 } |
|
2626 else |
|
2627 { |
|
2628 filename.Set( movie->VideoClip( iVideoClipNumber + 1 )->Info()->FileName(), NULL, NULL ); |
|
2629 filepath.Set( iOutputMovieFileName, NULL, NULL ); |
|
2630 } |
|
2631 |
|
2632 } |
|
2633 |
|
2634 iNextClipFileName = filepath.DriveAndPath(); |
|
2635 iNextClipFileName.Append( filename.Name() ); |
|
2636 // VPI special tmp file |
|
2637 iNextClipFileName.Append( _L( "_" ) ); |
|
2638 iNextClipFileName.AppendNum( iVideoClipNumber ); |
|
2639 iNextClipFileName.Append( _L( "_vpi.tmp" ) ); |
|
2640 // try to create a tmp file |
|
2641 if ( iNextClip.Create( iFs, iNextClipFileName, EFileStream | EFileWrite | EFileShareExclusive ) != KErrNone ) |
|
2642 { |
|
2643 // check if the tmp file exists |
|
2644 if ( iNextClip.Open( iFs, iNextClipFileName, EFileStream | EFileWrite | EFileShareExclusive ) != KErrNone ) |
|
2645 { |
|
2646 iNextClip.Close(); |
|
2647 iNextClipFileName.Zero(); |
|
2648 } |
|
2649 } |
|
2650 } |
|
2651 } |
|
2652 } |
|
2653 |
|
2654 // this is in common timescale |
|
2655 iInitialClipStartTimeStamp = (iVideoClipNumber>0 ? iVideoClipParameters[iVideoClipNumber-1].iEndTime : 0); |
|
2656 } |
|
2657 |
|
2658 if (!iThumbnailInProgress) |
|
2659 iMonitor->StartPreparing(); |
|
2660 |
|
2661 // create an instance of the parser |
|
2662 if (iNumberOfVideoClips) |
|
2663 { |
|
2664 |
|
2665 if (!iParser) |
|
2666 { |
|
2667 if (iClipFileHandle) |
|
2668 iParser = (CMP4Parser*) CMP4Parser::NewL(this, iClipFileHandle); |
|
2669 else |
|
2670 iParser = (CMP4Parser*) CMP4Parser::NewL(this, iClipFileName); |
|
2671 } |
|
2672 iParser->iFirstTimeClipParsing = ETrue; |
|
2673 iState = EStateIdle; |
|
2674 // open file & parse header |
|
2675 CMovieProcessorImpl::TDataFormat format = CMovieProcessorImpl::EDataAutoDetect; |
|
2676 User::LeaveIfError(OpenStream(iClipFileName, iClipFileHandle, format)); |
|
2677 |
|
2678 if ( iThumbnailInProgress && (iHaveVideo == EFalse) ) |
|
2679 User::Leave(KErrNotFound); |
|
2680 |
|
2681 VPASSERT(iState == EStateOpened); |
|
2682 } |
|
2683 else |
|
2684 SetHeaderDefaults(); |
|
2685 |
|
2686 iState = EStatePreparing; |
|
2687 |
|
2688 if (!iThumbnailInProgress) |
|
2689 { |
|
2690 |
|
2691 // Since the clip does not have any audio type it can be over written by any audio type depending on output |
|
2692 if(iOutputAudioType == EAudioAMR) |
|
2693 { |
|
2694 if( iAudioType == EAudioNone) |
|
2695 iAudioType = EAudioAMR; |
|
2696 } |
|
2697 else if(iOutputAudioType == EAudioAAC) |
|
2698 { |
|
2699 if( iAudioType == EAudioNone) |
|
2700 { |
|
2701 iAudioType = EAudioAAC; |
|
2702 if(iVideoClipNumber == 0) |
|
2703 { |
|
2704 //because it has no audio so it will have no decoder specific Info |
|
2705 iFirstClipHasNoDecInfo = ETrue; |
|
2706 } |
|
2707 iAudioFramesInSample = 1; |
|
2708 } |
|
2709 } |
|
2710 |
|
2711 iEncoderInitPending = ETrue; |
|
2712 // complete request to finish initialising & start processing |
|
2713 SetActive(); |
|
2714 iStatus = KRequestPending; |
|
2715 |
|
2716 TRequestStatus *status = &iStatus; |
|
2717 User::RequestComplete(status, KErrNone); |
|
2718 |
|
2719 if (iNumberOfVideoClips) |
|
2720 { |
|
2721 if (iParser) |
|
2722 { |
|
2723 // update the video clip duration in millisec. |
|
2724 iVideoClipDuration = (TInt64)(iVideoClip->Info()->Duration().Int64() * |
|
2725 (TInt64)iParser->iStreamParameters.iVideoTimeScale / (TInt64)(1000000)); |
|
2726 } |
|
2727 } |
|
2728 } |
|
2729 |
|
2730 else |
|
2731 { |
|
2732 // open demux & decoder |
|
2733 User::LeaveIfError(Prepare()); |
|
2734 |
|
2735 VPASSERT(iState == EStateReadyToProcess); |
|
2736 } |
|
2737 |
|
2738 PRINT((_L("CMovieProcessorImpl::InitializeClipL() end"))) |
|
2739 } |
|
2740 |
|
2741 |
|
2742 // ----------------------------------------------------------------------------- |
|
2743 // CMovieProcessorImpl::InitializeGeneratedClipL |
|
2744 // Initializes the processor for processing a generated clip |
|
2745 // (other items were commented in a header). |
|
2746 // ----------------------------------------------------------------------------- |
|
2747 // |
|
void CMovieProcessorImpl::InitializeGeneratedClipL()
    {
    // Reset per-clip state so the generated clip is processed from scratch.
    iEncodeInProgress = 0;
    iFirstFrameOfClip = ETrue; // initialized to indicate that the first frame of this clip has not yet been written
    iFirstFrameFlagSet = EFalse;
    iModeChanged = EFalse; /* assuming that this clip's mode has not been changed */
    iVideoClipWritten = 0;
    iWriting1stColorTransitionFrame = EFalse;

    // update clip properties
    if (iThumbnailInProgress)
        {
        /* just get the first frame in normal mode */
        iSpeed = KMaxVideoSpeed;
        iColorEffect = (TVedColorEffect)EVedColorEffectNone;
        iStartCutTime = 0;
        iEndCutTime = 10;
        }
    else
        {
        if (iNumberOfVideoClips)
            {
            // Take speed/effect/cut times from the clip; generated clips
            // cannot be cut, so the cut range covers the whole duration.
            iSpeed = (TInt)iVideoClip->Speed();
            iColorEffect = iVideoClip->ColorEffect();
            iStartCutTime = TTimeIntervalMicroSeconds(0); /* since generated clips cannot be cut */
            iEndCutTime = TTimeIntervalMicroSeconds(iVideoClip->Info()->Duration());

            iColorToneRgb = iVideoClip->ColorTone();
            ConvertColorToneRGBToYUV(iColorEffect,iColorToneRgb);

            /* store previous middle transition, if there is more than one middle transition */
            if(iVideoClipNumber > 0)
                iPreviousMiddleTransitionEffect = iMiddleTransitionEffect;
            /* since it is image clip, there will be no middletransitioneffect
               check if there is a position for middle transition for this clip
            */
            if(iMovie->MiddleTransitionEffectCount() > iVideoClipNumber)
                {
                iMiddleTransitionEffect = iMovie->MiddleTransitionEffect(iVideoClipNumber);

                // A crossfade/wipe transition into the next clip needs that
                // clip's frames available in a temp file; build its name here.
                if( ( iMiddleTransitionEffect == EVedMiddleTransitionEffectCrossfade ||
                      iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeLeftToRight ||
                      iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeRightToLeft ||
                      iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeTopToBottom ||
                      iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeBottomToTop ) &&
                    ( iVideoClipNumber != iNumberOfVideoClips - 1 ) )
                    {
                    TParse filename, filepath;
                    CVedMovieImp* movie = (iMovie);
                    if( movie->VideoClip( iVideoClipNumber + 1 )->Info()->Class() == EVedVideoClipClassGenerated)
                        {
                        /* path for storing temporary files */
                        TFileName ImageMovieName(KTempFilePath);
                        ImageMovieName.Append( _L( "Im_" ) );
                        ImageMovieName.Append( _L( "nokia_vpi.tmp" ) );
                        filename.Set( ImageMovieName, NULL, NULL );

                        // Directory of the output movie is used as the temp-file
                        // directory, taken from the handle when one was given.
                        if (iOutputFileHandle)
                            {
                            RFile* file = iOutputFileHandle;
                            TFileName fullName;
                            TInt error = file->FullName(fullName);
                            filepath.Set(fullName, NULL, NULL);
                            }
                        else
                            filepath.Set( iOutputMovieFileName, NULL, NULL );
                        }
                    else
                        {
                        // Next clip is file-based: name the temp file after it.
                        if ( movie->VideoClip( iVideoClipNumber + 1 )->Info()->FileHandle() != NULL )
                            {
                            RFile* file = movie->VideoClip( iVideoClipNumber + 1 )->Info()->FileHandle();

                            TFileName origName;
                            TInt error = file->Name(origName);
                            filename.Set(origName, NULL, NULL);

                            TFileName fullName;
                            error = file->FullName(fullName);
                            filepath.Set(fullName, NULL, NULL);
                            }
                        else
                            {
                            filename.Set( movie->VideoClip( iVideoClipNumber + 1 )->Info()->FileName(), NULL, NULL );
                            filepath.Set( iOutputMovieFileName, NULL, NULL );
                            }
                        }

                    // Compose "<dir><name>_<clipIndex>_vpi.tmp" for the next clip.
                    iNextClipFileName = filepath.DriveAndPath();
                    iNextClipFileName.Append( filename.Name() );
                    /* tag it to indicate that its a tmp editor file */
                    iNextClipFileName.Append( _L( "_" ) );
                    iNextClipFileName.AppendNum( iVideoClipNumber );
                    iNextClipFileName.Append( _L( "_vpi.tmp" ) );
                    /* try to create a tmp file */
                    if ( iNextClip.Create( iFs, iNextClipFileName, EFileStream | EFileWrite | EFileShareExclusive ) != KErrNone )
                        {
                        /* check if the tmp file exists */
                        if ( iNextClip.Open( iFs, iNextClipFileName, EFileStream | EFileWrite | EFileShareExclusive ) != KErrNone )
                            {
                            // Could neither create nor open: give up on the temp
                            // file; an empty name signals "no transition file".
                            iNextClip.Close();
                            iNextClipFileName.Zero();
                            }
                        }
                    }
                }
            }

        /* this is in common timescale */
        iInitialClipStartTimeStamp = (iVideoClipNumber>0 ? iVideoClipParameters[iVideoClipNumber-1].iEndTime : 0);
        }

    if (iNumberOfVideoClips)
        {
        // (Re)create the parser for the clip file; an existing parser is
        // replaced so parsing starts fresh for this generated clip.
        if (!iParser) /* if file name does not exist, this may cause error while parsing */
            {
            iParser = (CMP4Parser*) CMP4Parser::NewL(this, iClipFileName);
            }
        else
            {
            delete iParser;
            iParser = 0;
            iParser = (CMP4Parser*) CMP4Parser::NewL(this, iClipFileName);
            }

        iParser->iFirstTimeClipParsing = ETrue;
        iState = EStateIdle;
        /* open file and parse header */
        CMovieProcessorImpl::TDataFormat format = CMovieProcessorImpl::EDataAutoDetect;
        /* this will be overloaded to accomodate for the buffer type */

        // NOTE (translated from Finnish): "Does this go right??" — the file
        // handle is passed as NULL here, so only the file name path is used;
        // verify this is intended for handle-based clips.
        User::LeaveIfError(OpenStream(iClipFileName, NULL, format));
        VPASSERT(iState == EStateOpened);

        }
    else
        {
        SetHeaderDefaults();
        }

    iState = EStatePreparing;
    if (!iThumbnailInProgress)
        {
        // Complete our own request so RunL continues initialisation
        // asynchronously (encoder init is pending).
        iEncoderInitPending = ETrue;
        // async.
        if (!IsActive())
            {
            SetActive();
            iStatus = KRequestPending;
            }
        TRequestStatus *status = &iStatus;
        User::RequestComplete(status, KErrNone);

        if (iNumberOfVideoClips)
            {
            /* update the video clip duration in millisec. */
            iVideoClipDuration = (TInt64)(iVideoClip->Info()->Duration().Int64() *
                (TInt64)iParser->iStreamParameters.iVideoTimeScale / (TInt64)(1000000));
            }

        if(iOutputAudioType == EAudioAMR)
            { // setting generated clip type same as output type
            if( iAudioType == EAudioNone)
                iAudioType = EAudioAMR;
            }else if(iOutputAudioType == EAudioAAC)
            {
            if( iAudioType == EAudioNone)
                iAudioType = EAudioAAC;
            }

        }
    else /* if thumbnail is in progress */
        {
        /* open demux and decoder */
        User::LeaveIfError(Prepare());
        VPASSERT(iState == EStateReadyToProcess);
        }
    }
|
2929 |
|
2930 |
|
2931 // ----------------------------------------------------------------------------- |
|
2932 // CMovieProcessorImpl::TemporaryInitializeGeneratedClipL |
|
2933 // temporarily initializes the processor for image 3gp file creation |
|
2934 // (other items were commented in a header). |
|
2935 // ----------------------------------------------------------------------------- |
|
2936 // |
|
void CMovieProcessorImpl::TemporaryInitializeGeneratedClipL(){

    /* re-initialize to create new clip */
    iFirstFrameOfClip = ETrue; /* initialized to indicate that the first frame of this clip has not yet been written */
    iFirstFrameFlagSet = EFalse;
    iModeChanged = EFalse; /* assuming that this clip's mode has not been changed */
    iImageClipCreated = 0;
    iWriting1stColorTransitionFrame = EFalse;

    // Reset the image-encoding bookkeeping flags for this pass.
    iImageEncodedFlag = 0; // has an image been encoded
    iImageEncodeProcFinished = 0;
    iFirstTimeProcessing = 0;
    iGetFrameInProgress = 0;
    iEncodeInProgress = 0;

    TInt foundFileBased = 0;      // 1 once a non-generated (file-based) clip is found
    TInt index = 0;               // scan index over the movie's clips
    CVedVideoClip* tempclip =0;
    CVedMovieImp* tempmovie = (iMovie);
    TSize mRes = tempmovie->Resolution();

    if(iVideoClipNumber == 0)
        {
        // On the first clip, scan the movie for the first file-based clip;
        // its stream parameters will seed the generated clip's parameters.
        while(foundFileBased == 0)
            {
            if(index >= iNumberOfVideoClips)
                {
                break;
                }
            else
                {
                tempclip = tempmovie->VideoClip(index);
                index++;
                if(tempclip->Info()->Class()==EVedVideoClipClassGenerated)
                    {
                    foundFileBased = 0; //You still need to search
                    }
                else
                    {
                    foundFileBased = 1;

                    // Remember where to read the file-based clip from:
                    // prefer the open handle, fall back to the file name.
                    if (tempclip->Info()->FileHandle())
                        {
                        iClipFileName.Zero();
                        iClipFileHandle = tempclip->Info()->FileHandle();
                        }
                    else
                        {
                        iClipFileHandle = NULL;
                        iClipFileName = tempclip->Info()->FileName();
                        }
                    }
                }
            }
        tempmovie = 0; /* make them zero again */
        tempclip = 0;
        if(foundFileBased == 0) /* no file based clips in output movie */
            {
            iAllGeneratedClips =1; /* to indicate all clips to be inserted are generated */
            // No source file to parse: take dimensions from the movie itself.
            iVideoParameters.iWidth = mRes.iWidth;
            iVideoParameters.iHeight = mRes.iHeight;
            }
        else
            {
            /* create an instance of the parser which will be used to set stream paramters from file based */
            if (iNumberOfVideoClips)
                {
                if (!iParser)
                    {
                    if (iClipFileHandle)
                        iParser = (CMP4Parser*) CMP4Parser::NewL(this, iClipFileHandle);
                    else
                        iParser = (CMP4Parser*) CMP4Parser::NewL(this, iClipFileName);
                    }
                iParser->iFirstTimeClipParsing = ETrue;
                iState = EStateIdle;
                /* open file and parse header */
                iDataFormat = EData3GP; /* since file will be generated cant open with open stream so reset with standard */
                iMuxType = EMux3GP;
                ParseHeaderL();
                }
            }
        }

    iState = EStatePreparing;

    if(iAllGeneratedClips == 1) /* all are generated clips => use H263 */
        {
        //iTranscoder->SetTargetVideoType(1); /* set target video type to H263 */
        //not required as if all are generated even then output is based on engine output video type
        }
    iMonitor->StartPreparing();
    if (!iVideoEncoder)
        {
        // First time here: create and configure the encoder, then start its
        // asynchronous initialisation (completion arrives in RunL).

#ifdef VIDEOEDITORENGINE_AVC_EDITING
        if (iOutputVideoType == EVideoAVCProfileBaseline)
            {
            VPASSERT(!iImageAvcEdit);
            iImageAvcEdit = CVedAVCEdit::NewL();

            iImageAvcEdit->SetOutputLevel( GetOutputAVCLevel() );
            }

#endif
        iVideoEncoder = CVideoEncoder::NewL(iMonitor, iImageAvcEdit, iMovie->VideoCodecMimeType());

        iVideoEncoder->SetFrameSizeL(iMovie->Resolution());

        // Use the max frame rate since we don't want to change the frame rate
        TReal inputFrameRate = iMovie->MaximumFramerate();
        iVideoEncoder->SetInputFrameRate(inputFrameRate);

        if ( iMovie->VideoFrameRate() > 0 )
            iVideoEncoder->SetFrameRate( iMovie->VideoFrameRate() );
        if ( iMovie->VideoBitrate() > 0 ) // if there is request for restricted bitrate, use it
            iVideoEncoder->SetBitrate( iMovie->VideoBitrate() );
        else if ( iMovie->VideoStandardBitrate() > 0 ) // use the given standard bitrate
            iVideoEncoder->SetBitrate( iMovie->VideoStandardBitrate() );

        if( iMovie->RandomAccessRate() > 0.0 )
            iVideoEncoder->SetRandomAccessRate( iMovie->RandomAccessRate() );

        /* initialize encoder */
        if (!IsActive())
            {
            SetActive();
            iStatus = KRequestPending;
            }
        iVideoEncoder->InitializeL(iStatus);
        iEncoderInitPending = ETrue;
        }
    else
        {
        if((iAllGeneratedClips == 1) && (iVideoClipNumber == 0))
            {
            // Encoder exists but was never started and this is the first of an
            // all-generated movie: recreate it from scratch with the same
            // configuration as above.
            VPASSERT(iVideoEncoder->BeenStarted() == 0);
            //iVideoEncoder->Stop();
            delete iVideoEncoder;
            iVideoEncoder = 0;

#ifdef VIDEOEDITORENGINE_AVC_EDITING
            if (iOutputVideoType == EVideoAVCProfileBaseline)
                {
                VPASSERT(!iImageAvcEdit);
                iImageAvcEdit = CVedAVCEdit::NewL();

                iImageAvcEdit->SetOutputLevel( GetOutputAVCLevel() );
                }
#endif

            iVideoEncoder = CVideoEncoder::NewL(iMonitor, iImageAvcEdit, iMovie->VideoCodecMimeType());

            iVideoEncoder->SetFrameSizeL(iMovie->Resolution());

            TReal inputFrameRate = iMovie->MaximumFramerate();
            iVideoEncoder->SetInputFrameRate(inputFrameRate);

            if ( iMovie->VideoFrameRate() > 0 )
                iVideoEncoder->SetFrameRate( iMovie->VideoFrameRate() );
            if ( iMovie->VideoBitrate() > 0 ) // if there is request for restricted bitrate, use it
                iVideoEncoder->SetBitrate( iMovie->VideoBitrate() );
            else if ( iMovie->VideoStandardBitrate() > 0 ) // use the given standard bitrate
                iVideoEncoder->SetBitrate( iMovie->VideoStandardBitrate() );
            if( iMovie->RandomAccessRate() > 0.0 )
                iVideoEncoder->SetRandomAccessRate( iMovie->RandomAccessRate() );

            /* initialize encoder */
            if (!IsActive())
                {
                SetActive();
                iStatus = KRequestPending;
                }
            iVideoEncoder->InitializeL(iStatus);
            iEncoderInitPending = ETrue;
            }
        else
            {
            // Encoder already initialised: just force an intra frame and
            // complete our own request so RunL proceeds immediately.
            // first frame has to be intra
            iVideoEncoder->SetRandomAccessPoint();
            iEncoderInitPending = ETrue;
            if (!IsActive())
                {
                SetActive();
                iStatus = KRequestPending;
                }
            TRequestStatus *status = &iStatus;
            User::RequestComplete(status, KErrNone);
            }
        }


    }
|
3130 |
|
3131 |
|
3132 // ----------------------------------------------------------------------------- |
|
3133 // CMovieProcessorImpl::ParseHeaderOnlyL |
|
3134 // Parses the header for a given clip |
|
3135 // (other items were commented in a header). |
|
3136 // ----------------------------------------------------------------------------- |
|
3137 // |
|
3138 void CMovieProcessorImpl::ParseHeaderOnlyL(CParser::TStreamParameters& aStreamParams, |
|
3139 TFileName& aFileName, RFile* aFileHandle) |
|
3140 { |
|
3141 |
|
3142 if (!iParser) |
|
3143 { |
|
3144 // create an instance of the parser |
|
3145 if (aFileHandle) |
|
3146 iParser = (CMP4Parser*) CMP4Parser::NewL(this, aFileHandle); |
|
3147 else |
|
3148 iParser = (CMP4Parser*) CMP4Parser::NewL(this, aFileName); |
|
3149 } |
|
3150 iParser->ParseHeaderL(aStreamParams); |
|
3151 |
|
3152 // don't read the audio properties from input files, but from audio engine |
|
3153 CVedMovieImp* songmovie = (iMovie); |
|
3154 CAudSong* songPointer = 0; |
|
3155 |
|
3156 if (songmovie) |
|
3157 { |
|
3158 songPointer = songmovie->Song(); |
|
3159 } |
|
3160 |
|
3161 if (songPointer) |
|
3162 { |
|
3163 TAudFileProperties prop = songPointer->OutputFileProperties(); |
|
3164 |
|
3165 aStreamParams.iHaveAudio = ETrue; |
|
3166 if (songPointer->ClipCount(KAllTrackIndices) == 0) |
|
3167 { |
|
3168 aStreamParams.iHaveAudio = EFalse; |
|
3169 } |
|
3170 |
|
3171 aStreamParams.iAudioLength = I64INT(prop.iDuration.Int64())/1000; |
|
3172 aStreamParams.iAudioFormat = CParser::EAudioFormatNone; |
|
3173 if (prop.iAudioType == EAudAMR) |
|
3174 { |
|
3175 aStreamParams.iAudioFormat = CParser::EAudioFormatAMR; |
|
3176 aStreamParams.iAudioFramesInSample = 5; |
|
3177 aStreamParams.iAudioTimeScale = 1000; |
|
3178 } |
|
3179 else if ( prop.iAudioType == EAudAAC_MPEG4 ) |
|
3180 { |
|
3181 aStreamParams.iAudioFormat = CParser::EAudioFormatAAC; |
|
3182 aStreamParams.iAudioFramesInSample = 1; |
|
3183 aStreamParams.iAudioTimeScale = prop.iSamplingRate; |
|
3184 } |
|
3185 } |
|
3186 |
|
3187 // update output parameters. |
|
3188 UpdateStreamParameters(iParser->iStreamParameters, aStreamParams); |
|
3189 SetOutputNumberOfFrames(iParser->iOutputNumberOfFrames); |
|
3190 } |
|
3191 |
|
3192 |
|
3193 |
|
3194 // ----------------------------------------------------------------------------- |
|
3195 // CMovieProcessorImpl::OpenStream |
|
3196 // Opens a clip for processing |
|
3197 // (other items were commented in a header). |
|
3198 // ----------------------------------------------------------------------------- |
|
3199 // |
|
3200 TInt CMovieProcessorImpl::OpenStream(TFileName aFileName, RFile* aFileHandle, TDataFormat aDataFormat) |
|
3201 { |
|
3202 // We can only streams in idle state |
|
3203 if (iState != EStateIdle) |
|
3204 return EInvalidProcessorState; |
|
3205 |
|
3206 TInt error = KErrNone; |
|
3207 iDataFormat = aDataFormat; |
|
3208 |
|
3209 if (!aFileHandle) |
|
3210 { |
|
3211 |
|
3212 RFs fs; |
|
3213 RFile file; |
|
3214 |
|
3215 if(aFileName.Length() == 0) |
|
3216 return KErrArgument; |
|
3217 |
|
3218 // Open a file server session and open the file: |
|
3219 if ( (error = fs.Connect()) != KErrNone ) |
|
3220 return error; |
|
3221 |
|
3222 if ( (error = file.Open(fs, aFileName, EFileShareReadersOnly | EFileRead)) != KErrNone ) |
|
3223 { |
|
3224 if ( (error = file.Open(fs, aFileName, EFileShareAny | EFileRead)) != KErrNone ) |
|
3225 { |
|
3226 return error; |
|
3227 } |
|
3228 } |
|
3229 |
|
3230 // set descriptor to read buffer |
|
3231 TPtr8 readDes(0,0); |
|
3232 readDes.Set(iReadBuf, 0, KReadBufInitSize); |
|
3233 |
|
3234 // read data from the file |
|
3235 if ( (error = file.Read(readDes)) != KErrNone ) |
|
3236 return error; |
|
3237 |
|
3238 if ( readDes.Length() < 8 ) |
|
3239 return KErrGeneral; |
|
3240 |
|
3241 file.Close(); |
|
3242 fs.Close(); |
|
3243 |
|
3244 // detect if format is 3GP, 5-8 == "ftyp" |
|
3245 // This method is not 100 % proof, but good enough |
|
3246 if ( (iReadBuf[4] == 0x66) && (iReadBuf[5] == 0x74) && |
|
3247 (iReadBuf[6] == 0x79) && (iReadBuf[7] == 0x70) ) |
|
3248 { |
|
3249 iDataFormat = EData3GP; |
|
3250 iMuxType = EMux3GP; |
|
3251 } |
|
3252 else |
|
3253 return KErrNotSupported; |
|
3254 } |
|
3255 |
|
3256 // FIXME |
|
3257 iDataFormat = EData3GP; |
|
3258 iMuxType = EMux3GP; |
|
3259 |
|
3260 // parse 3GP header |
|
3261 CMP4Parser *parser = 0; |
|
3262 if ( !iParser ) |
|
3263 { |
|
3264 if (iClipFileHandle) |
|
3265 { |
|
3266 TRAP(error, (parser = CMP4Parser::NewL(this, iClipFileHandle)) ); |
|
3267 } |
|
3268 else |
|
3269 { |
|
3270 TRAP(error, (parser = CMP4Parser::NewL(this, iClipFileName)) ); |
|
3271 } |
|
3272 |
|
3273 if (error != KErrNone) |
|
3274 return error; |
|
3275 iParser = parser; |
|
3276 } |
|
3277 else |
|
3278 parser = (CMP4Parser*)iParser; |
|
3279 |
|
3280 TRAP(error, ParseHeaderL()); |
|
3281 |
|
3282 if (error != KErrNone) |
|
3283 return error; |
|
3284 |
|
3285 iState = EStateOpened; |
|
3286 |
|
3287 return KErrNone; |
|
3288 |
|
3289 |
|
3290 } |
|
3291 |
|
3292 // ----------------------------------------------------------------------------- |
|
3293 // CMovieProcessorImpl::CloseStream |
|
3294 // Closes the processed stream from parser |
|
3295 // (other items were commented in a header). |
|
3296 // ----------------------------------------------------------------------------- |
|
3297 // |
|
3298 TInt CMovieProcessorImpl::CloseStream() |
|
3299 { |
|
3300 |
|
3301 PRINT((_L("CMovieProcessorImpl::CloseStream() begin - iState = %d"), iState)) |
|
3302 |
|
3303 if ( (iState != EStateOpened) && (iState != EStateProcessing) ) |
|
3304 return EInvalidProcessorState; |
|
3305 |
|
3306 TInt error=0; |
|
3307 |
|
3308 // delete parser |
|
3309 if (iParser) |
|
3310 { |
|
3311 TRAP(error, |
|
3312 { |
|
3313 delete iParser; |
|
3314 iParser=0; |
|
3315 } |
|
3316 ); |
|
3317 if (error != KErrNone) |
|
3318 return error; |
|
3319 } |
|
3320 |
|
3321 iClipFileName.Zero(); |
|
3322 iCurrentMovieName.Zero(); |
|
3323 |
|
3324 // We are idle again |
|
3325 iState = EStateIdle; |
|
3326 |
|
3327 PRINT((_L("CMovieProcessorImpl::CloseStream() end "))) |
|
3328 |
|
3329 return KErrNone; |
|
3330 } |
|
3331 |
|
3332 |
|
3333 // ----------------------------------------------------------------------------- |
|
3334 // CMovieProcessorImpl::Prepare |
|
3335 // Prepares the processor for processing, opens demux & decoder |
|
3336 // (other items were commented in a header). |
|
3337 // ----------------------------------------------------------------------------- |
|
3338 // |
|
3339 TInt CMovieProcessorImpl::Prepare() |
|
3340 { |
|
3341 TInt error; |
|
3342 TUint videoBlockSize, videoBlocks; |
|
3343 |
|
3344 // We can only prepare from preparing state |
|
3345 if (iState != EStatePreparing) |
|
3346 return EInvalidProcessorState; |
|
3347 |
|
3348 // Make sure we now know the stream format |
|
3349 if (iDataFormat == EDataAutoDetect) |
|
3350 return EUnsupportedFormat; |
|
3351 |
|
3352 // Check whether the stream has audio, video or both, and whether it is |
|
3353 // muxed |
|
3354 switch (iDataFormat) |
|
3355 { |
|
3356 case EData3GP: |
|
3357 // the video and audio flags are set when |
|
3358 // the header is parsed. |
|
3359 iIsMuxed = ETrue; |
|
3360 break; |
|
3361 default: |
|
3362 User::Panic(_L("CMovieProcessorImpl"), EInvalidInternalState); |
|
3363 } |
|
3364 |
|
3365 // If we have already played this stream since opening it, we'll have to |
|
3366 // try to rewind |
|
3367 |
|
3368 // only 3gp file format supported => iIsMuxed always true |
|
3369 videoBlocks = KVideoQueueBlocks; |
|
3370 videoBlockSize = KVideoQueueBlockSize; |
|
3371 |
|
3372 // Initialize video |
|
3373 VPASSERT((!iVideoQueue) && (!iVideoProcessor)); |
|
3374 if (iHaveVideo) |
|
3375 { |
|
3376 TRAP(error, InitVideoL(videoBlocks, videoBlockSize)); |
|
3377 if ( error != KErrNone ) |
|
3378 return error; |
|
3379 } |
|
3380 |
|
3381 // Initialize demux |
|
3382 VPASSERT(!iDemux); |
|
3383 VPASSERT(iIsMuxed); |
|
3384 TRAP(error, InitDemuxL()); |
|
3385 if ( error != KErrNone ) |
|
3386 return error; |
|
3387 |
|
3388 iState = EStateReadyToProcess; |
|
3389 |
|
3390 return KErrNone; |
|
3391 } |
|
3392 |
|
3393 // ----------------------------------------------------------------------------- |
|
3394 // CMovieProcessorImpl::InitVideoL |
|
3395 // Initializes the video decoder for processing |
|
3396 // (other items were commented in a header). |
|
3397 // ----------------------------------------------------------------------------- |
|
3398 // |
|
3399 void CMovieProcessorImpl::InitVideoL(TUint aQueueBlocks, TUint aQueueBlockSize) |
|
3400 { |
|
3401 // Create video input queue |
|
3402 iVideoQueue = new (ELeave) CActiveQueue(aQueueBlocks, aQueueBlockSize); |
|
3403 iVideoQueue->ConstructL(); |
|
3404 |
|
3405 if (iThumbnailInProgress && |
|
3406 iParser->iStreamParameters.iVideoFormat == CParser::EVideoFormatAVCProfileBaseline) |
|
3407 { |
|
3408 if (!iAvcEdit) |
|
3409 { |
|
3410 // create AVC editing instance |
|
3411 iAvcEdit = CVedAVCEdit::NewL(); |
|
3412 } |
|
3413 } |
|
3414 |
|
3415 // Create correct video decoder object |
|
3416 |
|
3417 VPASSERT(!iVideoProcessor); |
|
3418 switch (iVideoType) |
|
3419 { |
|
3420 case EVideoH263Profile0Level10: |
|
3421 case EVideoH263Profile0Level45: |
|
3422 case EVideoMPEG4: |
|
3423 case EVideoAVCProfileBaseline: |
|
3424 // H.263 decoder handles both H.263+ and MPEG-4 |
|
3425 { |
|
3426 |
|
3427 iVideoProcessor = CVideoProcessor::NewL(iVideoQueue, |
|
3428 &iVideoParameters, |
|
3429 this, |
|
3430 iMonitor, |
|
3431 iAvcEdit, |
|
3432 iThumbnailInProgress, |
|
3433 CActive::EPriorityStandard); |
|
3434 |
|
3435 } |
|
3436 break; |
|
3437 |
|
3438 default: |
|
3439 User::Leave(EUnsupportedFormat); |
|
3440 } |
|
3441 } |
|
3442 |
|
3443 |
|
3444 // ----------------------------------------------------------------------------- |
|
3445 // CMovieProcessorImpl::InitDemuxL |
|
3446 // Initializes the demultiplexer for processing |
|
3447 // (other items were commented in a header). |
|
3448 // ----------------------------------------------------------------------------- |
|
3449 // |
|
3450 void CMovieProcessorImpl::InitDemuxL() |
|
3451 { |
|
3452 |
|
3453 // Set video channel target queue |
|
3454 TUint i; |
|
3455 |
|
3456 // 3gp is the only supported file format |
|
3457 VPASSERT(iMuxType == EMux3GP); |
|
3458 |
|
3459 for ( i = 0; i < iNumDemuxChannels; i++ ) |
|
3460 { |
|
3461 if (iMP4Channels[i].iDataType == CMP4Demux::EDataVideo) |
|
3462 { |
|
3463 VPASSERT(iHaveVideo); |
|
3464 iMP4Channels[i].iTargetQueue = iVideoQueue; |
|
3465 } |
|
3466 } |
|
3467 |
|
3468 VPASSERT(iParser); |
|
3469 |
|
3470 iDemux = CMP4Demux::NewL(NULL /* demuxQueue */, iNumDemuxChannels, |
|
3471 iMP4Channels, &iMP4Parameters, |
|
3472 iMonitor, (CMP4Parser*)iParser, |
|
3473 KDemuxPriority); |
|
3474 } |
|
3475 |
|
3476 // ----------------------------------------------------------------------------- |
|
3477 // CMovieProcessorImpl::DoStartProcessing |
|
3478 // Starts processing the movie |
|
3479 // (other items were commented in a header). |
|
3480 // ----------------------------------------------------------------------------- |
|
3481 // |
|
3482 void CMovieProcessorImpl::DoStartProcessing() |
|
3483 { |
|
3484 |
|
3485 PRINT((_L("CMovieProcessorImpl::DoStartProcessing() begin"))) |
|
3486 |
|
3487 if (iNumberOfVideoClips) |
|
3488 { |
|
3489 VPASSERT(iDemux); |
|
3490 VPASSERT(iVideoProcessor); |
|
3491 VPASSERT(iState == EStateReadyToProcess); |
|
3492 |
|
3493 // start demuxing & decoding video |
|
3494 |
|
3495 iDemux->Start(); |
|
3496 iVideoProcessor->Start(); |
|
3497 if(!((iVideoClipNumber==0)&&((TVedVideoClipClass)iVideoClip->Info()->Class()==(TVedVideoClipClass)EVedVideoClipClassGenerated))) |
|
3498 iMonitor->ProcessingStarted(iStartingProcessing); |
|
3499 iStartingProcessing = EFalse; |
|
3500 iState = EStateProcessing; |
|
3501 } |
|
3502 else |
|
3503 {// audio-only case |
|
3504 |
|
3505 iState = EStateProcessing; |
|
3506 iMonitor->ProcessingStarted(EFalse); |
|
3507 |
|
3508 TRAPD( error, iObserver->NotifyMovieProcessingStartedL(*iMovie) ); |
|
3509 if (error != KErrNone) |
|
3510 { |
|
3511 if (iMonitor) |
|
3512 iMonitor->Error(error); |
|
3513 return; |
|
3514 } |
|
3515 |
|
3516 // process all audio clips |
|
3517 ProcessAudioOnly(); |
|
3518 } |
|
3519 |
|
3520 PRINT((_L("CMovieProcessorImpl::DoStartProcessing() end"))) |
|
3521 |
|
3522 } |
|
3523 |
|
3524 |
|
3525 // ----------------------------------------------------------------------------- |
|
3526 // CMovieProcessorImpl::ProcessAudioOnly |
|
3527 // Processes the movie in audio-only case |
|
3528 // (other items were commented in a header). |
|
3529 // ----------------------------------------------------------------------------- |
|
3530 // |
|
3531 void CMovieProcessorImpl::ProcessAudioOnly() |
|
3532 { |
|
3533 |
|
3534 VPASSERT(iNumberOfAudioClips > 0); |
|
3535 |
|
3536 // write audio frames to file & encode a black video frame |
|
3537 TInt error; |
|
3538 TRAP(error, ProcessAudioL()); |
|
3539 if (error != KErrNone) |
|
3540 { |
|
3541 iMonitor->Error(error); |
|
3542 return; |
|
3543 } |
|
3544 |
|
3545 //VPASSERT(iEncodePending); |
|
3546 |
|
3547 } |
|
3548 |
|
3549 // ----------------------------------------------------------------------------- |
|
3550 // CMovieProcessorImpl::DoCloseVideoL |
|
3551 // Closes & deletes the structures used in processing |
|
3552 // (other items were commented in a header). |
|
3553 // ----------------------------------------------------------------------------- |
|
3554 // |
|
3555 void CMovieProcessorImpl::DoCloseVideoL() |
|
3556 { |
|
3557 if ((iState == EStateProcessing) || (iState == EStateReadyToProcess)|| |
|
3558 (iState == EStatePreparing) ) |
|
3559 { |
|
3560 PRINT((_L("CMovieProcessorImpl::DoCloseVideoL() - stopping"))) |
|
3561 User::LeaveIfError(Stop()); |
|
3562 iState = EStateOpened; |
|
3563 } |
|
3564 |
|
3565 // If we are buffering or opening at the moment or clip is open then close it |
|
3566 if ( (iState == EStateOpened) || (iState == EStateReadyToProcess)) |
|
3567 { |
|
3568 PRINT((_L("CMovieProcessorImpl::DoCloseVideoL() - closing stream"))) |
|
3569 User::LeaveIfError(CloseStream()); |
|
3570 iState = EStateIdle; |
|
3571 } |
|
3572 } |
|
3573 |
|
3574 |
|
3575 |
|
3576 // ----------------------------------------------------------------------------- |
|
3577 // CMovieProcessorImpl::Stop |
|
3578 // Stops processing & closes modules used in processing |
|
3579 // (other items were commented in a header). |
|
3580 // ----------------------------------------------------------------------------- |
|
3581 // |
|
TInt CMovieProcessorImpl::Stop()
    {
    TInt error = KErrNone;

    // Check state
    if ( (iState != EStateProcessing) && (iState != EStateReadyToProcess) && (iState != EStatePreparing) )
        return EInvalidProcessorState;
    // We may also get here from the middle of a Prepare() attempt.

    PRINT((_L("CMovieProcessorImpl::Stop() begin")))

    // Destroy the playback objects to stop playback. Deletion is trapped
    // so a destructor leave is converted into an error code.
    TRAP(error,
        {
        if (iDemux)
            delete iDemux;
        iDemux = 0;

        if (iAudioProcessor)
            delete iAudioProcessor;
        iAudioProcessor = 0;

        });
    if (error != KErrNone)
        return error;


    if (iVideoEncoder)
        {
        if (!iEncoderInitPending)
            {
            // Delete encoder. Don't delete now if encoding
            // is not in progress at encoder and there are
            // active encoding flags. This means that encoder
            // has completed encoding request, but this->RunL()
            // hasn't been called yet. Encoder will be deleted in RunL()

            if ( ( iVideoEncoder->IsEncodePending() == 1 ) ||
                 ( (!iEncodePending && !iEncodeInProgress) ) )
                {
                PRINT((_L("CMovieProcessorImpl::Stop() - deleting encoder")));
                // Cancel our own outstanding request before tearing the
                // encoder down.
                Cancel();
                iVideoEncoder->Stop();
                delete iVideoEncoder;
                iVideoEncoder = 0;
                }
            }
        }

    // Tear down the video pipeline (decoder and its input queue).
    if (iVideoProcessor)
        delete iVideoProcessor;
    iVideoProcessor = 0;

    if (iVideoQueue)
        delete iVideoQueue;
    iVideoQueue = 0;

    // Notify the monitor only for real processing runs (not thumbnails).
    if (!iThumbnailInProgress && iState == EStateProcessing)
        {
        if (iMonitor)
            iMonitor->ProcessingStopped();
        }

    iState = EStateOpened;

    PRINT((_L("CMovieProcessorImpl::Stop() end")))

    return KErrNone;
    }
|
3651 |
|
3652 |
|
3653 // ----------------------------------------------------------------------------- |
|
3654 // CMovieProcessorImpl::Close |
|
3655 // Stops processing and closes all submodules except status monitor |
|
3656 // (other items were commented in a header). |
|
3657 // ----------------------------------------------------------------------------- |
|
3658 // |
|
3659 TInt CMovieProcessorImpl::Close() |
|
3660 { |
|
3661 |
|
3662 // delete all objects except status monitor |
|
3663 delete iComposer; |
|
3664 iComposer = 0; |
|
3665 DeleteClipStructures(); |
|
3666 TRAPD(error, DoCloseVideoL()); |
|
3667 if (error != KErrNone) |
|
3668 return error; |
|
3669 |
|
3670 iState = EStateIdle; |
|
3671 |
|
3672 return KErrNone; |
|
3673 |
|
3674 } |
|
3675 |
|
3676 |
|
3677 // ----------------------------------------------------------------------------- |
|
3678 // CMovieProcessorImpl::RunL |
|
3679 // Called by the active scheduler when the video encoder initialization is done |
|
3680 // or an ending black frame has been encoded |
|
3681 // (other items were commented in a header). |
|
3682 // ----------------------------------------------------------------------------- |
|
3683 // |
|
3684 void CMovieProcessorImpl::RunL() |
|
3685 { |
|
3686 |
|
3687 PRINT((_L("RunL begin, iEncoderInitPending %d, iEncodePending %d, iEncodeInProgress %d"), |
|
3688 iEncoderInitPending, iEncodePending, iEncodeInProgress )); |
|
3689 |
|
3690 PRINT((_L("CMovieProcessorImpl::RunL begin - iEncodeInProgress %d"), iEncodeInProgress)); |
|
3691 |
|
3692 if (iAudioProcessingCompleted) |
|
3693 { |
|
3694 iAudioProcessingCompleted = EFalse; |
|
3695 FinalizeVideoSequenceL(); |
|
3696 return; |
|
3697 } |
|
3698 |
|
3699 if (iNumberOfVideoClips) |
|
3700 { |
|
3701 // If we come here after a generated 3gp clip has been created, |
|
3702 // ival == EVedVideoClipClassGenerated && iImageClipCreated == 1 |
|
3703 |
|
3704 TVedVideoClipClass ival = (TVedVideoClipClass)iVideoClip->Info()->Class(); |
|
3705 if(ival == EVedVideoClipClassGenerated && iImageClipCreated == 0) // iImageClipCreated will work only for single imageset currently |
|
3706 { |
|
3707 iEncoderInitPending = EFalse; // is this correct ?? |
|
3708 DoImageSetProcessL(); // Call function which does image file creation |
|
3709 return; |
|
3710 } |
|
3711 } |
|
3712 |
|
3713 if (iEncoderInitPending) |
|
3714 { |
|
3715 VPASSERT(!iEncodePending); |
|
3716 |
|
3717 PRINT((_L("CMovieProcessorImpl::RunL - encoder init complete"))); |
|
3718 // video encoder has been initialized => start processing |
|
3719 |
|
3720 iEncoderInitPending = EFalse; |
|
3721 |
|
3722 if (iProcessingCancelled) |
|
3723 { |
|
3724 PRINT((_L("CMovieProcessorImpl::RunL - processing cancelled"))); |
|
3725 if (iVideoEncoder) |
|
3726 { |
|
3727 iVideoEncoder->Stop(); |
|
3728 delete iVideoEncoder; |
|
3729 iVideoEncoder = 0; |
|
3730 } |
|
3731 PRINT((_L("CMovieProcessorImpl::RunL - calling cancelled callback"))); |
|
3732 if (iMonitor) |
|
3733 iMonitor->ProcessingCancelled(); |
|
3734 |
|
3735 return; |
|
3736 } |
|
3737 |
|
3738 if (iEncodingBlackFrames) |
|
3739 { |
|
3740 FinalizeVideoSequenceL(); |
|
3741 return; |
|
3742 } |
|
3743 |
|
3744 if (iNumberOfVideoClips) |
|
3745 { |
|
3746 |
|
3747 #ifdef VIDEOEDITORENGINE_AVC_EDITING |
|
3748 CVedVideoClipInfo* currentInfo = iVideoClip->Info(); |
|
3749 |
|
3750 // Save SPS/PPS data from input file even if output type |
|
3751 // is not AVC. This data is needed in doing blending transitions |
|
3752 // from AVC input to H.263/MPEG-4 output. An optimisation |
|
3753 // could be to check if such transitions are present in movie |
|
3754 // and save only when necessary. |
|
3755 |
|
3756 if ( (currentInfo->Class() == EVedVideoClipClassFile && |
|
3757 currentInfo->VideoType() == EVedVideoTypeAVCBaselineProfile) || |
|
3758 (iOutputVideoType == EVideoAVCProfileBaseline && |
|
3759 currentInfo->Class() == EVedVideoClipClassGenerated) ) |
|
3760 { |
|
3761 // save SPS/PPS NAL units |
|
3762 TInt size = iParser->GetDecoderSpecificInfoSize(); |
|
3763 HBufC8* buf = (HBufC8*) HBufC8::NewLC(size); |
|
3764 TPtr8 ptr = buf->Des(); |
|
3765 User::LeaveIfError( iParser->ReadAVCDecoderSpecificInfo(ptr) ); |
|
3766 iAvcEdit->SaveAVCDecoderConfigurationRecordL(ptr, EFalse); |
|
3767 CleanupStack::PopAndDestroy(); // buf |
|
3768 } |
|
3769 #endif |
|
3770 |
|
3771 // go to the beginning of the I frame immediately preceding the start cut time |
|
3772 User::LeaveIfError(iParser->SeekOptimalIntraFrame(iStartCutTime, 0, ETrue)); |
|
3773 |
|
3774 TInt currentFrameIndex = iOutputNumberOfFrames; |
|
3775 |
|
3776 // update parameters |
|
3777 SetOutputNumberOfFrames(iParser->iOutputNumberOfFrames); |
|
3778 |
|
3779 // fill in frame parameters |
|
3780 FillFrameParametersL(currentFrameIndex); |
|
3781 |
|
3782 // open video decoder & demux |
|
3783 User::LeaveIfError(Prepare()); |
|
3784 } |
|
3785 // if ( iVideoClipNumber > 0 || class != generated ) |
|
3786 if( iNumberOfVideoClips == 0 || !( (iVideoClipNumber==0) && ((TVedVideoClipClass)iVideoClip->Info()->Class()==(TVedVideoClipClass)EVedVideoClipClassGenerated)) ) |
|
3787 iMonitor->PrepareComplete(); |
|
3788 |
|
3789 // start processing |
|
3790 DoStartProcessing(); |
|
3791 } |
|
3792 |
|
3793 else if (iEncodePending) |
|
3794 { |
|
3795 // encoding complete |
|
3796 iEncodePending = EFalse; |
|
3797 |
|
3798 TInt64 frameDuration; // in ticks |
|
3799 if ( iLeftOverDuration >= 1000 ) |
|
3800 { |
|
3801 iLeftOverDuration -= 1000; |
|
3802 frameDuration = iOutputVideoTimeScale; // One second in ticks |
|
3803 } |
|
3804 else |
|
3805 { |
|
3806 iLeftOverDuration = 0; |
|
3807 frameDuration = TInt64( TReal(iOutputVideoTimeScale)/1000.0 * I64REAL(iLeftOverDuration) + 0.5 ); |
|
3808 } |
|
3809 |
|
3810 if (iStatus == KErrNone) |
|
3811 { |
|
3812 // Audio-only or audio longer than video |
|
3813 // => black frame is encoded at the end |
|
3814 |
|
3815 PRINT((_L("CMovieProcessorImpl::RunL - encoding complete"))); |
|
3816 |
|
3817 // fetch the bitstream & write it to output file, release bitstream buffer |
|
3818 TBool isKeyFrame = 0; |
|
3819 TPtrC8 buf(iVideoEncoder->GetBufferL(isKeyFrame)); |
|
3820 |
|
3821 TReal tsInTicks = I64REAL(iTimeStamp.Int64() / TInt64(1000)) * TReal(iOutputVideoTimeScale) / 1000.0; |
|
3822 |
|
3823 User::LeaveIfError( WriteVideoFrameToFile((TDesC8&)buf, TInt64(tsInTicks + 0.5), I64INT(frameDuration), |
|
3824 isKeyFrame, ETrue, EFalse, ETrue ) ); |
|
3825 |
|
3826 iVideoEncoder->ReturnBuffer(); |
|
3827 |
|
3828 // do not reset flag until here so that last frame of last clip gets the correct |
|
3829 // duration in case slow motion is used |
|
3830 iApplySlowMotion = EFalse; |
|
3831 } |
|
3832 #ifdef _DEBUG |
|
3833 else |
|
3834 { |
|
3835 PRINT((_L("CMovieProcessorImpl::RunL - encoding failed"))); |
|
3836 } |
|
3837 #endif |
|
3838 |
|
3839 iTimeStamp = TTimeIntervalMicroSeconds(iTimeStamp.Int64() + TInt64(1000000)); |
|
3840 |
|
3841 if (iLeftOverDuration > 0) |
|
3842 { |
|
3843 // encode another frame |
|
3844 VPASSERT(iEncoderBuffer); |
|
3845 TSize tmpSize = iMovie->Resolution(); |
|
3846 TUint yLength = tmpSize.iWidth * tmpSize.iHeight; |
|
3847 TUint uvLength = yLength >> 1; |
|
3848 TUint yuvLength = yLength + uvLength; |
|
3849 TPtr8 yuvPtr(iEncoderBuffer, yuvLength, yuvLength); |
|
3850 |
|
3851 if (!IsActive()) |
|
3852 { |
|
3853 SetActive(); |
|
3854 iStatus = KRequestPending; |
|
3855 } |
|
3856 |
|
3857 iVideoEncoder->EncodeFrameL(yuvPtr, iStatus, iTimeStamp); |
|
3858 iEncodePending = ETrue; |
|
3859 |
|
3860 } |
|
3861 else |
|
3862 { |
|
3863 // movie complete, close everything |
|
3864 |
|
3865 TInt error = FinalizeVideoWrite(); |
|
3866 if (error != KErrNone) |
|
3867 { |
|
3868 User::Leave(KErrGeneral); |
|
3869 } |
|
3870 |
|
3871 error = iComposer->Close(); // creates the output file |
|
3872 if (error != KErrNone) |
|
3873 { |
|
3874 User::Leave(KErrGeneral); |
|
3875 } |
|
3876 |
|
3877 // delete all objects except status monitor |
|
3878 delete iComposer; |
|
3879 iComposer = 0; |
|
3880 |
|
3881 if(iImageComposer) |
|
3882 { |
|
3883 delete iImageComposer; |
|
3884 iImageComposer=0; |
|
3885 } |
|
3886 |
|
3887 if (iImageAvcEdit) |
|
3888 { |
|
3889 delete iImageAvcEdit; |
|
3890 iImageAvcEdit = 0; |
|
3891 } |
|
3892 |
|
3893 DeleteClipStructures(); |
|
3894 |
|
3895 DoCloseVideoL(); |
|
3896 |
|
3897 VPASSERT(!iEncoderInitPending); |
|
3898 |
|
3899 PRINT((_L("CMovieProcessorImpl::RunL - calling completed callback"))); |
|
3900 iMonitor->ProcessingComplete(); |
|
3901 iState = EStateIdle; |
|
3902 } |
|
3903 |
|
3904 } |
|
3905 } |
|
3906 |
|
3907 |
|
3908 // ----------------------------------------------------------------------------- |
|
3909 // CMovieProcessorImpl::RunError |
|
3910 // Called by the AO framework when RunL method has leaved |
|
3911 // (other items were commented in a header). |
|
3912 // ----------------------------------------------------------------------------- |
|
3913 // |
|
3914 TInt CMovieProcessorImpl::RunError(TInt aError) |
|
3915 { |
|
3916 |
|
3917 if ( iCurClipFileName.Length() ) |
|
3918 { |
|
3919 iCurClip.Close(); |
|
3920 iFs.Delete( iCurClipFileName ); |
|
3921 iCurClipFileName.Zero(); |
|
3922 iCurClipDurationList.Reset(); |
|
3923 iCurClipTimeStampList.Reset(); |
|
3924 } |
|
3925 |
|
3926 iMonitor->Error(aError); |
|
3927 |
|
3928 return KErrNone; |
|
3929 } |
|
3930 |
|
3931 |
|
3932 // ----------------------------------------------------------------------------- |
|
3933 // CMovieProcessorImpl::DoCancel |
|
3934 // Cancels any pending asynchronous requests |
|
3935 // (other items were commented in a header). |
|
3936 // ----------------------------------------------------------------------------- |
|
3937 // |
|
3938 void CMovieProcessorImpl::DoCancel() |
|
3939 { |
|
3940 |
|
3941 PRINT((_L("CMovieProcessorImpl::DoCancel() begin"))) |
|
3942 |
|
3943 // Cancel our internal request |
|
3944 if ( iStatus == KRequestPending ) |
|
3945 { |
|
3946 PRINT((_L("CMovieProcessorImpl::DoCancel() cancel request"))) |
|
3947 TRequestStatus *status = &iStatus; |
|
3948 User::RequestComplete(status, KErrCancel); |
|
3949 } |
|
3950 |
|
3951 PRINT((_L("CMovieProcessorImpl::DoCancel() end"))) |
|
3952 } |
|
3953 |
|
3954 |
|
3955 // ----------------------------------------------------------------------------- |
|
3956 // CMovieProcessorImpl::SetHeaderDefaults |
|
3957 // Sets appropriate default values for processing parameters |
|
3958 // in audio-only case |
|
3959 // (other items were commented in a header). |
|
3960 // ----------------------------------------------------------------------------- |
|
3961 // |
|
3962 void CMovieProcessorImpl::SetHeaderDefaults() |
|
3963 { |
|
3964 |
|
3965 // set suitable default values |
|
3966 iHaveVideo = ETrue; |
|
3967 iVideoType = EVideoH263Profile0Level10; |
|
3968 iHaveAudio = ETrue; |
|
3969 iAudioType = EAudioAMR; |
|
3970 iNumDemuxChannels = 0; |
|
3971 |
|
3972 // resolution from movie |
|
3973 TSize tmpSize = iMovie->Resolution(); |
|
3974 iVideoParameters.iWidth = tmpSize.iWidth; |
|
3975 iVideoParameters.iHeight = tmpSize.iHeight; |
|
3976 iVideoParameters.iIntraFrequency = 0; |
|
3977 iVideoParameters.iNumScalabilityLayers = 0; |
|
3978 iVideoParameters.iReferencePicturesNeeded = 0; |
|
3979 // picture period in nanoseconds |
|
3980 iVideoParameters.iPicturePeriodNsec = TInt64(33366667); |
|
3981 |
|
3982 // output time scales |
|
3983 iOutputVideoTimeScale = KVideoTimeScale; |
|
3984 iOutputAudioTimeScale = KAMRAudioTimeScale; |
|
3985 |
|
3986 iStreamLength = 0; |
|
3987 iStreamSize = 0; |
|
3988 iStreamBitrate = 10000; |
|
3989 |
|
3990 } |
|
3991 |
|
3992 |
|
3993 // ----------------------------------------------------------------------------- |
|
3994 // CMovieProcessorImpl::ParseHeaderL |
|
3995 // Parses the clip header & sets internal variables accordingly |
|
3996 // (other items were commented in a header). |
|
3997 // ----------------------------------------------------------------------------- |
|
3998 // |
|
// Parses the current clip's 3GP header and sets the processor's internal
// variables accordingly. Audio properties are taken from the audio engine
// (the song), not from the input file. Leaves with EUnsupportedFormat for
// non-3GP input, EVideoTooLarge for oversized video, or a system error.
void CMovieProcessorImpl::ParseHeaderL()
    {
    TInt i;

    VPASSERT(iParser);

    // only the 3GP mux format is supported
    if ( iMuxType != EMux3GP )
        User::Leave(EUnsupportedFormat);

    CParser::TStreamParameters streamParams;

    // parse
    iParser->ParseHeaderL(streamParams);

    // don't read the audio properties from input files, but from audio engine
    CVedMovieImp* songmovie = (iMovie);
    CAudSong* songPointer = 0;

    if (songmovie)
        {
        songPointer = songmovie->Song();
        }

    if (songPointer)
        {

        TAudFileProperties prop = songPointer->OutputFileProperties();

        // movie has audio only if the song contains at least one clip
        streamParams.iHaveAudio = ETrue;
        if (songPointer->ClipCount(KAllTrackIndices) == 0)
            {
            streamParams.iHaveAudio = EFalse;
            }

        // audio length: microseconds -> milliseconds
        streamParams.iAudioLength = I64INT(prop.iDuration.Int64())/1000;
        streamParams.iAudioFormat = CParser::EAudioFormatNone;
        if (prop.iAudioType == EAudAMR)
            {
            // AMR: 5 frames per 3GP sample, fixed 1 kHz timescale
            streamParams.iAudioFormat = CParser::EAudioFormatAMR;
            streamParams.iAudioFramesInSample = 5;
            streamParams.iAudioTimeScale = 1000;
            }
        else if ( prop.iAudioType == EAudAAC_MPEG4 )
            {
            // AAC: one frame per sample, timescale = sampling rate
            streamParams.iAudioFormat = CParser::EAudioFormatAAC;
            streamParams.iAudioFramesInSample = 1;
            streamParams.iAudioTimeScale = prop.iSamplingRate;
            }
        }

    // copy input stream info into parser
    UpdateStreamParameters(iParser->iStreamParameters, streamParams);

    // copy parameters
    iHaveVideo = streamParams.iHaveVideo;
    iVideoType = (TVideoType)streamParams.iVideoFormat;
    iHaveAudio = streamParams.iHaveAudio;
    iAudioType = (TAudioType)streamParams.iAudioFormat;
    iNumDemuxChannels = streamParams.iNumDemuxChannels;
    iCanSeek = streamParams.iCanSeek;
    iVideoParameters.iWidth = streamParams.iVideoWidth;
    iVideoParameters.iHeight = streamParams.iVideoHeight;
    iVideoParameters.iIntraFrequency = streamParams.iVideoIntraFrequency;
    iVideoParameters.iNumScalabilityLayers = streamParams.iNumScalabilityLayers;
    iVideoParameters.iReferencePicturesNeeded = streamParams.iReferencePicturesNeeded;
    iVideoParameters.iPicturePeriodNsec = streamParams.iVideoPicturePeriodNsec;

    for (i = 0; i < (TInt)streamParams.iNumScalabilityLayers; i++)
        iVideoParameters.iLayerFrameRates[i] = streamParams.iLayerFrameRates[i];


    // assign time scale values; the output video timescale is fixed once,
    // when parsing the first clip
    if((iVideoClipNumber==0) && (iOutputVideoTimeSet==0))
        {
        iOutputVideoTimeSet = 1;
        iOutputVideoTimeScale = KVideoTimeScale;
        }

    // change the start and end times of the video clip from msec to ticks
    iVideoClipParameters[iVideoClipNumber].iStartTime = TInt64( TReal(iOutputVideoTimeScale)/1000.0 *
        I64REAL(iVideoClipParameters[iVideoClipNumber].iStartTime) );

    iVideoClipParameters[iVideoClipNumber].iEndTime = TInt64( TReal(iOutputVideoTimeScale)/1000.0 *
        I64REAL(iVideoClipParameters[iVideoClipNumber].iEndTime) );

    iVideoParameters.iTiming = CVideoProcessor::ETimeStamp;
    // picture period: nanoseconds -> milliseconds
    iMP4Parameters.iPicturePeriodMs = I64INT( (iVideoParameters.iPicturePeriodNsec / TInt64(1000000)) );
    iMP4Parameters.iAudioFramesInSample = streamParams.iAudioFramesInSample;


    if ( iHaveVideo )
        {
        iMP4Channels[0].iDataType = CMP4Demux::EDataVideo;
        }

    if ( iHaveAudio ) {

        iNumDemuxChannels = 1;

        // NOTE: audio is processed 'off-line' after a video
        // clip has been processed.

        //iMP4Channels[i].iDataType = CMP4Demux::EDataAudio;
        }

    iStreamLength = streamParams.iStreamLength;
    iStreamBitrate = streamParams.iStreamBitrate;
    iStreamSize = streamParams.iStreamSize;

    // Ensure that the video isn't too large
    // (skipped when only generating a thumbnail)
    if (!iThumbnailInProgress)
        {
        if ( (iVideoParameters.iWidth > KVedMaxVideoWidth) ||
             (iVideoParameters.iHeight > KVedMaxVideoHeight) )
            User::Leave(EVideoTooLarge);
        }

    }
|
4117 |
|
4118 // ----------------------------------------------------------------------------- |
|
4119 // CMovieProcessorImpl::UpdateStreamParameters |
|
4120 // Copies stream parameters to destination structure |
|
4121 // (other items were commented in a header). |
|
4122 // ----------------------------------------------------------------------------- |
|
4123 // |
|
4124 void CMovieProcessorImpl::UpdateStreamParameters(CParser::TStreamParameters& aDestParameters, |
|
4125 CParser::TStreamParameters& aSrcParameters) |
|
4126 { |
|
4127 TInt i; |
|
4128 aDestParameters.iHaveVideo = aSrcParameters.iHaveVideo; |
|
4129 aDestParameters.iHaveAudio = aSrcParameters.iHaveAudio; |
|
4130 aDestParameters.iNumDemuxChannels = aSrcParameters.iNumDemuxChannels; |
|
4131 aDestParameters.iFileFormat = aSrcParameters.iFileFormat; |
|
4132 aDestParameters.iVideoFormat = aSrcParameters.iVideoFormat; |
|
4133 aDestParameters.iAudioFormat = aSrcParameters.iAudioFormat; |
|
4134 aDestParameters.iAudioFramesInSample = aSrcParameters.iAudioFramesInSample; |
|
4135 aDestParameters.iVideoWidth = aSrcParameters.iVideoWidth; |
|
4136 aDestParameters.iVideoHeight = aSrcParameters.iVideoHeight; |
|
4137 aDestParameters.iVideoPicturePeriodNsec = aSrcParameters.iVideoPicturePeriodNsec; |
|
4138 aDestParameters.iVideoIntraFrequency = aSrcParameters.iVideoIntraFrequency; |
|
4139 aDestParameters.iStreamLength = aSrcParameters.iStreamLength; |
|
4140 aDestParameters.iVideoLength = aSrcParameters.iVideoLength; |
|
4141 aDestParameters.iAudioLength = aSrcParameters.iAudioLength; |
|
4142 aDestParameters.iCanSeek = aSrcParameters.iCanSeek; |
|
4143 aDestParameters.iStreamSize = aSrcParameters.iStreamSize; |
|
4144 aDestParameters.iStreamBitrate = aSrcParameters.iStreamBitrate; |
|
4145 aDestParameters.iMaxPacketSize = aSrcParameters.iMaxPacketSize; |
|
4146 aDestParameters.iLogicalChannelNumberVideo = aSrcParameters.iLogicalChannelNumberVideo; |
|
4147 aDestParameters.iLogicalChannelNumberAudio = aSrcParameters.iLogicalChannelNumberAudio; |
|
4148 aDestParameters.iReferencePicturesNeeded = aSrcParameters.iReferencePicturesNeeded; |
|
4149 aDestParameters.iNumScalabilityLayers = aSrcParameters.iNumScalabilityLayers; |
|
4150 for(i=0; i<(TInt)aSrcParameters.iNumScalabilityLayers; i++) |
|
4151 aDestParameters.iLayerFrameRates[i] = aSrcParameters.iLayerFrameRates[i]; |
|
4152 |
|
4153 aDestParameters.iFrameRate = aSrcParameters.iFrameRate; |
|
4154 aDestParameters.iVideoTimeScale = aSrcParameters.iVideoTimeScale; |
|
4155 aDestParameters.iAudioTimeScale = aSrcParameters.iAudioTimeScale; |
|
4156 |
|
4157 } |
|
4158 |
|
4159 |
|
4160 |
|
4161 // ----------------------------------------------------------------------------- |
|
4162 // CMovieProcessorImpl::FinalizeVideoClip |
|
4163 // Finalizes video clip once all its frames are processed |
|
4164 // (other items were commented in a header). |
|
4165 // ----------------------------------------------------------------------------- |
|
4166 // |
|
// Finalizes a video clip once all its frames have been processed.
// For the last clip (or when the size limit was exceeded) this kicks off
// audio processing; otherwise it rotates the transition temp files
// (next -> current), closes the video, and initializes the next clip.
// Errors are reported through iMonitor rather than by leaving.
void CMovieProcessorImpl::FinalizeVideoClip()
    {

    PRINT((_L("CMovieProcessorImpl::FinalizeVideoClip() begin")))

    TInt error = KErrNone;

    TInt64 endPosition = TInt64(iCurrentVideoTimeInTicks + 0.5);

    // account for a trailing transition in the clip's end position
    if ( TransitionDuration() )
        {
        endPosition += TInt64( (TReal)TransitionDuration() * (TReal)iOutputVideoTimeScale / (TReal)iParser->iStreamParameters.iVideoTimeScale + 0.5);
        }

    // convert time from ticks to millisec.
    // NOTE(review): endPosition is not read after this call; presumably
    // GetVideoTimeInMsFromTicks(..., ETrue) updates internal state -- confirm.
    endPosition = GetVideoTimeInMsFromTicks( endPosition, ETrue );

    CVedMovieImp* movie = (iMovie);

    // remaining audio-only tail after the last video clip, in milliseconds
    iLeftOverDuration = movie->Duration().Int64()/1000 -
        movie->VideoClip(iNumberOfVideoClips-1)->EndTime().Int64()/1000;

    // NOTE(review): error is still KErrNone here, so this check never
    // fires -- looks like a leftover from an earlier version.
    if (error != KErrNone)
        {
        iMonitor->Error(error);
        return;
        }

    // if last video clip in movie
    if(iVideoClipNumber == iNumberOfVideoClips-1 || iMovieSizeLimitExceeded)
        {
        iAllVideoProcessed = ETrue;
        // release the transition info & file server session
        if ( iFsConnected )
            {
            TRAP(error, CloseTransitionInfoL());
            if (error != KErrNone)
                {
                iMonitor->Error(error);
                return;
                }
            iFs.Close();
            iFsConnected = EFalse;
            }

        // process all audio
        TRAP(error, ProcessAudioL());
        if (error != KErrNone)
            {
            iMonitor->Error(error);
            return;
            }
        }
    else // process the next clip
        {

        // delete the temp file of the clip just finished
        if (iCurClipFileName.Length() )
            {
            iCurClip.Close();
            error = iFs.Delete( iCurClipFileName );
            if (error != KErrNone)
                {
                iMonitor->Error(error);
                return;
                }
            iCurClipFileName.Zero();
            iCurClipDurationList.Reset();
            iCurClipTimeStampList.Reset();
            iTimeStampListScaled = EFalse;
            }

        if ( iNextClipFileName.Length() )
            {
            iNextClip.Close();
            }

        // close the video
        TRAP(error, DoCloseVideoL());
        if (error != KErrNone)
            {
            iMonitor->Error(error);

            // close failed: also remove the pending next-clip temp file
            if ( iNextClipFileName.Length() )
                {
                error = iFs.Delete( iNextClipFileName );
                if (error != KErrNone)
                    {
                    iMonitor->Error(error);
                    return;
                    }
                iNextClipFileName.Zero();
                iNextClipDurationList.Reset();
                iNextClipTimeStampList.Reset();
                }
            return;
            }
        iMonitor->Closed();

        // remove the temp file used in image insertion
        if ( iFsConnected )
            {
            CFileMan *fileMan = 0;
            TRAP(error, fileMan = CFileMan::NewL( iFs ));
            if (error != KErrNone)
                {
                iMonitor->Error(error);
                return;
                }
            TParse filepath;
            filepath.Set( iOutputMovieFileName, NULL, NULL );
            TFileName filesToDelete = filepath.DriveAndPath();
            filesToDelete.Append( _L( "Im_nokia_vpi.tmp" ) );
            fileMan->Delete( filesToDelete,0 );
            delete fileMan;
            }

        // copy from current to next: the "next" transition file becomes
        // the "current" one for the upcoming clip
        iCurClipFileName = iNextClipFileName;
        iNextClipFileName.Zero();
        TInt duration;
        TInt64 timestamp;
        while ( iNextClipDurationList.Count() )
            {
            duration = iNextClipDurationList[0];
            iNextClipDurationList.Remove( 0 );
            iCurClipDurationList.Append( duration );
            timestamp = iNextClipTimeStampList[0];
            iNextClipTimeStampList.Remove( 0 );
            iCurClipTimeStampList.Append( timestamp );
            }

        iNextClipDurationList.Reset();
        iNextClipTimeStampList.Reset();
        iCurClipIndex = 0;

        // re-open the (renamed) current transition clip; on failure fall
        // back to processing without it
        if ( iCurClipFileName.Length() )
            {
            if ( iCurClip.Open(iFs, iCurClipFileName, EFileShareReadersOnly | EFileStream | EFileRead) != KErrNone )
                {
                if ( iCurClip.Open(iFs, iCurClipFileName, EFileShareAny | EFileStream | EFileRead) != KErrNone )
                    {
                    iCurClip.Close();
                    iCurClipFileName.Zero();
                    iCurClipDurationList.Reset();
                    iCurClipTimeStampList.Reset();
                    }
                }
            }

        VPASSERT(!iEncoderInitPending);

        // go to next clip
        iVideoClipNumber++;
        iVideoClip = movie->VideoClip(iVideoClipNumber);

        // generated clips (titles, image sets) use a different init path
        if(iVideoClip->Info()->Class() == EVedVideoClipClassGenerated)
            {
            TRAP(error, TemporaryInitializeGeneratedClipL());
            }
        else
            {
            TRAP(error, InitializeClipL());
            }
        if (error != KErrNone)
            {
            // init failed: clean up the transition temp file before reporting
            if ( iCurClipFileName.Length() )
                {
                iCurClip.Close();
                TInt fsError = iFs.Delete( iCurClipFileName );
                if (fsError != KErrNone)
                    {
                    iMonitor->Error(fsError);
                    return;
                    }
                iCurClipFileName.Zero();
                iCurClipDurationList.Reset();
                iCurClipTimeStampList.Reset();
                }
            iMonitor->Error(error);
            return;
            }

        }
    PRINT((_L("CMovieProcessorImpl::FinalizeVideoClip() end")))
    }
|
4350 |
|
4351 |
|
4352 // ----------------------------------------------------------------------------- |
|
4353 // CMovieProcessorImpl::ProcessAudioL |
|
4354 // Starts audio processing |
|
4355 // (other items were commented in a header). |
|
4356 // ----------------------------------------------------------------------------- |
|
4357 // |
|
// Starts audio processing for the whole movie. Writes the AAC decoder
// specific info to the composer if needed, trims the song duration when
// the movie size limit was exceeded, then creates and starts the audio
// processor. Leaves on failure.
void CMovieProcessorImpl::ProcessAudioL()
    {

    VPASSERT(iAudioProcessor == 0);

    CVedMovieImp* songmovie = (iMovie);
    CAudSong* songPointer = songmovie->Song();

    // always read audio decoder specific info from audio engine
    TAudType audioType = songPointer->OutputFileProperties().iAudioType;

    if( audioType == EAudAAC_MPEG4 )
        {
        HBufC8* audioinfo = 0;
        // allocates audioinfo and leaves it on the cleanup stack
        songPointer->GetMP4DecoderSpecificInfoLC(audioinfo,64);

        if (audioinfo != 0)
            {
            CMP4Composer* composeInfo = (CMP4Composer*)iComposer;
            User::LeaveIfError(composeInfo->WriteAudioSpecificInfo(audioinfo));
            // NOTE(review): Pop() + delete is equivalent to
            // CleanupStack::PopAndDestroy() here -- assumes the LC call
            // pushed exactly this buffer; verify against its implementation.
            CleanupStack::Pop();
            delete audioinfo;
            audioinfo = 0;
            }

        }
    // size-limited movie: cut the audio at the video end-cut time
    // (milliseconds -> microseconds)
    if( (iMovieSizeLimit > 0)&&(iMovieSizeLimitExceeded) )
        {
        songPointer->SetDuration(iEndCutTime.Int64()*1000);
        }

    // create audioprocessor; completion is reported via
    // AudioProcessingComplete()
    iAudioProcessor = CAudioProcessor::NewL(this, songPointer);

    // start processing
    iAudioProcessor->StartL();


    }
|
4397 |
|
4398 // ----------------------------------------------------------------------------- |
|
4399 // CMovieProcessorImpl::AudioProcessingComplete |
|
4400 // Called by audio processor when audio processing has been completed |
|
4401 // (other items were commented in a header). |
|
4402 // ----------------------------------------------------------------------------- |
|
4403 // |
|
4404 void CMovieProcessorImpl::AudioProcessingComplete(TInt aError) |
|
4405 { |
|
4406 |
|
4407 if (aError != KErrNone) |
|
4408 { |
|
4409 iMonitor->Error(aError); |
|
4410 return; |
|
4411 } |
|
4412 |
|
4413 if (iProcessingCancelled) |
|
4414 return; |
|
4415 |
|
4416 TInt error = FinalizeAudioWrite(); |
|
4417 if (error != KErrNone) |
|
4418 { |
|
4419 iMonitor->Error(error); |
|
4420 return; |
|
4421 } |
|
4422 |
|
4423 iAudioProcessingCompleted = ETrue; |
|
4424 |
|
4425 // since this is run in audio processor's RunL, the audio processor |
|
4426 // cannot be deleted here. signal the AO to finish in this->RunL() |
|
4427 |
|
4428 VPASSERT(!IsActive()); |
|
4429 |
|
4430 SetActive(); |
|
4431 iStatus = KRequestPending; |
|
4432 TRequestStatus *status = &iStatus; |
|
4433 User::RequestComplete(status, KErrNone); |
|
4434 |
|
4435 |
|
4436 |
|
4437 } |
|
4438 |
|
4439 // ----------------------------------------------------------------------------- |
|
4440 // CMovieProcessorImpl::FinalizeVideoSequence |
|
4441 // Finalizes the movie once all clips have been processed |
|
4442 // (other items were commented in a header). |
|
4443 // ----------------------------------------------------------------------------- |
|
4444 // |
|
// Finalizes the movie once all clips have been processed. If audio runs
// longer than video, black filler frames are encoded first (this method
// is then re-entered via RunL after each async encoder step); otherwise
// the output file is finalized and the completion callback is fired.
void CMovieProcessorImpl::FinalizeVideoSequenceL()
    {

    CVedMovieImp* movie = (iMovie);

    // calculate video left-over duration in case audio runs longer than video
    iLeftOverDuration = 0;
    if (iNumberOfVideoClips > 0)
        {
        iLeftOverDuration = movie->Duration().Int64()/1000 -
            movie->VideoClip(iNumberOfVideoClips-1)->EndTime().Int64()/1000;
        }
    else
        iLeftOverDuration = movie->Duration().Int64()/1000;

    if ( iLeftOverDuration > 0)
        {
        VPASSERT( iNumberOfAudioClips!=0 );
        // encode black frames

        // the decoder-side processor is no longer needed
        if(iVideoProcessor)
            {
            delete iVideoProcessor;
            iVideoProcessor = 0;
            }

        if (!iVideoEncoder)
            {
            // create and initialise encoder

            iVideoEncoder = CVideoEncoder::NewL(iMonitor, iAvcEdit, iMovie->VideoCodecMimeType());

            iVideoEncoder->SetFrameSizeL(iMovie->Resolution());

            if ( iMovie->VideoFrameRate() > 0 )
                iVideoEncoder->SetFrameRate( iMovie->VideoFrameRate() );
            if ( iMovie->VideoBitrate() > 0 ) // if there is request for restricted bitrate, use it
                iVideoEncoder->SetBitrate( iMovie->VideoBitrate() );
            else if ( iMovie->VideoStandardBitrate() > 0 ) // use the given standard bitrate
                iVideoEncoder->SetBitrate( iMovie->VideoStandardBitrate() );

            // use input framerate of 1 fps
            iVideoEncoder->SetInputFrameRate(1.0);

            if( iMovie->RandomAccessRate() > 0.0 )
                iVideoEncoder->SetRandomAccessRate( iMovie->RandomAccessRate() );

            // async. initialization; RunL re-enters this method when done
            if (!IsActive())
                {
                SetActive();
                iStatus = KRequestPending;
                }
            iVideoEncoder->InitializeL(iStatus);

            iEncoderInitPending = ETrue;
            iEncodingBlackFrames = ETrue;
            return;

            }
        // encoder ready: prepare one black YUV 4:2:0 input frame
        TSize tmpSize = iMovie->Resolution();
        TUint yLength = tmpSize.iWidth * tmpSize.iHeight;
        TUint uvLength = yLength >> 1;
        TUint yuvLength = yLength + uvLength;

        if ( iEncoderBuffer == 0 )
            {
            // allocate memory for encoder input YUV frame
            iEncoderBuffer = (TUint8*)User::AllocL(yuvLength);
            }

        // fill buffer with 'black' data
        // Y
        TPtr8 yuvPtr(0,0);
        TInt data=5; // don't use zero - real player doesn't show all-zero frames
        yuvPtr.Set(iEncoderBuffer, yLength, yLength);
        yuvPtr.Fill((TChar)data, yLength);

        // U,V
        data=128;
        yuvPtr.Set(iEncoderBuffer + yLength, uvLength, uvLength);
        yuvPtr.Fill((TChar)data, uvLength);

        yuvPtr.Set(iEncoderBuffer, yuvLength, yuvLength);

        // black frames start where the last video clip ended
        if (iNumberOfVideoClips == 0)
            iTimeStamp = 0;
        else
            iTimeStamp = movie->VideoClip(iNumberOfVideoClips-1)->CutOutTime();

        if (!IsActive())
            {
            SetActive();
            iStatus = KRequestPending;
            }

        // async. encode; RunL handles the encoded frame (iEncodePending)
        iVideoEncoder->EncodeFrameL(yuvPtr, iStatus, iTimeStamp);
        iEncodePending = ETrue;

        return;

        }

    // movie complete, close everything

    User::LeaveIfError(FinalizeVideoWrite());

    // delete all objects except status monitor
    if (iComposer)
        {
        User::LeaveIfError(iComposer->Close());
        delete iComposer;
        iComposer = 0;
        }

    DoCloseVideoL();

    DeleteClipStructures();

    PRINT((_L("CMovieProcessorImpl::FinalizeVideoSequence() - calling completed callback")))

    iMonitor->ProcessingComplete();
    iState = EStateIdle;

    }
|
4570 |
|
4571 // ----------------------------------------------------------------------------- |
|
4572 // CMovieProcessorImpl::CurrentMetadataSize |
|
4573 // Get current metadata size |
|
4574 // (other items were commented in a header). |
|
4575 // ----------------------------------------------------------------------------- |
|
4576 // |
|
4577 TUint CMovieProcessorImpl::CurrentMetadataSize() |
|
4578 { |
|
4579 TBool haveAudio; |
|
4580 TBool haveVideo; |
|
4581 TUint metadatasize = 0; |
|
4582 |
|
4583 haveAudio = EFalse; |
|
4584 haveVideo = EFalse; |
|
4585 |
|
4586 if ( GetOutputVideoType() == EVedVideoTypeH263Profile0Level10 || |
|
4587 GetOutputVideoType() == EVedVideoTypeH263Profile0Level45 ) |
|
4588 { |
|
4589 haveVideo = ETrue; |
|
4590 metadatasize += 574; // Constant size H.263 metadata |
|
4591 metadatasize += (iVideoFrameNumber * 16 + iVideoIntraFrameNumber * 4); // Content dependent H.263 metadata |
|
4592 } |
|
4593 |
|
4594 if ( GetOutputVideoType() == EVedVideoTypeMPEG4SimpleProfile ) |
|
4595 { |
|
4596 haveVideo = ETrue; |
|
4597 metadatasize += 596; // Constant size MPEG-4 video metadata |
|
4598 metadatasize += (iVideoFrameNumber * 16 + iVideoIntraFrameNumber * 4); // Content dependent MPEG-4 video metadata |
|
4599 } |
|
4600 |
|
4601 if ( GetOutputAudioType() == EVedAudioTypeAMR ) // AMR-NB |
|
4602 { |
|
4603 haveAudio = ETrue; |
|
4604 metadatasize += 514; // Constant size AMR metadata |
|
4605 metadatasize += ((iAudioFrameNumber + KVedAudioFramesInSample - 1) / KVedAudioFramesInSample) * 8; |
|
4606 } |
|
4607 |
|
4608 if ( GetOutputAudioType() == EVedAudioTypeAAC_LC ) // MPEG-4 AAC-LC |
|
4609 { |
|
4610 haveAudio = ETrue; |
|
4611 metadatasize += 514; // Constant size metadata |
|
4612 metadatasize += (iAudioFrameNumber * 8); |
|
4613 } |
|
4614 |
|
4615 if (haveAudio && haveVideo) |
|
4616 metadatasize -= 116; // There is only one moov and mvhd in a file |
|
4617 |
|
4618 return metadatasize; |
|
4619 |
|
4620 } |
|
4621 |
|
4622 // ----------------------------------------------------------------------------- |
|
4623 // CMovieProcessorImpl::BufferAMRFrames |
|
4624 // Collects output audio frames to a buffer and writes them |
|
4625 // to the output 3gp file when a whole audio sample is available |
|
4626 // (other items were commented in a header). |
|
4627 // ----------------------------------------------------------------------------- |
|
4628 // |
|
4629 TInt CMovieProcessorImpl::BufferAMRFrames(const TDesC8& aBuf, TInt aNumFrames,TInt aDuration) |
|
4630 |
|
4631 { |
|
4632 |
|
4633 VPASSERT(iOutAudioBuffer); |
|
4634 TPtr8 outAudioPtr(iOutAudioBuffer->Des()); |
|
4635 TInt error = KErrNone; |
|
4636 |
|
4637 if((outAudioPtr.Length() + aBuf.Length()) > outAudioPtr.MaxLength()) |
|
4638 { |
|
4639 // extend buffer size |
|
4640 |
|
4641 // New size is 3/2ths of the old size, rounded up to the next |
|
4642 // full kilobyte |
|
4643 TUint newSize = (3 * outAudioPtr.MaxLength()) / 2; |
|
4644 newSize = (newSize + 1023) & (~1023); |
|
4645 TRAP(error, (iOutAudioBuffer = iOutAudioBuffer->ReAllocL(newSize)) ); |
|
4646 |
|
4647 if (error != KErrNone) |
|
4648 return error; |
|
4649 |
|
4650 PRINT((_L("CMovieProcessorImpl::BufferAMRFrames() - extended buffer to %d bytes"), newSize)); |
|
4651 |
|
4652 outAudioPtr.Set(iOutAudioBuffer->Des()); |
|
4653 } |
|
4654 outAudioPtr.Append(aBuf); |
|
4655 iTotalDurationInSample += aDuration; |
|
4656 iAudioFramesInBuffer += aNumFrames; |
|
4657 |
|
4658 return KErrNone; |
|
4659 } |
|
4660 |
|
4661 |
|
4662 // ----------------------------------------------------------------------------- |
|
4663 // CMovieProcessorImpl::WriteAMRSamplesToFile |
|
4664 // Write buffered AMR sample(s) to composer |
|
4665 // (other items were commented in a header). |
|
4666 // ----------------------------------------------------------------------------- |
|
4667 // |
|
TInt CMovieProcessorImpl::WriteAMRSamplesToFile()
    {
    // Writes buffered AMR frames to the output 3gp file, one audio sample
    // (= KVedAudioFramesInSample frames) per composer call, until fewer than
    // a full sample's worth of frames remain buffered.
    // @return KErrNone or an error code from the composer

    VPASSERT(iOutAudioBuffer);
    TPtr8 outAudioPtr(iOutAudioBuffer->Des());

    VPASSERT(TUint(iAudioFramesInSample) == KVedAudioFramesInSample);

    while ( iAudioFramesInBuffer >= iAudioFramesInSample )
        {
        // write one audio sample to file

        TInt audioSampleDurationInTicks = 0;
        TPtrC8 writeDes;
        TInt sampleSize = 0;
        TUint8* frameBuffer = (TUint8*)outAudioPtr.Ptr();
        TInt error = KErrNone;

        // The whole buffered payload is written as this sample.
        // NOTE(review): because sampleSize spans the entire buffer, the
        // "copy rest of the data" branch below looks unreachable in the
        // normal case -- confirm whether the buffer can ever hold more
        // than one sample's worth of frames here.
        sampleSize = outAudioPtr.Length();

        // duration accumulated for this sample, converted ms -> ticks
        audioSampleDurationInTicks = GetAudioTimeInTicksFromMs(iTotalDurationInSample/1000);

        // set descriptor to sample
        writeDes.Set( outAudioPtr.Left(sampleSize) );

        // compose audio to output 3gp file
        error = iComposer->WriteFrames((TDesC8&)writeDes, sampleSize, audioSampleDurationInTicks, 0 /*iAudioKeyFrame*/,
                                       iAudioFramesInSample, CMP4Composer::EFrameTypeAudio);

        // reset the per-sample duration accumulator
        iTotalDurationInSample = 0;

        if (error != KErrNone)
            return error;

        if ( outAudioPtr.Length() > sampleSize )
            {
            // copy rest of the data to beginning of buffer
            frameBuffer = (TUint8*)outAudioPtr.Ptr();
            TInt len = outAudioPtr.Length() - sampleSize;
            Mem::Copy(frameBuffer, frameBuffer + sampleSize, len);
            outAudioPtr.SetLength(len);
            }
        else
            {
            // everything was written; the loop invariant requires exactly
            // one sample's worth of frames remained
            VPASSERT(iAudioFramesInBuffer == iAudioFramesInSample);
            outAudioPtr.SetLength(0);
            }

        iAudioFramesInBuffer -= iAudioFramesInSample;
        iAudioFrameNumber += iAudioFramesInSample;   // total frames composed so far
        }
    return KErrNone;
    }
|
4729 |
|
4730 TInt CMovieProcessorImpl::WriteAllAudioFrames(TDesC8& aBuf, TInt aDuration) |
|
4731 { |
|
4732 |
|
4733 if (iDiskFull) |
|
4734 return KErrDiskFull; |
|
4735 |
|
4736 // check available disk space |
|
4737 |
|
4738 TInt error; |
|
4739 TInt64 freeSpace = 0; |
|
4740 // get free space on disk |
|
4741 TRAP(error, freeSpace = iComposer->DriveFreeSpaceL()); |
|
4742 if (error != KErrNone) |
|
4743 return error; |
|
4744 |
|
4745 // subtract metadata length from free space |
|
4746 freeSpace -= TInt64(CurrentMetadataSize()); |
|
4747 |
|
4748 if (freeSpace < TInt64(KDiskSafetyLimit)) |
|
4749 { |
|
4750 iDiskFull = ETrue; |
|
4751 return KErrDiskFull; |
|
4752 } |
|
4753 |
|
4754 |
|
4755 if ( GetOutputAudioType() == EVedAudioTypeAMR ) |
|
4756 { |
|
4757 // write frame by frame until duration >= duration for this clip |
|
4758 |
|
4759 // NOTE: clip duration not checked!!! |
|
4760 |
|
4761 TPtr8 ptr(0,0); |
|
4762 TUint8* buf = (TUint8*)aBuf.Ptr(); |
|
4763 TInt frameSize = aBuf.Length(); |
|
4764 |
|
4765 ptr.Set(buf, frameSize, frameSize); |
|
4766 error = BufferAMRFrames(ptr, 1,aDuration); |
|
4767 if ( error != KErrNone ) |
|
4768 return error; |
|
4769 iTotalAudioTimeWrittenMs += (aDuration/1000); //20; |
|
4770 buf += frameSize; |
|
4771 |
|
4772 } |
|
4773 else |
|
4774 { |
|
4775 // write buffer directly to file (1 frame per sample in AAC) |
|
4776 |
|
4777 error = KErrNone; |
|
4778 // TInt error = KErrNone; |
|
4779 TInt sampleSize = aBuf.Length(); |
|
4780 // const TInt framesInSample = 1; |
|
4781 TInt audioSampleDurationInTicks = GetAudioTimeInTicksFromMs(aDuration/1000); |
|
4782 |
|
4783 iTotalAudioTimeWrittenMs += (aDuration/1000); |
|
4784 |
|
4785 VPASSERT(iAudioFramesInSample == 1); |
|
4786 error = iComposer->WriteFrames(aBuf, sampleSize, audioSampleDurationInTicks, 0 /*iAudioKeyFrame*/, |
|
4787 iAudioFramesInSample, CMP4Composer::EFrameTypeAudio); |
|
4788 } |
|
4789 |
|
4790 if ( GetOutputAudioType() == EVedAudioTypeAMR ) |
|
4791 { |
|
4792 error = WriteAMRSamplesToFile(); |
|
4793 if ( error != KErrNone ) |
|
4794 return error; |
|
4795 } |
|
4796 |
|
4797 if ( (iTotalAudioTimeWrittenMs % 200) == 0 ) |
|
4798 IncProgressBar(); |
|
4799 |
|
4800 return KErrNone; |
|
4801 |
|
4802 } |
|
4803 |
|
4804 |
|
4805 // ----------------------------------------------------------------------------- |
|
4806 // CMovieProcessorImpl::FinalizeAudioWrite |
|
4807 // Write the remaining audio frames at the end of processing |
|
4808 // (other items were commented in a header). |
|
4809 // ----------------------------------------------------------------------------- |
|
4810 // |
|
4811 TInt CMovieProcessorImpl::FinalizeAudioWrite() |
|
4812 { |
|
4813 |
|
4814 VPASSERT(iAudioFramesInBuffer < iAudioFramesInSample); |
|
4815 |
|
4816 if (iAudioFramesInBuffer == 0) |
|
4817 return KErrNone; |
|
4818 |
|
4819 if(iOutputAudioType == EAudioAAC) |
|
4820 { |
|
4821 // There should not be any frames in buffer !! |
|
4822 VPASSERT(0); |
|
4823 } |
|
4824 |
|
4825 TInt error = KErrNone; |
|
4826 TInt sampleSize = 0; |
|
4827 // TInt frameNum = 0; |
|
4828 // TInt frameSize = 0; |
|
4829 |
|
4830 TPtr8 outAudioPtr(iOutAudioBuffer->Des()); |
|
4831 // const TInt frameSamples = 160; |
|
4832 // const TInt samplingRate = KVedAudioSamplingRate8k; |
|
4833 |
|
4834 TInt audioSampleDurationInTicks = 0; |
|
4835 TPtrC8 writeDes; |
|
4836 TUint8* frameBuffer = (TUint8*)outAudioPtr.Ptr(); |
|
4837 |
|
4838 // Gets Size of buffer of current frames |
|
4839 sampleSize = outAudioPtr.Length(); |
|
4840 |
|
4841 // if frame is sampled at a different rate |
|
4842 audioSampleDurationInTicks = GetAudioTimeInTicksFromMs(iTotalDurationInSample/1000); |
|
4843 |
|
4844 // set descriptor to sample |
|
4845 writeDes.Set( outAudioPtr.Left(sampleSize) ); |
|
4846 |
|
4847 // compose audio to output 3gp file |
|
4848 error = iComposer->WriteFrames((TDesC8&)writeDes, sampleSize, audioSampleDurationInTicks, 0 /*iAudioKeyFrame*/, |
|
4849 iAudioFramesInBuffer, CMP4Composer::EFrameTypeAudio); |
|
4850 |
|
4851 iTotalDurationInSample = 0; // resetting the value in sample |
|
4852 |
|
4853 if (error != KErrNone) |
|
4854 return error; |
|
4855 |
|
4856 outAudioPtr.SetLength(0); |
|
4857 iAudioFrameNumber += iAudioFramesInBuffer; |
|
4858 iAudioFramesInBuffer = 0; |
|
4859 |
|
4860 |
|
4861 return KErrNone; |
|
4862 |
|
4863 } |
|
4864 |
|
4865 // ----------------------------------------------------------------------------- |
|
4866 // CMovieProcessorImpl::SetFrameType |
|
4867 // Sets the frame type (inter/intra) for a frame |
|
4868 // (other items were commented in a header). |
|
4869 // ----------------------------------------------------------------------------- |
|
4870 // |
|
4871 void CMovieProcessorImpl::SetFrameType(TInt aFrameIndex, TUint8 aType) |
|
4872 { |
|
4873 VPASSERT(iFrameParameters); |
|
4874 iFrameParameters[aFrameIndex].iType = aType; |
|
4875 } |
|
4876 |
|
4877 // ----------------------------------------------------------------------------- |
|
4878 // CMovieProcessorImpl::GetVideoTimeInMsFromTicks |
|
4879 // Converts a video timestamp from ticks to milliseconds |
|
4880 // (other items were commented in a header). |
|
4881 // ----------------------------------------------------------------------------- |
|
4882 // |
|
4883 TInt64 CMovieProcessorImpl::GetVideoTimeInMsFromTicks(TInt64 aTimeStampInTicks, TBool aCommonTimeScale) const |
|
4884 { |
|
4885 TUint timeScale = aCommonTimeScale ? iOutputVideoTimeScale : iParser->iStreamParameters.iVideoTimeScale; |
|
4886 VPASSERT(timeScale > 0); |
|
4887 return TInt64( I64REAL(aTimeStampInTicks) / (TReal)timeScale * 1000 + 0.5 ); |
|
4888 } |
|
4889 |
|
4890 // ----------------------------------------------------------------------------- |
|
4891 // CMovieProcessorImpl::GetVideoTimeInTicksFromMs |
|
4892 // Converts a video timestamp from ms to ticks |
|
4893 // (other items were commented in a header). |
|
4894 // ----------------------------------------------------------------------------- |
|
4895 // |
|
4896 TInt64 CMovieProcessorImpl::GetVideoTimeInTicksFromMs(TInt64 aTimeStampInMs, TBool aCommonTimeScale) const |
|
4897 { |
|
4898 TUint timeScale = aCommonTimeScale ? iOutputVideoTimeScale : iParser->iStreamParameters.iVideoTimeScale; |
|
4899 |
|
4900 VPASSERT(timeScale > 0); |
|
4901 |
|
4902 return TInt64( I64REAL(aTimeStampInMs) * (TReal)timeScale / 1000 + 0.5 ); |
|
4903 } |
|
4904 |
|
4905 // ----------------------------------------------------------------------------- |
|
4906 // CMovieProcessorImpl::GetAudioTimeInMsFromTicks |
|
4907 // Converts an audio timestamp from ticks to milliseconds |
|
4908 // (other items were commented in a header). |
|
4909 // ----------------------------------------------------------------------------- |
|
4910 // |
|
4911 TUint CMovieProcessorImpl::GetAudioTimeInMsFromTicks(TUint aTimeStampInTicks) const |
|
4912 { |
|
4913 |
|
4914 TUint timeScale = 0; |
|
4915 if(iParser) |
|
4916 { |
|
4917 timeScale = iParser->iStreamParameters.iAudioTimeScale; |
|
4918 } |
|
4919 if(timeScale == 0) |
|
4920 { |
|
4921 //means no audio in clip use output audio timescale itself |
|
4922 timeScale = iOutputAudioTimeScale; |
|
4923 } |
|
4924 VPASSERT(timeScale > 0); |
|
4925 return TUint( (TReal)aTimeStampInTicks / (TReal)timeScale * 1000 + 0.5 ); |
|
4926 } |
|
4927 |
|
4928 |
|
4929 // ----------------------------------------------------------------------------- |
|
4930 // CMovieProcessorImpl::GetAudioTimeInTicksFromMs |
|
4931 // Converts an audio timestamp from milliseconds to ticks |
|
4932 // (other items were commented in a header). |
|
4933 // ----------------------------------------------------------------------------- |
|
4934 // |
|
4935 TUint CMovieProcessorImpl::GetAudioTimeInTicksFromMs(TUint aTimeStampInMs) const |
|
4936 |
|
4937 { |
|
4938 return TUint( (TReal)aTimeStampInMs * (TReal)iOutputAudioTimeScale / 1000.0 + 0.5 ); |
|
4939 } |
|
4940 |
|
4941 // ----------------------------------------------------------------------------- |
|
4942 // CMovieProcessorImpl::WriteVideoFrameToFile |
|
4943 // Writes a video frame to output 3gp file |
|
4944 // (other items were commented in a header). |
|
4945 // ----------------------------------------------------------------------------- |
|
4946 // |
|
TInt CMovieProcessorImpl::WriteVideoFrameToFile(TDesC8& aBuf, TInt64 aTimeStampInTicks,
                                                TInt /*aDurationInTicks*/, TBool aKeyFrame,
                                                TBool aCommonTimeScale, TBool aColorTransitionFlag,
                                                TBool aFromEncoder)
    {
    // Writes the previously buffered video frame to the output 3gp file and
    // then buffers the incoming frame (aBuf). One frame is always kept
    // buffered because a frame's duration is only known once the NEXT
    // frame's timestamp arrives.
    // @return KErrNone; KErrDiskFull when free space runs out;
    //         KErrCompletion when the clip duration or the movie size limit
    //         is reached; otherwise a system-wide error code.

    //TReal duration = TReal(aDurationInTicks);
    TBool lessThenZero = false;    // set when a negative duration was patched below

    if (iDiskFull)
        return KErrDiskFull;

    // check available disk space
    TInt64 freeSpace = 0;
    TInt error;
    // get free space on disk
    TRAP(error, freeSpace = iComposer->DriveFreeSpaceL());
    if (error != KErrNone)
        return error;

    // subtract metadata length from free space
    freeSpace -= TInt64(CurrentMetadataSize());

    if (freeSpace < TInt64(KDiskSafetyLimit))
        {
        PRINT((_L("CMovieProcessorImpl::WriteVideoFrameToFile() freeSpace below limit")))
        iDiskFull = ETrue;
        return KErrDiskFull;
        }

    // check if we are writing the last frames of the previous
    // clip while doing crossfade / wipe; iFirstFrameOfClip should
    // be true only when writing the first frame of the current clip
    if ( iCurClipTimeStampList.Count() )
        {
        iFirstFrameOfClip = EFalse;
        if ( aColorTransitionFlag && !iFirstFrameFlagSet )
            {
            // this frame is the first one of the second clip
            iFirstFrameOfClip = ETrue;
            iFirstFrameFlagSet = ETrue;
            }
        }

    TVideoType vt = (TVideoType)iOutputVideoType;

    // Decide MP4-specific (VOS/decoder-config) header handling on the first
    // frame of the movie or of each clip.
    // NOTE(review): original TODO -- this may be wrong when clips are of
    // different types; the MP4-specific size might need to be re-obtained
    // per clip, not only for the first frame.
    if (iVideoFrameNumber == 0 || iFirstFrameOfClip)
        {
        if (!iNumberOfVideoClips)
            {
            // all clips were either from generator or were black frames
            if ( iAllGeneratedClips )
                {
                iMP4SpecificSize = 0;
                if (vt == EVideoMPEG4)
                    {
                    iModeChanged = ETrue;
                    iFirstClipUsesEncoder = ETrue;
                    }
                }
            else
                {
                // black frames; no parser or decoder exists;
                // no need to modify the vos header by the decoder,
                // but this forces the composer to search for the VOS header
                // in the 1st frame data
                iModeChanged = ETrue;
                }
            }
        else
            {
            // VOS header size is parsed in composer for MPEG-4, in H.263 it is ignored
            iMP4SpecificSize = 0;
            }
        }

    // IMPORTANT: need to make decision here whether to change VosBit
    // before writing to file, if first frame was encoded
    if (vt == EVideoMPEG4)
        {
        if(iVideoFrameNumber == 0)
            {
            if(iMpeg4ModeTranscoded)
                {
                // let the video processor fix the VOS header bit in the
                // first frame if the stream was transcoded
                TBool sBitChanged = EFalse;
                TRAP(error, sBitChanged = iVideoProcessor->CheckVosHeaderL((TPtrC8&)aBuf));
                if(sBitChanged) { }    // value only used in the debug print below
                PRINT((_L("CMovieProcessorImpl::WritVideoFrameToFile() bit changed: %d"), sBitChanged))
                if (error != KErrNone)
                    return error;
                }
            }
        }

    TInt currentMetaDataSize = CurrentMetadataSize();
    TInt64 oldVideoTime = GetVideoTimeInMsFromTicks((TInt64) iCurrentVideoTimeInTicks, ETrue);

    if (iFrameBuffered)
        {
        // write buffered frame to file; its duration is the gap between the
        // incoming frame's timestamp and the buffered one
        TInt64 durationMs = GetVideoTimeInMsFromTicks(aTimeStampInTicks, aCommonTimeScale) - iBufferedTimeStamp;

        if (iWriting1stColorTransitionFrame)
            {
            // previous clip's last frame lasts until the first
            // color-transition frame of the next clip
            durationMs = i1stColorTransitionFrameTS - iBufferedTimeStamp;
            iWriting1stColorTransitionFrame = EFalse;
            }

        else if (iFirstFrameOfClip)
            {
            if ( iCurClipTimeStampList.Count() == 0 )
                {
                VPASSERT(iVideoClipNumber > 0);
                CVedMovieImp* movie = reinterpret_cast<CVedMovieImp*>(iMovie);
                // first frame of new clip: the buffered frame of the previous
                // clip lasts until that clip's cut-out point
                durationMs = movie->VideoClip(iVideoClipNumber-1)->CutOutTime().Int64()/1000 - iBufferedTimeStamp;
                }
            }

        if (durationMs < 0)
            {
            // timestamps went backwards: substitute one nominal frame
            // interval and keep the old buffered timestamp (see below)
            lessThenZero = true;
            CVedMovieImp* movie = reinterpret_cast<CVedMovieImp*>(iMovie);
            TReal frameRate = (movie->VideoFrameRate() > 0.0) ? movie->VideoFrameRate() : 15.0;
            durationMs = TInt64( ( 1000.0 / frameRate ) + 0.5 );
            }

        if ( iNumberOfVideoClips )
            {
            // clamp the written duration to the clip's (cut) duration
            TInt64 clipDuration = iVideoClip->CutOutTime().Int64()/1000 -
                                  iVideoClip->CutInTime().Int64()/1000;

            if ( iVideoClipNumber == iNumberOfVideoClips - 1 )
                {
                // Add duration of possible black ending frames
                CVedMovieImp* movie = reinterpret_cast<CVedMovieImp*>(iMovie);
                clipDuration += ( movie->Duration().Int64()/1000 -
                                  movie->VideoClip(iNumberOfVideoClips-1)->EndTime().Int64()/1000 );
                }

            if ( iVideoClipWritten + durationMs > clipDuration )
                {
                durationMs = clipDuration - iVideoClipWritten;

                if ( durationMs <= 0 )
                    {
                    // clip fully written: drop the buffered frame and
                    // signal completion to the caller
                    TPtr8 outVideoPtr(iOutVideoBuffer->Des());
                    outVideoPtr.SetLength(0);
                    iFrameBuffered = EFalse;
                    return KErrCompletion;
                    }
                }
            }

        // ms -> output timescale ticks
        TReal duration = I64REAL(durationMs) * TReal(iOutputVideoTimeScale) / 1000.0;

        error = WriteVideoFrameFromBuffer(duration, aColorTransitionFlag);

        if (error != KErrNone)
            return error;
        }

    TInt64 currentVideoTime = GetVideoTimeInMsFromTicks((TInt64) iCurrentVideoTimeInTicks, ETrue);

    iCurrentVideoSize += aBuf.Length();

    // estimate the audio bytes that correspond to the video interval just
    // written, for the movie size-limit check below
    TInt addAudio = 0;
    TRAP( error, addAudio = (iMovie->Song())->GetFrameSizeEstimateL(oldVideoTime * 1000, currentVideoTime * 1000) );

    if (error != KErrNone)
        return error;

    iCurrentAudioSize += addAudio;

    PRINT((_L("CMovieProcessorImpl::WriteVideoFrameToFile() video size: %d, audio size %d, meta data %d"), iCurrentVideoSize, iCurrentAudioSize, currentMetaDataSize))

    if (iMovieSizeLimit > 0 && iCurrentVideoSize + iCurrentAudioSize + currentMetaDataSize > iMovieSizeLimit)
        {
        // Cut video here
        iEndCutTime = TTimeIntervalMicroSeconds(currentVideoTime);

        iMovieSizeLimitExceeded = ETrue;

        // To notify that movie has reached maximum size
        return KErrCompletion;
        }

    // Buffer the incoming frame, extending the output buffer if needed

    VPASSERT(iOutVideoBuffer);
    TPtr8 outVideoPtr(iOutVideoBuffer->Des());

    if((outVideoPtr.Length() + aBuf.Length()) > outVideoPtr.MaxLength())
        {
        // extend buffer size; round up to the next full kilobyte
        TUint newSize = outVideoPtr.Length() + aBuf.Length();
        newSize = (newSize + 1023) & (~1023);
        TRAP(error, (iOutVideoBuffer = iOutVideoBuffer->ReAllocL(newSize)) );

        if (error != KErrNone)
            return error;

        PRINT((_L("CMovieProcessorImpl::WriteVideoFrameToFile() - extended buffer to %d bytes"), newSize));

        // re-point the descriptor at the (possibly moved) heap buffer
        outVideoPtr.Set(iOutVideoBuffer->Des());
        }
    outVideoPtr.Append(aBuf);

#ifdef VIDEOEDITORENGINE_AVC_EDITING
    if (vt == EVideoAVCProfileBaseline)
        {
        // AVC frames are parsed in place before composing
        error = iAvcEdit->ParseFrame(iOutVideoBuffer, EFalse, aFromEncoder);
        if (error != KErrNone)
            return error;
        }
#endif

    // Bookkeeping for the newly buffered frame. When a negative duration was
    // patched above, keep the previous buffered timestamp so the next
    // duration is computed against the last valid timestamp.
    if(!lessThenZero)
        {
        iBufferedTimeStamp = GetVideoTimeInMsFromTicks(aTimeStampInTicks, aCommonTimeScale);
        }
    iBufferedKeyFrame = aKeyFrame;
    iBufferedFromEncoder = aFromEncoder;
    iFrameBuffered = ETrue;
    if ( iFirstFrameOfClip )
        iFirstFrameBuffered = ETrue;

    iFirstFrameOfClip = EFalse;

    iVideoFrameNumber++;

    if (aKeyFrame)
        iVideoIntraFrameNumber++;

    return KErrNone;

    }
|
5194 |
|
5195 // ----------------------------------------------------------------------------- |
|
5196 // CMovieProcessorImpl::FinalizeVideoWrite |
|
5197 // Writes the last video frame from buffer to file |
|
5198 // (other items were commented in a header). |
|
5199 // ----------------------------------------------------------------------------- |
|
5200 // |
|
5201 TInt CMovieProcessorImpl::FinalizeVideoWrite() |
|
5202 { |
|
5203 |
|
5204 if ( !iFrameBuffered ) |
|
5205 return KErrNone; |
|
5206 |
|
5207 CVedMovieImp* movie = reinterpret_cast<CVedMovieImp*>(iMovie); |
|
5208 |
|
5209 iApplySlowMotion = EFalse; // this duration must not be scaled |
|
5210 TInt64 durationMs = movie->Duration().Int64()/1000 - GetVideoTimeInMsFromTicks(iCurrentVideoTimeInTicks, ETrue); |
|
5211 TReal duration = I64REAL(durationMs) * TReal(iOutputVideoTimeScale) / 1000.0; |
|
5212 |
|
5213 TInt error = WriteVideoFrameFromBuffer(duration, EFalse); |
|
5214 |
|
5215 return error; |
|
5216 } |
|
5217 |
|
5218 // ----------------------------------------------------------------------------- |
|
// CMovieProcessorImpl::WriteVideoFrameFromBuffer
|
5220 // Writes a frame from buffer to file |
|
5221 // (other items were commented in a header). |
|
5222 // ----------------------------------------------------------------------------- |
|
5223 // |
|
5224 TInt CMovieProcessorImpl::WriteVideoFrameFromBuffer(TReal aDuration, TBool aColorTransitionFlag) |
|
5225 { |
|
5226 |
|
5227 VPASSERT(iFrameBuffered); |
|
5228 |
|
5229 TReal duration = aDuration; |
|
5230 |
|
5231 // slow motion |
|
5232 VPASSERT(iSpeed > 0); |
|
5233 const TInt maxSpeed = KMaxVideoSpeed; |
|
5234 TReal iScaleFactor = (TReal)maxSpeed/(TReal)iSpeed; |
|
5235 |
|
5236 if( iScaleFactor != 1.0 && iApplySlowMotion ) |
|
5237 { |
|
5238 duration = duration * iScaleFactor; |
|
5239 } |
|
5240 |
|
5241 iCurrentVideoTimeInTicks += duration; |
|
5242 |
|
5243 // if the first frame of a clip is being buffered, |
|
5244 // we are now writing the last frame of previous clip |
|
5245 // so don't increment iVideoClipWritten yet |
|
5246 |
|
5247 // Also take into account crossfade / wipe transition |
|
5248 // so that iVideoClipWritten is not incremented |
|
5249 // for transition frames of the previous clip |
|
5250 if ( !iFirstFrameOfClip && ( (iCurClipTimeStampList.Count() == 0) || |
|
5251 aColorTransitionFlag ) ) |
|
5252 { |
|
5253 iVideoClipWritten += GetVideoTimeInMsFromTicks(aDuration, ETrue); |
|
5254 } |
|
5255 |
|
5256 IncProgressBar(); |
|
5257 |
|
5258 TVedVideoBitstreamMode currClipMode; |
|
5259 TVideoType vt = (TVideoType)iOutputVideoType; |
|
5260 |
|
5261 if (iNumberOfVideoClips > 0) |
|
5262 currClipMode = iVideoClip->Info()->TranscodeFactor().iStreamType; |
|
5263 else |
|
5264 { |
|
5265 if ( (vt == EVideoH263Profile0Level10) || (vt == EVideoH263Profile0Level45) ) |
|
5266 currClipMode = EVedVideoBitstreamModeH263; |
|
5267 else |
|
5268 currClipMode = EVedVideoBitstreamModeMPEG4Regular; |
|
5269 } |
|
5270 TPtr8 outVideoPtr(iOutVideoBuffer->Des()); |
|
5271 |
|
5272 TInt returnedVal = 0; |
|
5273 returnedVal = iComposer->WriteFrames(outVideoPtr, outVideoPtr.Size(), TInt(duration + 0.5), |
|
5274 iBufferedKeyFrame, 1 /*numberOfFrames*/, |
|
5275 CMP4Composer::EFrameTypeVideo, iMP4SpecificSize, |
|
5276 iModeChanged, iFirstFrameBuffered, currClipMode, iBufferedFromEncoder); |
|
5277 |
|
5278 iFirstFrameBuffered = EFalse; |
|
5279 |
|
5280 if (returnedVal == KErrWrite) |
|
5281 { |
|
5282 // frame was not written |
|
5283 iVideoClipWritten -= GetVideoTimeInMsFromTicks(aDuration, ETrue); |
|
5284 iCurrentVideoTimeInTicks -= duration; |
|
5285 returnedVal = KErrNone; |
|
5286 } |
|
5287 |
|
5288 iFrameBuffered = EFalse; |
|
5289 outVideoPtr.SetLength(0); |
|
5290 |
|
5291 return returnedVal; |
|
5292 |
|
5293 } |
|
5294 |
|
5295 |
|
5296 // ----------------------------------------------------------------------------- |
|
5297 // CMovieProcessorImpl::SaveVideoFrameToFile |
|
5298 // Save a video YUV frame to the tmp file |
|
5299 // (other items were commented in a header). |
|
5300 // ----------------------------------------------------------------------------- |
|
5301 // |
|
5302 TInt CMovieProcessorImpl::SaveVideoFrameToFile(TDesC8& aBuf, TInt aDuration, TInt64 aTimeStamp ) |
|
5303 { |
|
5304 TBool isOpen = EFalse; |
|
5305 TInt errCode = KErrNone; |
|
5306 if ( ( errCode = iFs.IsFileOpen( iNextClipFileName, isOpen ) ) == KErrNone ) |
|
5307 { |
|
5308 if ( isOpen ) |
|
5309 { |
|
5310 if ( ( errCode = iNextClipDurationList.Append( aDuration ) ) == KErrNone ) |
|
5311 { |
|
5312 if ( ( errCode = iNextClipTimeStampList.Append( aTimeStamp ) ) == KErrNone ) |
|
5313 { |
|
5314 if ( iDiskFull ) |
|
5315 { |
|
5316 errCode = KErrDiskFull; |
|
5317 } |
|
5318 else |
|
5319 { |
|
5320 TInt64 freeSpace = 0; |
|
5321 // get free space on disk |
|
5322 TRAP( errCode, freeSpace = iComposer->DriveFreeSpaceL() ); |
|
5323 if ( errCode == KErrNone ) |
|
5324 { |
|
5325 // subtract yuv length from free space |
|
5326 freeSpace -= TInt64( aBuf.Length() ); |
|
5327 |
|
5328 if ( freeSpace < TInt64( KDiskSafetyLimit ) ) |
|
5329 { |
|
5330 iDiskFull = ETrue; |
|
5331 errCode = KErrDiskFull; |
|
5332 } |
|
5333 else |
|
5334 { |
|
5335 errCode = iNextClip.Write( aBuf ); |
|
5336 iPreviousTimeScale = iParser->iStreamParameters.iVideoTimeScale; |
|
5337 } |
|
5338 } |
|
5339 } |
|
5340 if ( errCode != KErrNone ) |
|
5341 { |
|
5342 // rollback the insertion |
|
5343 iNextClipDurationList.Remove( iNextClipDurationList.Count() - 1 ); |
|
5344 iNextClipTimeStampList.Remove( iNextClipTimeStampList.Count() - 1 ); |
|
5345 } |
|
5346 } |
|
5347 else |
|
5348 { |
|
5349 // rollback the insertion |
|
5350 iNextClipDurationList.Remove( iNextClipDurationList.Count() - 1 ); |
|
5351 } |
|
5352 } |
|
5353 } |
|
5354 else |
|
5355 { |
|
5356 errCode = KErrNotFound; |
|
5357 } |
|
5358 } |
|
5359 return errCode; |
|
5360 } |
|
5361 |
|
5362 // ----------------------------------------------------------------------------- |
|
5363 // CMovieProcessorImpl::GetVideoFrameFromFile |
|
5364 // Retrieve a video YUV frame from the tmp file |
|
5365 // (other items were commented in a header). |
|
5366 // ----------------------------------------------------------------------------- |
|
5367 // |
|
TInt CMovieProcessorImpl::GetVideoFrameFromFile(TDes8& aBuf, TInt aLength, TInt& aDuration, TInt64& aTimeStamp)
    {
    // Reads the next stored YUV frame from the temporary clip file into aBuf
    // and returns its duration and timestamp (both set to -1 once the stored
    // lists are exhausted). On the first read of a clip, the stored
    // durations/timestamps are rebased to start from zero and rescaled from
    // the previous clip's timescale to the current parser's timescale.
    // @return KErrNone, KErrNotFound when the temp file is not open, or a
    //         file-server error from the read

    TBool isOpen = EFalse;
    TInt errCode = KErrNone;
    if ( ( errCode = iFs.IsFileOpen( iCurClipFileName, isOpen ) ) == KErrNone )
        {
        if ( isOpen )
            {
            if ( ( errCode = iCurClip.Read( aBuf, aLength ) ) == KErrNone )
                {
                if ( !iTimeStampListScaled )
                    {
                    // change timestamps to start from zero
                    TInt firstTs = iCurClipTimeStampList[0];
                    for ( TInt index = 0; index < iCurClipTimeStampList.Count(); ++index )
                        {
                        iCurClipTimeStampList[index] = iCurClipTimeStampList[index] - firstTs;
                        }

                    // offset = end time of the stored frames (last timestamp
                    // plus last duration), rescaled to the current timescale
                    iOffsetTimeStamp = iCurClipTimeStampList[iCurClipTimeStampList.Count() - 1] +
                                       iCurClipDurationList[iCurClipDurationList.Count() - 1];
                    TReal scaleFactor = (TReal)iParser->iStreamParameters.iVideoTimeScale/(TReal)iPreviousTimeScale;
                    iOffsetTimeStamp = TInt( I64REAL(iOffsetTimeStamp) * scaleFactor + 0.5 );

                    for ( TInt index = 0; index < iCurClipDurationList.Count(); ++index )
                        {
                        // scale up or scale down to the current timescale
                        iCurClipDurationList[index] = TInt( TReal(iCurClipDurationList[index]) * scaleFactor + 0.5 );
                        iCurClipTimeStampList[index] = TInt64( I64REAL(iCurClipTimeStampList[index]) * scaleFactor + 0.5 );
                        }
                    iTimeStampListScaled = ETrue;    // rescale only once per clip
                    }

                if ( iCurClipIndex < iCurClipDurationList.Count() )
                    {
                    aDuration = iCurClipDurationList[iCurClipIndex];
                    aTimeStamp = iCurClipTimeStampList[iCurClipIndex++];
                    }
                else
                    {
                    // no more stored frame info for this clip
                    aDuration = -1;
                    aTimeStamp = -1;
                    }

                }
            }
        else
            {
            errCode = KErrNotFound;
            }
        }
    return errCode;
    }
|
5422 |
|
5423 |
|
5424 // ----------------------------------------------------------------------------- |
|
5425 // CMovieProcessorImpl::GetNextFrameDuration |
|
5426 // Get the next frame duration and timestamp |
|
5427 // (other items were commented in a header). |
|
5428 // ----------------------------------------------------------------------------- |
|
5429 // |
|
// Returns the duration and timestamp of the next cached transition frame
// (or, when aIndex >= 0, of the frame at aIndex without advancing
// iCurClipIndex), plus the timestamp offset to apply to frames decoded after
// the cached ones. On the first call the cached lists are rebased to zero
// and rescaled to the current parser timescale, and the timestamp of the
// first colour-transition frame is recorded for WriteVideoFrameToFile().
void CMovieProcessorImpl::GetNextFrameDuration(TInt& aDuration, TInt64& aTimeStamp,
                                               TInt aIndex, TInt& aTimeStampOffset)
    {

    TInt count = iCurClipTimeStampList.Count();   // NOTE(review): unused local

    if (!iTimeStampListScaled)
        {
        // save the timestamp of first color transition frame, so that the duration of
        // last frame from first clip is calculated correctly in WriteVideoFrameToFile()
        i1stColorTransitionFrameTS = TInt64( I64REAL(iCurClipTimeStampList[0]) / (TReal)iPreviousTimeScale * 1000 + 0.5 );
        iWriting1stColorTransitionFrame = ETrue;

        // change timestamps to start from zero
        // NOTE(review): firstTs is a TInt while the list holds TInt64 values
        // — assumes timestamps fit in 32 bits.
        TInt firstTs = iCurClipTimeStampList[0];
        for ( TInt index = 0; index < iCurClipTimeStampList.Count(); ++index )
            {
            iCurClipTimeStampList[index] = iCurClipTimeStampList[index] - firstTs;
            }

        // offset = end time of the last cached frame, rescaled below to the
        // current parser's video timescale
        iOffsetTimeStamp = iCurClipTimeStampList[iCurClipTimeStampList.Count() - 1] +
            iCurClipDurationList[iCurClipDurationList.Count() - 1];
        TReal scaleFactor = (TReal)iParser->iStreamParameters.iVideoTimeScale/(TReal)iPreviousTimeScale;
        iOffsetTimeStamp = TInt( I64REAL(iOffsetTimeStamp) * scaleFactor + 0.5 );

        for ( TInt index = 0; index < iCurClipDurationList.Count(); ++index )
            {
            // scale up or scale down
            iCurClipDurationList[index] = TInt( TReal(iCurClipDurationList[index]) * scaleFactor + 0.5 );
            iCurClipTimeStampList[index] = TInt64( I64REAL(iCurClipTimeStampList[index]) * scaleFactor + 0.5 );
            }
        iTimeStampListScaled = ETrue;   // rescale only once per clip
        }

    aTimeStampOffset = iOffsetTimeStamp;

    if (aIndex >= 0)
        {
        // just get the values for given index, do not change iCurClipIndex
        VPASSERT(aIndex < iCurClipDurationList.Count());
        aDuration = iCurClipDurationList[aIndex];
        aTimeStamp = iCurClipTimeStampList[aIndex];
        return;
        }

    if ( iCurClipIndex < iCurClipDurationList.Count() )
        {
        aDuration = iCurClipDurationList[iCurClipIndex];
        aTimeStamp = iCurClipTimeStampList[iCurClipIndex++];
        }
    else
        {
        // cached lists exhausted
        aDuration = 0;
        aTimeStamp = 0;
        }
    }
|
5486 |
|
5487 |
|
5488 // ----------------------------------------------------------------------------- |
|
5489 // CMovieProcessorImpl::AppendNextFrameDuration |
|
5490 // Append the next frame duration and timestamp |
|
5491 // (other items were commented in a header). |
|
5492 // ----------------------------------------------------------------------------- |
|
5493 // |
|
5494 void CMovieProcessorImpl::AppendNextFrameDuration(TInt aDuration, TInt64 aTimeStamp) |
|
5495 { |
|
5496 if ( iCurClipDurationList.Append( aDuration ) == KErrNone ) |
|
5497 { |
|
5498 if ( iCurClipTimeStampList.Append( aTimeStamp + iOffsetTimeStamp ) != KErrNone ) |
|
5499 { |
|
5500 // rollback the insertion |
|
5501 iCurClipDurationList.Remove( iCurClipDurationList.Count() - 1 ); |
|
5502 } |
|
5503 } |
|
5504 } |
|
5505 |
|
5506 // ----------------------------------------------------------------------------- |
|
5507 // CMovieProcessorImpl::GetNextClipStartTransitionNumber |
|
5508 // Get the number of transition at the start of next clip |
|
5509 // (other items were commented in a header). |
|
5510 // ----------------------------------------------------------------------------- |
|
5511 // |
|
// Determines how many frames are used for the transition effect at the start
// of the next clip. The default is 5; it is reduced when the next clip does
// not contain enough frames between its cut-in and cut-out points (and, when
// the next clip also ends in a transition, each transition may use at most
// half of the included frames).
TInt CMovieProcessorImpl::NextClipStartTransitionNumber()
    {
    TInt transitionNumber = 5;
    if ( iMiddleTransitionEffect == EVedMiddleTransitionEffectCrossfade ||
        iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeLeftToRight ||
        iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeRightToLeft ||
        iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeTopToBottom ||
        iMiddleTransitionEffect == EVedMiddleTransitionEffectWipeBottomToTop )
        {
        TInt nextClipNumber = iVideoClipNumber + 1;
        CVedMovieImp* movie = (iMovie);
        CVedVideoClip* videoClip = movie->VideoClip(nextClipNumber);
        CVedVideoClip* origVideoClip = iVideoClip;

        // transition that follows the next clip, if any
        TVedMiddleTransitionEffect middleTransitionEffect = EVedMiddleTransitionEffectNone;
        if ( iMovie->MiddleTransitionEffectCount() > nextClipNumber )
            {
            middleTransitionEffect = iMovie->MiddleTransitionEffect( nextClipNumber );
            }

        TTimeIntervalMicroSeconds startCutTime = videoClip->CutInTime();
        TTimeIntervalMicroSeconds endCutTime = videoClip->CutOutTime();

        TInt numberOfFrame = videoClip->Info()->VideoFrameCount();

        // temporary set iVideoClip to next video clip
        // (GetVideoFrameIndex() below operates on iVideoClip)
        iVideoClip = videoClip;

        TInt startFrameIndex = GetVideoFrameIndex( startCutTime );
        // the following is because binary search gives us frame with timestamp < startCutTime
        // this frame would be out of range for movie
        if ( startFrameIndex > 0 && startFrameIndex < ( numberOfFrame - 1 ) )
            startFrameIndex++;

        TInt endFrameIndex = GetVideoFrameIndex( endCutTime );

        // determine the total number of included frames in the clip
        TInt numberOfIncludedFrames = endFrameIndex - startFrameIndex + 1;

        // make sure there are enough frames to apply transition
        // for transition at both ends
        if ( middleTransitionEffect != EVedMiddleTransitionEffectNone )
            {
            if ( middleTransitionEffect == EVedMiddleTransitionEffectCrossfade ||
                middleTransitionEffect == EVedMiddleTransitionEffectWipeLeftToRight ||
                middleTransitionEffect == EVedMiddleTransitionEffectWipeRightToLeft ||
                middleTransitionEffect == EVedMiddleTransitionEffectWipeTopToBottom ||
                middleTransitionEffect == EVedMiddleTransitionEffectWipeBottomToTop )
                {
                // transitions at both ends: each may take at most half the frames
                if ( numberOfIncludedFrames < ( transitionNumber << 1 ) )
                    {
                    transitionNumber = numberOfIncludedFrames >> 1;
                    }
                }
            else
                {
                // the ending effect is of a different kind; keep a 10-frame margin
                if ( numberOfIncludedFrames < ( transitionNumber + 10 ) )
                    {
                    if ( ( numberOfIncludedFrames >> 1 ) < transitionNumber )
                        {
                        transitionNumber = numberOfIncludedFrames >> 1;
                        }
                    }
                }
            }
        else
            {
            // only a starting transition: it may use every included frame
            if ( numberOfIncludedFrames < transitionNumber )
                {
                transitionNumber = numberOfIncludedFrames;
                }
            }
        // reset iVideoClip back to the original video clip
        iVideoClip = origVideoClip;
        }
    return transitionNumber;
    }
|
5589 |
|
5590 // ----------------------------------------------------------------------------- |
|
5591 // CMovieProcessorImpl::TransitionDuration |
|
5592 // Get the transition duration of current clip |
|
5593 // (other items were commented in a header). |
|
5594 // ----------------------------------------------------------------------------- |
|
5595 // |
|
5596 TInt CMovieProcessorImpl::TransitionDuration() |
|
5597 { |
|
5598 TInt duration = 0; |
|
5599 if ( iNextClipDurationList.Count() ) |
|
5600 { |
|
5601 for ( TInt index = 0; index < iNextClipDurationList.Count() /*- 1*/; index++ ) |
|
5602 { |
|
5603 duration += iNextClipDurationList[index]; |
|
5604 } |
|
5605 } |
|
5606 return duration; |
|
5607 } |
|
5608 |
|
5609 // ----------------------------------------------------------------------------- |
|
5610 // CMovieProcessorImpl::CloseTransitionInfoL |
|
5611 // Release all internal data hold for transition effect |
|
5612 // (other items were commented in a header). |
|
5613 // ----------------------------------------------------------------------------- |
|
5614 // |
|
5615 void CMovieProcessorImpl::CloseTransitionInfoL() |
|
5616 { |
|
5617 // remove old tmp files |
|
5618 if ( iCurClipFileName.Length() ) |
|
5619 { |
|
5620 iCurClip.Close(); |
|
5621 // it is ok if the tmp file does not exist |
|
5622 iFs.Delete( iCurClipFileName ); |
|
5623 iCurClipFileName.Zero(); |
|
5624 } |
|
5625 |
|
5626 if ( iNextClipFileName.Length() ) |
|
5627 { |
|
5628 iNextClip.Close(); |
|
5629 // it is ok if the tmp file does not exist |
|
5630 iFs.Delete( iNextClipFileName ); |
|
5631 iNextClipFileName.Zero(); |
|
5632 } |
|
5633 |
|
5634 // use file manager to delete all previously |
|
5635 // *_nokia_vpi.tmp files |
|
5636 CFileMan *fileMan = CFileMan::NewL( iFs ); |
|
5637 CleanupStack::PushL( fileMan ); |
|
5638 TParse filepath; |
|
5639 filepath.Set( iOutputMovieFileName, NULL, NULL ); |
|
5640 TFileName filesToDelete = filepath.DriveAndPath(); |
|
5641 filesToDelete.Append( _L( "*_nokia_vpi.tmp" ) ); |
|
5642 fileMan->Delete( filesToDelete ); |
|
5643 CleanupStack::PopAndDestroy( fileMan ); |
|
5644 |
|
5645 iCurClipDurationList.Reset(); |
|
5646 iNextClipDurationList.Reset(); |
|
5647 iCurClipTimeStampList.Reset(); |
|
5648 iNextClipTimeStampList.Reset(); |
|
5649 iCurClipIndex = 0; |
|
5650 iPreviousTimeScale = 0; |
|
5651 iOffsetTimeStamp = 0; |
|
5652 |
|
5653 // Delete Im_nokia_vpi.tmp |
|
5654 TFileName tmpath = TPtrC(KTempFilePath); |
|
5655 tmpath.Append( _L("x.3gp") ); |
|
5656 |
|
5657 // use file manager to delete all previously |
|
5658 CFileMan *tempfileMan = CFileMan::NewL( iFs ); |
|
5659 CleanupStack::PushL( tempfileMan ); |
|
5660 TParse tempfilepath; |
|
5661 tempfilepath.Set(tmpath, NULL, NULL ); |
|
5662 TFileName tempfilesToDelete = tempfilepath.DriveAndPath(); |
|
5663 tempfilesToDelete.Append( _L( "Im_nokia_vpi.tmp" ) ); |
|
5664 tempfileMan->Delete( tempfilesToDelete,0); |
|
5665 CleanupStack::PopAndDestroy( tempfileMan ); |
|
5666 } |
|
5667 |
|
5668 // ----------------------------------------------------------------------------- |
|
5669 // CMovieProcessorImpl::GetVideoFrameIndex |
|
5670 // Gets frame index based on its timestamp |
|
5671 // (other items were commented in a header). |
|
5672 // ----------------------------------------------------------------------------- |
|
5673 // |
|
5674 TInt CMovieProcessorImpl::GetVideoFrameIndex(TTimeIntervalMicroSeconds aTime) const |
|
5675 { |
|
5676 __ASSERT_ALWAYS((aTime >= TTimeIntervalMicroSeconds(0)) /*&& (aTime <= iMovie->Duration())*/, |
|
5677 TVedPanic::Panic(TVedPanic::EVideoClipInfoIllegalVideoFrameTime)); |
|
5678 |
|
5679 /* Use binary search to find the right frame. */ |
|
5680 |
|
5681 TInt videoFrameCount; |
|
5682 if (!iThumbnailInProgress) |
|
5683 videoFrameCount = iVideoClip->Info()->VideoFrameCount(); |
|
5684 else |
|
5685 videoFrameCount = iParser->GetNumberOfVideoFrames(); |
|
5686 |
|
5687 TInt start = 0; |
|
5688 TInt end = videoFrameCount - 1; |
|
5689 TInt index = -1; |
|
5690 while ( start <= end ) |
|
5691 { |
|
5692 index = start + ((end - start) / 2); |
|
5693 |
|
5694 TTimeIntervalMicroSeconds startTime(0); |
|
5695 TTimeIntervalMicroSeconds endTime(0); |
|
5696 |
|
5697 TInt tsInTicks; |
|
5698 startTime = TTimeIntervalMicroSeconds( TInt64(iParser->GetVideoFrameStartTime(index, &tsInTicks)) * TInt64(1000) ); |
|
5699 |
|
5700 if (index < (videoFrameCount - 1)) |
|
5701 endTime = TTimeIntervalMicroSeconds( TInt64(iParser->GetVideoFrameStartTime(index + 1, &tsInTicks)) * TInt64(1000) ); |
|
5702 else |
|
5703 { |
|
5704 TInt durationMs; |
|
5705 iParser->GetVideoDuration(durationMs); |
|
5706 endTime = TTimeIntervalMicroSeconds( TInt64(durationMs) * TInt64(1000) ); |
|
5707 } |
|
5708 |
|
5709 if (index < (videoFrameCount - 1)) |
|
5710 { |
|
5711 endTime = TTimeIntervalMicroSeconds(endTime.Int64() - 1); |
|
5712 } |
|
5713 |
|
5714 if (aTime < startTime) |
|
5715 { |
|
5716 end = index - 1; |
|
5717 } |
|
5718 else if (aTime > endTime) |
|
5719 { |
|
5720 start = index + 1; |
|
5721 } |
|
5722 else |
|
5723 { |
|
5724 break; |
|
5725 } |
|
5726 } |
|
5727 |
|
5728 return index; |
|
5729 } |
|
5730 |
|
5731 |
|
5732 // ----------------------------------------------------------------------------- |
|
5733 // CMovieProcessorImpl::EnhanceThumbnailL |
|
5734 // Enhances the visual quality of the frame |
|
5735 // (other items were commented in a header). |
|
5736 // ----------------------------------------------------------------------------- |
|
5737 // |
|
5738 void CMovieProcessorImpl::EnhanceThumbnailL(const CFbsBitmap* aInBitmap, |
|
5739 CFbsBitmap* aTargetBitmap) |
|
5740 { |
|
5741 |
|
5742 // create enhancement object |
|
5743 if(!iEnhancer) |
|
5744 iEnhancer = (CDisplayChain*) CDisplayChain::NewL(); |
|
5745 |
|
5746 // enhance image |
|
5747 iEnhancer->ProcessL(aInBitmap, aTargetBitmap); |
|
5748 |
|
5749 // clear enhancement object |
|
5750 delete iEnhancer; |
|
5751 iEnhancer=0; |
|
5752 |
|
5753 } |
|
5754 |
|
5755 |
|
5756 // ----------------------------------------------------------------------------- |
|
5757 // CMovieProcessorImpl::NotifyVideoClipGeneratorFrameCompleted |
|
5758 // The cal back functin called when a frame is to be obtained from the frame generator |
|
5759 // (other items were commented in a header). |
|
5760 // ----------------------------------------------------------------------------- |
|
5761 // |
|
// Callback invoked by the clip generator when it has produced a frame.
// Takes ownership of aFrame (deletes it on every path), converts it to
// YUV420, copies the YUV data into iYuvImageBuf / iReadImageDes, and kicks
// off encoding via ProcessImageSetsL(). Errors are reported through
// iMonitor; KErrCancel tears down the image-insertion composer state.
void CMovieProcessorImpl::NotifyVideoClipGeneratorFrameCompleted(CVedVideoClipGenerator& /*aGenerator*/,
                                                                 TInt aError,
                                                                 CFbsBitmap* aFrame)
    {

    // processing was cancelled while the generator was working: just
    // dispose of the frame and bail out
    if (iProcessingCancelled)
        {
        if (aFrame)
            {
            delete aFrame; aFrame = 0;
            }
        return;
        }

    if(aError == KErrNone)
        {
        CFbsBitmap* aBitmapLocal = aFrame;

        // a successful callback must carry a frame
        if (aFrame == 0)
            {
            iMonitor->Error(KErrArgument);
            return;
            }

        VPASSERT(aFrame);
        TInt error;

        // Convert the frame obtained from bitmap to YUV before giving it to encoder only if error is not there
        TRAP(error, iImageYuvConverter = CVSFbsBitmapYUV420Converter::NewL(*aBitmapLocal));
        if (error != KErrNone)
            {
            delete aFrame; aFrame = 0; // removed the frame obtained from engine
            iMonitor->Error(error);
            return;
            }

        VPASSERT(iImageYuvConverter);

        TRAP(error, iImageYuvConverter->ProcessL());
        if (error != KErrNone)
            {
            delete aFrame; aFrame = 0; // removed the frame obtained from engine
            iMonitor->Error(error);
            return;
            }

        // NOTE(review): this allocates an HBufC (16-bit units, i.e. roughly
        // twice the needed byte count) and reinterprets the heap cell as a
        // raw byte buffer; the double cast is redundant and the descriptor
        // header is clobbered by the Mem::Copy below. Looks like it should
        // be a plain byte allocation (e.g. HBufC8 / User::AllocL) — confirm
        // the free path before changing.
        TRAP(error, iYuvImageBuf = (TUint8*) (TUint8*) HBufC::NewL((iImageYuvConverter->YUVData()).Length()));
        if(error != KErrNone)
            {
            delete aFrame; aFrame = 0; // removed the frame obtained from engine
            iMonitor->Error(error);
            return;
            }
        iImageSize = (iImageYuvConverter->YUVData()).Length();
        Mem::Copy(iYuvImageBuf,(iImageYuvConverter->YUVData()).Ptr(),(iImageYuvConverter->YUVData()).Length()); // may be iYUVBuf only
        iReadImageDes.Set(iYuvImageBuf,iImageSize, iImageSize);

        // the converter is no longer needed once the data has been copied
        delete iImageYuvConverter;
        iImageYuvConverter = 0;

        aBitmapLocal = 0;
        if(aFrame)
            {
            delete aFrame; // removed the frame obtained from engine
            aFrame = 0;
            }
        aBitmapLocal = 0;

        // finished converting to YUV
        TRAP(error, ProcessImageSetsL(EVideoEncodeFrame)); // EVideoEncodeFrame indicates to encoder to encode and return later
        if(error != KErrNone)
            {
            iMonitor->Error(error);
            return;
            }
        }
    else
        {
        if(aError == KErrCancel)
            {
            // generation was cancelled: free the frame and release the
            // image-insertion composer and AVC editor state
            if(aFrame)
                {
                delete aFrame;
                aFrame =0;
                }

            if(iImageComposer)
                {
                iImageComposer->Close();
                delete iImageComposer;
                iImageComposer=0;
                }

            if (iImageAvcEdit)
                {
                delete iImageAvcEdit;
                iImageAvcEdit = 0;
                }
            }
        else
            {
            // any other generator error is forwarded to the status monitor
            // NOTE(review): aFrame is not deleted on this path — possible
            // leak if the generator can report an error together with a
            // frame; confirm against the generator's contract.
            iMonitor->Error(aError);
            }
        }
    }
|
5867 |
|
5868 // ----------------------------------------------------------------------------- |
|
5869 // CMovieProcessorImpl::GetOutputVideoType |
|
5870 // This function returns the video type of output movie |
|
5871 // (other items were commented in a header). |
|
5872 // ----------------------------------------------------------------------------- |
|
5873 // |
|
5874 TVedVideoType CMovieProcessorImpl::GetOutputVideoType() |
|
5875 { |
|
5876 |
|
5877 if (iOutputVideoType == EVideoH263Profile0Level10) |
|
5878 return EVedVideoTypeH263Profile0Level10; |
|
5879 |
|
5880 if (iOutputVideoType == EVideoH263Profile0Level45) |
|
5881 return EVedVideoTypeH263Profile0Level45; |
|
5882 |
|
5883 #ifdef VIDEOEDITORENGINE_AVC_EDITING |
|
5884 if (iOutputVideoType == EVideoAVCProfileBaseline) |
|
5885 return EVedVideoTypeAVCBaselineProfile; |
|
5886 #endif |
|
5887 |
|
5888 if (iOutputVideoType == EVideoMPEG4) |
|
5889 return EVedVideoTypeMPEG4SimpleProfile; |
|
5890 |
|
5891 return EVedVideoTypeNoVideo; |
|
5892 |
|
5893 } |
|
5894 |
|
5895 // ----------------------------------------------------------------------------- |
|
5896 // CMovieProcessorImpl::GetCurrentClipVideoType |
|
5897 // This function returns the video type of the current video clip |
|
5898 // (other items were commented in a header). |
|
5899 // ----------------------------------------------------------------------------- |
|
5900 // |
|
5901 TVedVideoType CMovieProcessorImpl::GetCurrentClipVideoType() |
|
5902 { |
|
5903 |
|
5904 if (!iThumbnailInProgress) |
|
5905 { |
|
5906 |
|
5907 CVedMovieImp* movie = reinterpret_cast<CVedMovieImp*>(iMovie); |
|
5908 |
|
5909 CVedVideoClip* currentClip = movie->VideoClip(iVideoClipNumber); |
|
5910 CVedVideoClipInfo* currentInfo = currentClip->Info(); |
|
5911 |
|
5912 if (currentInfo->Class() == EVedVideoClipClassGenerated) |
|
5913 { |
|
5914 switch (iOutputVideoType) |
|
5915 { |
|
5916 case EVideoH263Profile0Level10: |
|
5917 return EVedVideoTypeH263Profile0Level10; |
|
5918 // break; |
|
5919 |
|
5920 case EVideoH263Profile0Level45: |
|
5921 return EVedVideoTypeH263Profile0Level45; |
|
5922 // break; |
|
5923 |
|
5924 #ifdef VIDEOEDITORENGINE_AVC_EDITING |
|
5925 case EVideoAVCProfileBaseline: |
|
5926 return EVedVideoTypeAVCBaselineProfile; |
|
5927 // break; |
|
5928 #endif |
|
5929 |
|
5930 case EVideoMPEG4: |
|
5931 return EVedVideoTypeMPEG4SimpleProfile; |
|
5932 // break; |
|
5933 |
|
5934 default: |
|
5935 return EVedVideoTypeUnrecognized; |
|
5936 } |
|
5937 } |
|
5938 return currentInfo->VideoType(); |
|
5939 } |
|
5940 |
|
5941 switch (iParser->iStreamParameters.iVideoFormat) |
|
5942 { |
|
5943 |
|
5944 case CParser::EVideoFormatH263Profile0Level10: |
|
5945 return EVedVideoTypeH263Profile0Level10; |
|
5946 // break; |
|
5947 |
|
5948 case CParser::EVideoFormatH263Profile0Level45: |
|
5949 return EVedVideoTypeH263Profile0Level45; |
|
5950 // break; |
|
5951 |
|
5952 case CParser::EVideoFormatMPEG4: |
|
5953 return EVedVideoTypeMPEG4SimpleProfile; |
|
5954 // break; |
|
5955 |
|
5956 case CParser::EVideoFormatAVCProfileBaseline: |
|
5957 return EVedVideoTypeAVCBaselineProfile; |
|
5958 // break; |
|
5959 |
|
5960 default: |
|
5961 return EVedVideoTypeUnrecognized; |
|
5962 } |
|
5963 } |
|
5964 |
|
5965 |
|
5966 |
|
5967 // ----------------------------------------------------------------------------- |
|
5968 // CMovieProcessorImpl::GetVideoClipTranscodeFactor Added for transcoding reqs |
|
5969 // This function returns the mode of the clip indicated by the location aNum |
|
5970 // (other items were commented in a header). |
|
5971 // ----------------------------------------------------------------------------- |
|
5972 // |
|
5973 TVedTranscodeFactor CMovieProcessorImpl::GetVideoClipTranscodeFactor(TInt aNum) |
|
5974 { |
|
5975 CVedVideoClipInfo* curInfo = NULL; |
|
5976 |
|
5977 if(iMovie) |
|
5978 { |
|
5979 CVedMovieImp* tmovie = (iMovie); |
|
5980 CVedVideoClip* currentClip = tmovie->VideoClip(aNum); |
|
5981 CVedVideoClipInfo* currentInfo = currentClip->Info(); |
|
5982 curInfo = currentInfo; |
|
5983 } |
|
5984 else |
|
5985 { |
|
5986 // this means there is no movie, which can happen in case of thumb generation as processor does not have it |
|
5987 User::Panic(_L("CMovieProcessorImpl MoviePtr is Missing in VideoProcessor"), -1); |
|
5988 } |
|
5989 |
|
5990 return curInfo->TranscodeFactor(); |
|
5991 } |
|
5992 |
|
5993 |
|
5994 // ----------------------------------------------------------------------------- |
|
5995 // CMovieProcessorImpl::GetVideoClipResolution |
|
5996 // Panics the program on error |
|
5997 // (other items were commented in a header). |
|
5998 // ----------------------------------------------------------------------------- |
|
5999 // |
|
6000 TSize CMovieProcessorImpl::GetVideoClipResolution() |
|
6001 { |
|
6002 return iVideoClip->Info()->Resolution(); |
|
6003 } |
|
6004 |
|
6005 // ----------------------------------------------------------------------------- |
|
6006 // CMovieProcessorImpl::GetMovieResolution |
|
6007 // Gets target movie resolution |
|
6008 // (other items were commented in a header). |
|
6009 // ----------------------------------------------------------------------------- |
|
6010 // |
|
6011 TSize CMovieProcessorImpl::GetMovieResolution() |
|
6012 { |
|
6013 if(iMovie) |
|
6014 { |
|
6015 return iMovie->Resolution(); |
|
6016 } |
|
6017 else |
|
6018 { |
|
6019 TSize stdres(KVedMaxVideoWidth, KVedMaxVideoHeight); |
|
6020 return stdres; |
|
6021 } |
|
6022 } |
|
6023 |
|
6024 // ----------------------------------------------------------------------------- |
|
6025 // CMovieProcessorImpl::GetVideoClipFrameRate |
|
6026 // Gets video frame rate of current clip |
|
6027 // (other items were commented in a header). |
|
6028 // ----------------------------------------------------------------------------- |
|
6029 // |
|
6030 TReal CMovieProcessorImpl::GetVideoClipFrameRate() |
|
6031 { |
|
6032 |
|
6033 TReal rate; |
|
6034 iParser->GetVideoFrameRate(rate); |
|
6035 |
|
6036 return rate; |
|
6037 |
|
6038 } |
|
6039 |
|
6040 // ----------------------------------------------------------------------------- |
|
6041 // CMovieProcessorImpl::GetMovieFrameRate |
|
6042 // Gets target movie frame rate |
|
6043 // (other items were commented in a header). |
|
6044 // ----------------------------------------------------------------------------- |
|
6045 // |
|
6046 TReal CMovieProcessorImpl::GetMovieFrameRate() |
|
6047 { |
|
6048 return iMovie->VideoFrameRate(); |
|
6049 } |
|
6050 |
|
6051 // ----------------------------------------------------------------------------- |
|
6052 // CMovieProcessorImpl::GetMovieVideoBitrate |
|
6053 // Gets target movie video bit rate |
|
6054 // (other items were commented in a header). |
|
6055 // ----------------------------------------------------------------------------- |
|
6056 // |
|
6057 TInt CMovieProcessorImpl::GetMovieVideoBitrate() |
|
6058 { |
|
6059 return iMovie->VideoBitrate(); |
|
6060 } |
|
6061 |
|
6062 // ----------------------------------------------------------------------------- |
|
6063 // CMovieProcessorImpl::GetMovieVideoBitrate |
|
6064 // Gets standard movie video bit rate (mandated by standard for used video codec) |
|
6065 // (other items were commented in a header). |
|
6066 // ----------------------------------------------------------------------------- |
|
6067 // |
|
6068 TInt CMovieProcessorImpl::GetMovieStandardVideoBitrate() |
|
6069 { |
|
6070 return iMovie->VideoStandardBitrate(); |
|
6071 } |
|
6072 |
|
6073 // ----------------------------------------------------------------------------- |
|
6074 // CMovieProcessorImpl::GetMovieAudioBitrate |
|
6075 // Gets target movie audio bit rate |
|
6076 // (other items were commented in a header). |
|
6077 // ----------------------------------------------------------------------------- |
|
6078 // |
|
6079 TInt CMovieProcessorImpl::GetMovieAudioBitrate() |
|
6080 { |
|
6081 return iMovie->AudioBitrate(); |
|
6082 } |
|
6083 |
|
6084 // ----------------------------------------------------------------------------- |
|
6085 // CMovieProcessorImpl::GetSyncIntervalInPicture |
|
6086 // Gets sync interval in picture |
|
6087 // (other items were commented in a header). |
|
6088 // ----------------------------------------------------------------------------- |
|
6089 // |
|
6090 TInt CMovieProcessorImpl::GetSyncIntervalInPicture() |
|
6091 { |
|
6092 if ( iMovie ) |
|
6093 { |
|
6094 return iMovie->SyncIntervalInPicture(); |
|
6095 } |
|
6096 else |
|
6097 { |
|
6098 return 0; |
|
6099 } |
|
6100 } |
|
6101 |
|
6102 // ----------------------------------------------------------------------------- |
|
6103 // CMovieProcessorImpl::GetRandomAccessRate |
|
6104 // Get random access rate setting |
|
6105 // (other items were commented in a header). |
|
6106 // ----------------------------------------------------------------------------- |
|
6107 // |
|
6108 TReal CMovieProcessorImpl::GetRandomAccessRate() |
|
6109 { |
|
6110 if ( iMovie ) |
|
6111 { |
|
6112 return iMovie->RandomAccessRate(); |
|
6113 } |
|
6114 else |
|
6115 { |
|
6116 return 0.2; |
|
6117 } |
|
6118 } |
|
6119 |
|
6120 // ----------------------------------------------------------------------------- |
|
6121 // CMovieProcessorImpl::GetOutputAudioType() Added for AAC support |
|
6122 // This function returns the audio type of the final movie which will be created |
|
6123 // (other items were commented in a header). |
|
6124 // ----------------------------------------------------------------------------- |
|
6125 // |
|
6126 TVedAudioType CMovieProcessorImpl::GetOutputAudioType() //added for AAC support |
|
6127 { |
|
6128 return DecideAudioType(iOutputAudioType); |
|
6129 } |
|
6130 |
|
6131 // ----------------------------------------------------------------------------- |
|
6132 // CMovieProcessorImpl::DecideAudioType(TAudioType aAudioType) added for AAC support |
|
6133 // This function returns the audio type depending on the parameter sent |
|
6134 // (other items were commented in a header). |
|
6135 // ----------------------------------------------------------------------------- |
|
6136 // |
|
6137 TVedAudioType CMovieProcessorImpl::DecideAudioType(TAudioType aAudioType) //added for AAC support |
|
6138 { |
|
6139 if( aAudioType == (TAudioType)EAudioAMR) |
|
6140 { |
|
6141 return EVedAudioTypeAMR; |
|
6142 } |
|
6143 else if(aAudioType == (TAudioType)EAudioAAC) |
|
6144 { |
|
6145 // changed EVedAudioTypeAAC_LC --> EVedAudioTypeAAC_LC --Sami |
|
6146 return EVedAudioTypeAAC_LC; |
|
6147 } |
|
6148 else if(aAudioType == (TAudioType)EAudioNone) |
|
6149 { |
|
6150 return EVedAudioTypeNoAudio; |
|
6151 } |
|
6152 else |
|
6153 { |
|
6154 return EVedAudioTypeUnrecognized; |
|
6155 } |
|
6156 } |
|
6157 |
|
6158 // ----------------------------------------------------------------------------- |
|
6159 // CMovieProcessorImpl::SetMovieSizeLimit |
|
6160 // Sets the maximum size for the movie |
|
6161 // (other items were commented in a header). |
|
6162 // ----------------------------------------------------------------------------- |
|
6163 // |
|
6164 void CMovieProcessorImpl::SetMovieSizeLimit(TInt aLimit) |
|
6165 { |
|
6166 // reserve 2000 bytes for safety margin, since size is checked after a video frame is written to file. |
|
6167 iMovieSizeLimit = (aLimit - 2000 > 0) ? aLimit - 2000 : 0; |
|
6168 } |
|
6169 |
|
6170 |
|
6171 //================================== |
|
6172 // color tone RGB TO YUV Function |
|
6173 //================================== |
|
6174 |
|
6175 void CMovieProcessorImpl::ConvertColorToneRGBToYUV(TVedColorEffect aColorEffect,TRgb aColorToneRgb) |
|
6176 { |
|
6177 TInt uVal, vVal; // to get the U,V Values of the ColorTone |
|
6178 |
|
6179 CVedVideoClipInfo* currentInfo = iVideoClip->Info(); |
|
6180 TVedVideoBitstreamMode streamMode = currentInfo->TranscodeFactor().iStreamType; |
|
6181 |
|
6182 // evaluate the u,v values for color toning |
|
6183 if (aColorEffect == EVedColorEffectToning) |
|
6184 { |
|
6185 uVal = TInt(-(0.16875*aColorToneRgb.Red()) - (0.33126*aColorToneRgb.Green()) + (0.5*aColorToneRgb.Blue()) + 0.5); |
|
6186 vVal = TInt((0.5*aColorToneRgb.Red()) - (0.41869*aColorToneRgb.Green()) - (0.08131*aColorToneRgb.Blue()) + 0.5); |
|
6187 } |
|
6188 |
|
6189 // adjust the u,v values for h.263 and mpeg-4 |
|
6190 if(iOutputVideoType == EVideoH263Profile0Level10 || iOutputVideoType == EVideoH263Profile0Level45 || (!(iOutputVideoType == EVideoMPEG4) && streamMode == EVedVideoBitstreamModeMPEG4ShortHeader)) |
|
6191 { |
|
6192 if(aColorEffect == EVedColorEffectBlackAndWhite) |
|
6193 { |
|
6194 uVal = 255; // codeword for value=128 |
|
6195 vVal = 255; // codeword for value=128 |
|
6196 } |
|
6197 else if (aColorEffect == EVedColorEffectToning) |
|
6198 { |
|
6199 uVal += 128; |
|
6200 vVal += 128; |
|
6201 |
|
6202 AdjustH263UV(uVal); |
|
6203 AdjustH263UV(vVal); |
|
6204 } |
|
6205 } |
|
6206 else if (iOutputVideoType == EVideoMPEG4) |
|
6207 { |
|
6208 if(aColorEffect == EVedColorEffectBlackAndWhite) |
|
6209 { |
|
6210 uVal = 0; // codeword for value=128 |
|
6211 vVal = 0; // codeword for value=128 |
|
6212 } |
|
6213 else if (aColorEffect == EVedColorEffectToning) |
|
6214 { |
|
6215 uVal /= 2; // do not use bit shift; may have negative values |
|
6216 vVal /= 2; // do not use bit shift; may have negative values |
|
6217 } |
|
6218 } |
|
6219 |
|
6220 iColorToneU = uVal; |
|
6221 iColorToneV = vVal; |
|
6222 // iMovie->VideoClipSetColorTone(0, iColorToneYUV); |
|
6223 |
|
6224 } |
|
6225 |
|
6226 //======================================= |
|
6227 // CMovieProcessorImpl::AdjustH263UV() |
|
6228 // Adjusts the UV values for Color Toning |
|
6229 //======================================= |
|
6230 void CMovieProcessorImpl::AdjustH263UV(TInt& aValue) |
|
6231 { |
|
6232 if(aValue == 0) // end points are not used |
|
6233 { |
|
6234 aValue = 1; |
|
6235 } |
|
6236 else if (aValue == 128) // not used |
|
6237 { |
|
6238 aValue = 255; |
|
6239 } |
|
6240 else if (aValue >= 255) // end points are not used |
|
6241 { |
|
6242 aValue = 254; |
|
6243 } |
|
6244 } |
|
6245 |
|
6246 // ----------------------------------------------------------------------------- |
|
6247 // CMovieProcessorImpl::GetOutputVideoMimeType |
|
6248 // Return Mime type for output video codec |
|
6249 // (other items were commented in a header). |
|
6250 // ----------------------------------------------------------------------------- |
|
6251 // |
|
6252 TPtrC8& CMovieProcessorImpl::GetOutputVideoMimeType() |
|
6253 { |
|
6254 VPASSERT(iMovie); |
|
6255 |
|
6256 return iMovie->VideoCodecMimeType(); |
|
6257 } |
|
6258 |
|
6259 // ----------------------------------------------------------------------------- |
|
6260 // CMovieProcessorImpl::GetOutputAVCLevel |
|
6261 // Get output AVC level |
|
6262 // (other items were commented in a header). |
|
6263 // ----------------------------------------------------------------------------- |
|
6264 // |
|
6265 TInt CMovieProcessorImpl::GetOutputAVCLevel() |
|
6266 { |
|
6267 |
|
6268 VPASSERT( iOutputVideoType == EVideoAVCProfileBaseline ); |
|
6269 |
|
6270 const TPtrC8& mimeType = iMovie->VideoCodecMimeType(); |
|
6271 |
|
6272 if ( mimeType.MatchF( _L8("*profile-level-id=42800A*") ) != KErrNotFound ) |
|
6273 { |
|
6274 // baseline profile level 1 |
|
6275 return 10; |
|
6276 } |
|
6277 |
|
6278 else if ( mimeType.MatchF( _L8("*profile-level-id=42900B*") ) != KErrNotFound ) |
|
6279 { |
|
6280 // baseline profile level 1b |
|
6281 return 101; // internal constant for level 1b |
|
6282 } |
|
6283 |
|
6284 else if ( mimeType.MatchF( _L8("*profile-level-id=42800B*") ) != KErrNotFound ) |
|
6285 { |
|
6286 // baseline profile level 1.1 |
|
6287 return 11; |
|
6288 } |
|
6289 |
|
6290 else if ( mimeType.MatchF( _L8("*profile-level-id=42800C*") ) != KErrNotFound ) |
|
6291 { |
|
6292 // baseline profile level 1.2 |
|
6293 return 12; |
|
6294 } |
|
6295 //WVGA task |
|
6296 else if ( mimeType.MatchF( _L8("*profile-level-id=42800D*") ) != KErrNotFound ) |
|
6297 { |
|
6298 // baseline profile level 1.3 |
|
6299 return 13; |
|
6300 } |
|
6301 else if ( mimeType.MatchF( _L8("*profile-level-id=428014*") ) != KErrNotFound ) |
|
6302 { |
|
6303 // baseline profile level 2 |
|
6304 return 20; |
|
6305 } |
|
6306 else if ( mimeType.MatchF( _L8("*profile-level-id=428015*") ) != KErrNotFound ) |
|
6307 { |
|
6308 // baseline profile level 2.1 |
|
6309 return 21; |
|
6310 } |
|
6311 else if ( mimeType.MatchF( _L8("*profile-level-id=428016*") ) != KErrNotFound ) |
|
6312 { |
|
6313 // baseline profile level 2.2 |
|
6314 return 22; |
|
6315 } |
|
6316 else if ( mimeType.MatchF( _L8("*profile-level-id=42801E*") ) != KErrNotFound ) |
|
6317 { |
|
6318 // baseline profile level 3 |
|
6319 return 30; |
|
6320 } |
|
6321 else if ( mimeType.MatchF( _L8("*profile-level-id=42801F*") ) != KErrNotFound ) |
|
6322 { |
|
6323 // baseline profile level 3.1 |
|
6324 return 31; |
|
6325 } |
|
6326 |
|
6327 else if ( mimeType.MatchF( _L8("*profile-level-id=*") ) != KErrNotFound ) |
|
6328 { |
|
6329 // no other profile-level ids supported |
|
6330 User::Panic(_L("CMovieProcessorImpl"), EInvalidInternalState); |
|
6331 } |
|
6332 |
|
6333 else |
|
6334 { |
|
6335 // Default is level 1 (?) |
|
6336 return 10; |
|
6337 } |
|
6338 return 10; |
|
6339 } |
|
6340 |
|
6341 // ----------------------------------------------------------------------------- |
|
6342 // CMovieProcessorImpl::SuspendProcessing |
|
6343 // Suspends processing |
|
6344 // (other items were commented in a header). |
|
6345 // ----------------------------------------------------------------------------- |
|
6346 // |
|
6347 TInt CMovieProcessorImpl::SuspendProcessing() |
|
6348 { |
|
6349 |
|
6350 PRINT((_L("CMovieProcessorImpl::SuspendProcessing()"))); |
|
6351 |
|
6352 iDemux->Stop(); |
|
6353 |
|
6354 return KErrNone; |
|
6355 } |
|
6356 |
|
6357 |
|
6358 // ----------------------------------------------------------------------------- |
|
6359 // CMovieProcessorImpl::ResumeProcessing |
|
6360 // Resumes processing |
|
6361 // (other items were commented in a header). |
|
6362 // ----------------------------------------------------------------------------- |
|
6363 // |
|
6364 TInt CMovieProcessorImpl::ResumeProcessing(TInt& aStartFrameIndex, TInt aFrameNumber) |
|
6365 { |
|
6366 |
|
6367 PRINT((_L("CMovieProcessorImpl::ResumeProcessing(), frame number = %d"), aFrameNumber)); |
|
6368 PRINT((_L("CMovieProcessorImpl::ResumeProcessing(), start index = %d"), iStartFrameIndex )); |
|
6369 |
|
6370 // get index of last written frame |
|
6371 TInt index = iStartFrameIndex + aFrameNumber; |
|
6372 |
|
6373 PRINT((_L("CMovieProcessorImpl::ResumeProcessing(), index is %d"), index)); |
|
6374 |
|
6375 TInt ticks; |
|
6376 // get start time for next frame |
|
6377 TInt time = iParser->GetVideoFrameStartTime(index + 1, &ticks); |
|
6378 if ( time < 0 ) |
|
6379 { |
|
6380 // time represents an error code from parser |
|
6381 PRINT((_L("CMovieProcessorImpl::ResumeProcessing(), error from iParser %d"), time)); |
|
6382 return time; |
|
6383 } |
|
6384 |
|
6385 PRINT((_L("CMovieProcessorImpl::ResumeProcessing(), start frame time = %d ms"), time)); |
|
6386 iStartCutTime = TTimeIntervalMicroSeconds(TInt64(time) * TInt64(1000)); |
|
6387 |
|
6388 // reset parser variables |
|
6389 iParser->Reset(); |
|
6390 |
|
6391 // seek to Intra from where to start decoding to resume processing |
|
6392 TInt error = iParser->SeekOptimalIntraFrame(iStartCutTime, 0, EFalse); |
|
6393 |
|
6394 if (error != KErrNone) |
|
6395 return error; |
|
6396 |
|
6397 aStartFrameIndex = iParser->GetStartFrameIndex(); |
|
6398 |
|
6399 PRINT((_L("CMovieProcessorImpl::ResumeProcessing(), aStartFrameIndex = %d"), aStartFrameIndex)); |
|
6400 |
|
6401 // iStartFrameIndex contains the index of the first included |
|
6402 // frame, which is != 0 in case the clip is cut from beginning |
|
6403 aStartFrameIndex -= iStartFrameIndex; |
|
6404 |
|
6405 PRINT((_L("CMovieProcessorImpl::ResumeProcessing(), aStartFrameIndex = %d"), aStartFrameIndex)); |
|
6406 |
|
6407 iVideoQueue->ResetStreamEnd(); |
|
6408 iDemux->Start(); |
|
6409 |
|
6410 return KErrNone; |
|
6411 } |
|
6412 |
|
6413 // ----------------------------------------------------------------------------- |
|
6414 // CMovieProcessorImpl::NeedTranscoderAnyMore |
|
6415 // Check if all video is processed already |
|
6416 // (other items were commented in a header). |
|
6417 // ----------------------------------------------------------------------------- |
|
6418 // |
|
6419 TBool CMovieProcessorImpl::NeedTranscoderAnyMore() |
|
6420 { |
|
6421 PRINT((_L("CMovieProcessorImpl::NeedTranscoderAnyMore()"))); |
|
6422 if ( iAllVideoProcessed ) |
|
6423 { |
|
6424 PRINT((_L("CMovieProcessorImpl::NeedTranscoderAnyMore() EFalse"))); |
|
6425 return EFalse; |
|
6426 } |
|
6427 else |
|
6428 { |
|
6429 PRINT((_L("CMovieProcessorImpl::NeedTranscoderAnyMore() ETrue"))); |
|
6430 return ETrue; |
|
6431 } |
|
6432 |
|
6433 |
|
6434 } |
|
6435 |
|
6436 |
|
6437 // OTHER EXPORTED FUNCTIONS |
|
6438 |
|
6439 |
|
6440 //============================================================================= |
|
6441 |
|
6442 |
|
6443 // End of File |
|
6444 |
|