1
/* This file is part of the KDE project.

Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).

This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 2.1 or 3 of the License.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License
along with this library. If not, see <http://www.gnu.org/licenses/>.
*/
18
#include <QtCore/QVector>
19
#include <QtCore/QTimerEvent>
20
#include <QtCore/QTimer>
21
#include <QtCore/QTime>
22
#include <QtCore/QLibrary>
35
#include "mediaobject.h"
36
#include "videowidget.h"
37
#include "audiooutput.h"
40
#include <QtCore/QDebug>
42
#define TIMER_INTERVAL 16 //... ms for the timer that polls the current state (we use the multimedia timer)
43
#define PRELOAD_TIME 2000 // 2 seconds to load a source
51
typedef BOOL (WINAPI* LPAMGETERRORTEXT)(HRESULT, WCHAR *, DWORD);
53
//first the definition of the WorkerThread class
54
// Constructor: the thread starts with no pending work; work ids begin at 1 so
// that 0 can serve as an "invalid id" sentinel for callers.
// NOTE(review): the bodies/braces of these members are elided in this
// extraction; the bare numeric lines are residue of the original line numbers.
WorkerThread::WorkerThread()
55
: QThread(), m_finished(false), m_currentWorkId(1)
59
// Destructor (body elided in this extraction).
WorkerThread::~WorkerThread()
63
// Worker thread main loop: waits either on the internal wait condition
// (handles[0], signalled when new work is queued) or on the event handle of
// any managed filter graph, and forwards DirectShow graph events via the
// eventReady() signal.
// NOTE(review): interior lines (braces, the 'count' and 'eventCode'
// declarations, the handleTask() call) are elided in this extraction; the
// bare numeric lines are residue of the original line numbers.
void WorkerThread::run()
65
while (m_finished == false) {
66
HANDLE handles[FILTER_COUNT +1];
67
handles[0] = m_waitCondition;
69
for(int i = 0; i < FILTER_COUNT; ++i) {
70
if (m_graphHandle[i].graph) {
71
handles[count++] = m_graphHandle[i].handle;
74
// block until either new work arrives or one of the graphs signals an event
DWORD result = ::WaitForMultipleObjects(count, handles, FALSE, INFINITE);
75
if (result == WAIT_OBJECT_0) {
78
//this is the event management
79
const Graph &graph = m_graphHandle[result - WAIT_OBJECT_0 - 1].graph;
81
LONG_PTR param1, param2;
83
ComPointer<IMediaEvent> mediaEvent(graph, IID_IMediaEvent);
84
// fixed: '&param1'/'&param2' had been mangled to '¶m1'/'¶m2' (the byte
// sequence "&para" was collapsed into the pilcrow character); this matches
// IMediaEvent::GetEvent(long*, LONG_PTR*, LONG_PTR*, long).
mediaEvent->GetEvent(&eventCode, &param1, &param2, 0);
85
emit eventReady(graph, eventCode, param1);
86
mediaEvent->FreeEventParams(eventCode, param1, param2);
91
//wants to know as soon as the state is set
92
// Queues a request to change the state of `graph`; any already-queued
// state-change request for the same graph is discarded first so only the
// newest request is executed.
// NOTE(review): interior lines (the 'found' declaration, removal of the
// matched entry, the local 'Work w' and its enqueue) are elided in this
// extraction; the bare numeric lines are residue of the original line numbers.
void WorkerThread::addStateChangeRequest(Graph graph, OAFilterState state, QList<Filter> decoders)
94
QMutexLocker locker(&m_mutex);
96
//we try to see if there is already an attempt to change the state and we remove it
97
for(int i = 0; !found && i < m_queue.size(); ++i) {
98
const Work &w = m_queue.at(i);
99
if (w.graph == graph && w.task == ChangeState) {
105
//now let's create the new task
107
w.task = ChangeState;
108
w.id = m_currentWorkId++;
111
w.decoders = decoders;
113
// wake the worker thread so it picks up the new task
m_waitCondition.set();
116
// Queues a seek request for `graph`; a pending seek for the same graph is
// dropped first so only the newest target time is applied. Returns the work
// id (used later to match the asyncSeekingFinished() signal).
// NOTE(review): interior lines (the 'found' declaration, the local 'Work w',
// the enqueue and the return) are elided in this extraction.
quint16 WorkerThread::addSeekRequest(Graph graph, qint64 time)
118
QMutexLocker locker(&m_mutex);
120
//we try to see if there is already an attempt to seek and we remove it
121
for(int i = 0; !found && i < m_queue.size(); ++i) {
122
const Work &w = m_queue.at(i);
123
if (w.graph == graph && w.task == Seek) {
131
//we create a new graph
133
w.id = m_currentWorkId++;
136
// wake the worker thread
m_waitCondition.set();
140
// Queues an asynchronous render of `url` on a freshly created filter graph.
// Returns the work id (matched against asyncRenderFinished()).
// NOTE(review): interior lines (the local 'Work w', the task/url assignments,
// the enqueue and the return) are elided in this extraction.
quint16 WorkerThread::addUrlToRender(const QString &url)
142
QMutexLocker locker(&m_mutex);
145
//we create a new graph
146
w.graph = Graph(CLSID_FilterGraph, IID_IGraphBuilder);
149
w.id = m_currentWorkId++;
151
// wake the worker thread
m_waitCondition.set();
155
// Queues an asynchronous render of a source `filter`: a new graph is created,
// the filter is added to it, and the render task is queued. Returns the work
// id (matched against asyncRenderFinished()).
// NOTE(review): interior lines (the local 'Work w', the task/filter
// assignments, the enqueue and the return) are elided in this extraction.
quint16 WorkerThread::addFilterToRender(const Filter &filter)
157
QMutexLocker locker(&m_mutex);
160
//we create a new graph
161
w.graph = Graph(CLSID_FilterGraph, IID_IGraphBuilder);
163
w.graph->AddFilter(filter, 0);
164
w.id = m_currentWorkId++;
166
// wake the worker thread
m_waitCondition.set();
170
// Queues a ReplaceGraph task: event monitoring moves from `oldGraph` to
// `newGraph` (see the ReplaceGraph branch in handleTask()).
// NOTE(review): interior lines (the local 'Work w', the 'w.graph = newGraph'
// assignment and the enqueue) are elided in this extraction.
void WorkerThread::replaceGraphForEventManagement(Graph newGraph, Graph oldGraph)
172
QMutexLocker locker(&m_mutex);
174
w.task = ReplaceGraph;
176
w.oldGraph = oldGraph;
178
// wake the worker thread
m_waitCondition.set();
181
// Dequeues and executes one Work item: ReplaceGraph (swap which graph's event
// handle is monitored), Render (render a filter's output pins or a URL —
// blocking), Seek (absolute position change), or ChangeState (prune unused
// decoders, apply the state, then report the resulting Phonon state).
// NOTE(review): many interior lines (braces, early returns, locals such as
// 'index', 'h', 'hr', 'input' and 's', and the switch cases) are elided in
// this extraction; the bare numeric lines are residue of original line
// numbers. The two same-named QMutexLocker declarations below were almost
// certainly in nested scopes in the original — confirm against upstream.
void WorkerThread::handleTask()
183
// NOTE(review): likely '&Backend::directShowMutex' in the original — confirm.
QMutexLocker locker(Backend::directShowMutex);
185
QMutexLocker locker(&m_mutex);
186
if (m_finished || m_queue.isEmpty()) {
190
m_currentWork = m_queue.dequeue();
192
//we ensure to have the wait condition in the right state
193
if (m_queue.isEmpty()) {
194
m_waitCondition.reset();
196
m_waitCondition.set();
202
if (m_currentWork.task == ReplaceGraph) {
204
for(int i = 0; i < FILTER_COUNT; ++i) {
205
if (m_graphHandle[i].graph == m_currentWork.oldGraph) {
206
// free the slot previously used by the old graph
m_graphHandle[i].graph = Graph();
209
} else if (index == -1 && m_graphHandle[i].graph == 0) {
210
//this is the first available slot
215
Q_ASSERT(index != -1);
219
if (SUCCEEDED(ComPointer<IMediaEvent>(m_currentWork.graph, IID_IMediaEvent)
220
->GetEventHandle(reinterpret_cast<OAEVENT*>(&h)))) {
221
m_graphHandle[index].graph = m_currentWork.graph;
222
m_graphHandle[index].handle = h;
224
} else if (m_currentWork.task == Render) {
225
if (m_currentWork.filter) {
227
// rendering a filter: render each of its output pins in turn
const QList<OutputPin> outputs = BackendNode::pins(m_currentWork.filter, PINDIR_OUTPUT);
228
for (int i = 0; SUCCEEDED(hr) && i < outputs.count(); ++i) {
229
hr = m_currentWork.graph->Render(outputs.at(i));
231
} else if (!m_currentWork.url.isEmpty()) {
232
//let's render a url (blocking call)
233
hr = m_currentWork.graph->RenderFile(reinterpret_cast<const wchar_t *>(m_currentWork.url.utf16()), 0);
236
emit asyncRenderFinished(m_currentWork.id, hr, m_currentWork.graph);
238
} else if (m_currentWork.task == Seek) {
239
//that's a seekrequest
240
ComPointer<IMediaSeeking> mediaSeeking(m_currentWork.graph, IID_IMediaSeeking);
241
// DirectShow reference time is in 100ns units; Phonon time is in ms
qint64 newtime = m_currentWork.time * 10000;
242
hr = mediaSeeking->SetPositions(&newtime, AM_SEEKING_AbsolutePositioning,
243
0, AM_SEEKING_NoPositioning);
244
emit asyncSeekingFinished(m_currentWork.id, newtime / 10000);
245
hr = E_ABORT; //to avoid emitting asyncRenderFinished
246
} else if (m_currentWork.task == ChangeState) {
248
//remove useless decoders
249
QList<Filter> unused;
250
for (int i = 0; i < m_currentWork.decoders.count(); ++i) {
251
const Filter &filter = m_currentWork.decoders.at(i);
253
const QList<OutputPin> pins = BackendNode::pins(filter, PINDIR_OUTPUT);
254
for( int i = 0; i < pins.count(); ++i) {
256
if (pins.at(i)->ConnectedTo(input.pparam()) == S_OK) {
265
//we can get the state
266
for (int i = 0; i < unused.count(); ++i) {
267
//we should remove this filter from the graph
268
m_currentWork.graph->RemoveFilter(unused.at(i));
272
//we can get the state
273
ComPointer<IMediaControl> mc(m_currentWork.graph, IID_IMediaControl);
275
//we change the state here
276
switch(m_currentWork.state)
290
// wait (blocking) until the graph has actually settled into its new state
HRESULT hr = mc->GetState(INFINITE, &s);
293
if (s == State_Stopped) {
294
emit stateReady(m_currentWork.graph, Phonon::StoppedState);
295
} else if (s == State_Paused) {
296
emit stateReady(m_currentWork.graph, Phonon::PausedState);
297
} else /*if (s == State_Running)*/ {
298
emit stateReady(m_currentWork.graph, Phonon::PlayingState);
304
QMutexLocker locker(&m_mutex);
305
m_currentWork = Work(); //reinitialize
309
// Aborts the render identified by `renderId`: if it is currently executing,
// the graph's (blocking) render call is aborted; if it is still queued, the
// queued entry is removed.
// NOTE(review): work ids are quint16 elsewhere but qint16 here — confirm
// against the header. Interior lines (the 'found' declaration, the removal of
// the matched entry) are elided in this extraction.
void WorkerThread::abortCurrentRender(qint16 renderId)
311
QMutexLocker locker(&m_mutex);
312
if (m_currentWork.id == renderId) {
313
m_currentWork.graph->Abort();
316
for(int i = 0; !found && i < m_queue.size(); ++i) {
317
const Work &w = m_queue.at(i);
318
if (w.id == renderId) {
321
if (m_queue.isEmpty()) {
322
// nothing left to do: put the wait condition back to the idle state
m_waitCondition.reset();
328
//tells the thread to stop processing
329
// Requests shutdown of the worker thread: aborts any in-flight render and
// wakes the thread so its run() loop can observe the stop request.
// NOTE(review): interior lines (clearing the queue, setting 'm_finished')
// are elided in this extraction.
void WorkerThread::signalStop()
331
QMutexLocker locker(&m_mutex);
333
if (m_currentWork.graph) {
334
//in case we're currently rendering something
335
m_currentWork.graph->Abort();
340
// wake the thread so it can notice the stop request and exit
m_waitCondition.set();
344
// MediaObject constructor: creates one MediaGraph per slot (FILTER_COUNT —
// the "current" and the "next" graph, used for gapless/cross-faded source
// switching) and wires the worker-thread signals to the corresponding slots.
// NOTE(review): several member initializers, braces and the thread start are
// elided in this extraction; 'transactionState' below looks like a mangled
// initializer name — confirm against the class declaration.
MediaObject::MediaObject(QObject *parent) : BackendNode(parent),
345
transactionState(Phonon::StoppedState),
346
m_errorType(Phonon::NoError),
347
m_state(Phonon::LoadingState),
348
m_nextState(Phonon::StoppedState),
352
m_oldHasVideo(false),
353
m_prefinishMarkSent(false),
354
m_aboutToFinishSent(false),
355
m_nextSourceReadyToStart(false),
356
#ifndef QT_NO_PHONON_MEDIACONTROLLER
357
m_autoplayTitles(true),
359
#endif //QT_NO_PHONON_MEDIACONTROLLER
360
// INFINITE means "no tick scheduled yet" (see updateTargetTick())
m_targetTick(INFINITE)
363
for(int i = 0; i < FILTER_COUNT; ++i) {
364
m_graphs[i] = new MediaGraph(this, i);
367
connect(&m_thread, SIGNAL(stateReady(Graph,Phonon::State)),
368
SLOT(slotStateReady(Graph,Phonon::State)));
370
connect(&m_thread, SIGNAL(eventReady(Graph,long,long)),
371
SLOT(handleEvents(Graph,long,long)));
373
connect(&m_thread, SIGNAL(asyncRenderFinished(quint16,HRESULT,Graph)),
374
SLOT(finishLoading(quint16,HRESULT,Graph)));
376
connect(&m_thread, SIGNAL(asyncSeekingFinished(quint16,qint64)),
377
SLOT(finishSeeking(quint16,qint64)));
378
//really special case
379
m_mediaObject = this;
383
// Destructor: stop the tick timer, shut down the worker thread, then delete
// the media graphs.
// NOTE(review): the timer stop, the thread wait/join and the per-graph
// delete are elided in this extraction.
MediaObject::~MediaObject()
385
//be sure to finish the timer first
388
//we finish the worker thread here
389
m_thread.signalStop();
392
//and then we delete the graphs
393
for (int i = 0; i < FILTER_COUNT; ++i) {
398
// Accessors for the worker thread and the two graph slots. currentGraph() is
// the graph playing now; nextGraph() is the preloading slot (the last index).
// NOTE(review): the return statements of workerThread() and currentGraph()
// are elided in this extraction.
WorkerThread *MediaObject::workerThread()
403
MediaGraph *MediaObject::currentGraph() const
408
MediaGraph *MediaObject::nextGraph() const
410
return m_graphs[FILTER_COUNT - 1];
413
//utility function to save the graph to a file
414
// Polling tick handler (driven by m_tickTimer at TIMER_INTERVAL ms): emits
// tick(), handles title auto-advance, triggers the cross-fade switch to the
// next source, and emits prefinishMarkReached()/aboutToFinish()/bufferStatus()
// at the appropriate remaining times.
// NOTE(review): many interior lines (braces, the tick() emission, the 'l'
// declaration, a branch on 'total == 0') are elided in this extraction; the
// stray comment above likely belongs to an elided debug helper.
void MediaObject::timerEvent(QTimerEvent *e)
416
if (e->timerId() == m_tickTimer.timerId()) {
418
const qint64 current = currentTime();
419
const qint64 total = totalTime();
421
if ( m_tickInterval != 0 && current > m_targetTick) {
426
//check that the title hasn't changed
427
#ifndef QT_NO_PHONON_MEDIACONTROLLER
428
if (m_autoplayTitles && m_currentTitle < _iface_availableTitles() - 1) {
430
if (current >= total) {
431
//we go to the next title
432
_iface_setCurrentTitle(m_currentTitle + 1, false);
437
#endif //QT_NO_PHONON_MEDIACONTROLLER
440
const qint64 remaining = total - current;
442
// negative transition time means cross-fading of that duration
if (m_transitionTime < 0 && m_nextSourceReadyToStart) {
443
if (remaining < -m_transitionTime + TIMER_INTERVAL/2) {
444
//we need to switch graphs to run the next source in the queue (with cross-fading)
445
switchToNextSource();
447
} else if (current < -m_transitionTime) {
448
//we are currently crossfading
449
for (int i = 0; i < m_audioOutputs.count(); ++i) {
450
m_audioOutputs.at(i)->setCrossFadingProgress( currentGraph()->index(), qMin( qreal(1.), qreal(current) / qreal(-m_transitionTime)));
455
// half a timer interval of slack so the mark isn't missed between ticks
if (m_prefinishMark > 0 && !m_prefinishMarkSent && remaining < m_prefinishMark + TIMER_INTERVAL/2) {
457
qDebug() << "DS9: emit prefinishMarkReached" << remaining << QTime::currentTime().toString();
459
m_prefinishMarkSent = true;
461
emit prefinishMarkReached( remaining );
464
if (!m_aboutToFinishSent && remaining < PRELOAD_TIME - m_transitionTime + TIMER_INTERVAL/2) {
465
//let's take a 2 seconds time to actually load the next file
467
qDebug() << "DS9: emit aboutToFinish" << remaining << QTime::currentTime().toString();
469
m_aboutToFinishSent = true;
470
emit aboutToFinish();
473
//total is 0: the stream is probably live (endless)
477
ComPointer<IAMNetworkStatus> status(currentGraph()->realSource(), IID_IAMNetworkStatus);
480
status->get_BufferingProgress(&l);
481
emit bufferStatus(l);
483
qDebug() << "emit bufferStatus(" << l << ")";
490
// Swaps the preloaded "next" graph in as the current one: resets the
// end-of-source bookkeeping, stops the previous graph (unless cross-fading),
// starts playback of the new source, tells video widgets and audio outputs to
// switch, and emits the source/metadata/hasVideo/totalTime change signals.
// NOTE(review): interior lines (braces, the play() call, an early return on
// error) are elided in this extraction.
void MediaObject::switchToNextSource()
492
m_prefinishMarkSent = false;
493
m_aboutToFinishSent = false;
494
m_nextSourceReadyToStart = false;
496
m_oldHasVideo = currentGraph()->hasVideo();
498
qSwap(m_graphs[0], m_graphs[1]); //swap the graphs
500
// a negative transition time means cross-fading: keep the old graph running
if (m_transitionTime >= 0)
501
m_graphs[1]->stop(); //make sure we stop the previous graph
503
if (currentGraph()->mediaSource().type() != Phonon::MediaSource::Invalid &&
504
catchComError(currentGraph()->renderResult())) {
505
setState(Phonon::ErrorState);
509
//we need to play the next media
512
//we tell the video widgets to switch now to the new source
513
#ifndef QT_NO_PHONON_VIDEO
514
for (int i = 0; i < m_videoWidgets.count(); ++i) {
515
m_videoWidgets.at(i)->setCurrentGraph(currentGraph()->index());
517
#endif //QT_NO_PHONON_VIDEO
519
emit currentSourceChanged(currentGraph()->mediaSource());
520
emit metaDataChanged(currentGraph()->metadata());
522
if (nextGraph()->hasVideo() != currentGraph()->hasVideo()) {
523
emit hasVideoChanged(currentGraph()->hasVideo());
527
emit totalTimeChanged(totalTime());
529
#ifndef QT_NO_PHONON_MEDIACONTROLLER
530
setTitles(currentGraph()->titles());
531
#endif //QT_NO_PHONON_MEDIACONTROLLER
534
// state(): reports BufferingState while buffering, otherwise m_state.
// NOTE(review): the buffering condition and braces are elided in this
// extraction, as are several bodies/braces below.
Phonon::State MediaObject::state() const
537
return Phonon::BufferingState;
543
// Whether the currently playing source contains video.
bool MediaObject::hasVideo() const
545
return currentGraph()->hasVideo();
548
// Whether the current source supports seeking.
bool MediaObject::isSeekable() const
550
return currentGraph()->isSeekable();
553
// Total duration of the current title (ms). With the media controller
// enabled, a title's duration is the distance to the next title's absolute
// start position (or to the end of the media for the last title).
qint64 MediaObject::totalTime() const
555
#ifndef QT_NO_PHONON_MEDIACONTROLLER
556
//1st, check if there is more titles after
557
const qint64 ret = (m_currentTitle < _iface_availableTitles() - 1) ?
558
titleAbsolutePosition(m_currentTitle+1) : currentGraph()->absoluteTotalTime();
560
//this is the duration of the current title
561
return ret - titleAbsolutePosition(m_currentTitle);
563
return currentGraph()->absoluteTotalTime();
564
#endif //QT_NO_PHONON_MEDIACONTROLLER
567
// Current position (ms) relative to the start of the current title.
qint64 MediaObject::currentTime() const
569
//this handles inaccuracy when stopping on a title
570
return currentGraph()->absoluteCurrentTime()
571
#ifndef QT_NO_PHONON_MEDIACONTROLLER
572
- titleAbsolutePosition(m_currentTitle)
573
#endif //QT_NO_PHONON_MEDIACONTROLLER
577
// Interval (ms) between tick() emissions; 0 disables ticks.
qint32 MediaObject::tickInterval() const
579
return m_tickInterval;
582
// Sets the tick() emission interval (ms); 0 disables ticks.
// NOTE(review): braces and an updateTargetTick() call are likely elided in
// this extraction, as are braces/else-branches of the functions below.
void MediaObject::setTickInterval(qint32 newTickInterval)
584
m_tickInterval = newTickInterval;
588
// pause(): while still loading, only record the wanted state; otherwise
// forward to the current graph.
void MediaObject::pause()
590
if (currentGraph()->isLoading()) {
591
m_nextState = Phonon::PausedState;
593
currentGraph()->pause();
597
// stop(): same deferral pattern as pause().
void MediaObject::stop()
599
if (currentGraph()->isLoading()) {
600
m_nextState = Phonon::StoppedState;
602
currentGraph()->stop();
606
// Synchronously stops the graph and clears a previous error state.
void MediaObject::ensureStopped()
608
currentGraph()->ensureStopped();
609
if (m_state == Phonon::ErrorState) {
610
//we reset the state here
611
m_state = Phonon::StoppedState;
615
// play(): same deferral pattern as pause()/stop().
void MediaObject::play()
617
if (currentGraph()->isLoading()) {
618
m_nextState = Phonon::PlayingState;
620
currentGraph()->play();
624
// Human-readable description of the last error (set by catchComError()).
QString MediaObject::errorString() const
626
return m_errorString;
629
// Severity of the last error (NoError / NormalError / FatalError).
// NOTE(review): the return statement is elided in this extraction.
Phonon::ErrorType MediaObject::errorType() const
635
// Computes the next media position at which tick() should be emitted:
// the next multiple of m_tickInterval at or after the current time.
void MediaObject::updateTargetTick()
637
if (m_tickInterval) {
638
const qint64 current = currentTime();
639
m_targetTick = current / m_tickInterval * m_tickInterval;
640
if (current == 0 || m_targetTick < current) {
641
m_targetTick += m_tickInterval;
646
// Transitions to `newstate`, starting/stopping the polling timer as needed
// and emitting stateChanged().
// NOTE(review): interior lines (braces, the timer stop, the early return on
// unchanged state, the m_state assignment) are elided in this extraction.
void MediaObject::setState(Phonon::State newstate)
648
if (newstate == Phonon::PlayingState) {
652
if (newstate == m_state) {
657
if (newstate == Phonon::PlayingState) {
658
// only poll while playing — the timer drives tick()/mark emission
m_tickTimer.start(TIMER_INTERVAL, this);
663
Phonon::State oldstate = state();
665
emit stateChanged(newstate, oldstate);
669
// Simple property accessors for the prefinish mark, transition time,
// remaining time, and current media source.
// NOTE(review): braces are elided in this extraction.
qint32 MediaObject::prefinishMark() const
671
return m_prefinishMark;
674
void MediaObject::setPrefinishMark(qint32 newPrefinishMark)
676
m_prefinishMark = newPrefinishMark;
679
// Transition time in ms: > 0 = gap, 0 = gapless, < 0 = cross-fade duration.
qint32 MediaObject::transitionTime() const
681
return m_transitionTime;
684
void MediaObject::setTransitionTime(qint32 time)
686
m_transitionTime = time;
689
qint64 MediaObject::remainingTime() const
691
return totalTime() - currentTime();
695
Phonon::MediaSource MediaObject::source() const
697
return currentGraph()->mediaSource();
700
// Queues the source to play after the current one: it is preloaded into the
// "next" graph, and if playback has already stopped/errored the switch
// happens immediately.
// NOTE(review): braces and the condition guarding switchToNextSource() are
// elided in this extraction.
void MediaObject::setNextSource(const Phonon::MediaSource &source)
702
m_nextSourceReadyToStart = true;
703
const bool shouldSwitch = (m_state == Phonon::StoppedState || m_state == Phonon::ErrorState);
704
nextGraph()->loadSource(source); //let's preload the source
707
switchToNextSource();
711
// Replaces the current source: resets end-of-source bookkeeping, enters
// LoadingState, loads the source into the current graph (errors reported via
// catchComError()) and announces the change.
void MediaObject::setSource(const Phonon::MediaSource &source)
713
m_nextSourceReadyToStart = false;
714
m_prefinishMarkSent = false;
715
m_aboutToFinishSent = false;
717
m_oldHasVideo = currentGraph()->hasVideo();
718
setState(Phonon::LoadingState);
719
//After loading we go into stopped state
720
m_nextState = Phonon::StoppedState;
721
catchComError(currentGraph()->loadSource(source));
722
emit currentSourceChanged(source);
725
// Slot invoked from the worker thread when a graph reports its settled
// state; only applied if it concerns the current, fully-loaded graph.
// NOTE(review): the setState() call and braces are elided in this extraction.
void MediaObject::slotStateReady(Graph graph, Phonon::State newState)
727
if (graph == currentGraph()->graph() && !currentGraph()->isLoading()) {
732
// Called when a MediaGraph has finished loading its source: publishes
// titles/metadata/video info and transitions to the deferred next state
// (m_nextState), or to ErrorState if rendering failed.
// NOTE(review): interior lines (braces, the switch statement around the
// cases below, the play()/pause() calls) are elided in this extraction.
void MediaObject::loadingFinished(MediaGraph *mg)
734
if (mg == currentGraph()) {
735
#ifndef QT_NO_PHONON_MEDIACONTROLLER
738
setTitles(currentGraph()->titles());
739
#endif //QT_NO_PHONON_MEDIACONTROLLER
741
HRESULT hr = mg->renderResult();
743
if (catchComError(hr)) {
747
if (m_oldHasVideo != currentGraph()->hasVideo()) {
748
emit hasVideoChanged(currentGraph()->hasVideo());
751
#ifndef QT_NO_PHONON_VIDEO
752
if (currentGraph()->hasVideo()) {
753
updateVideoGeometry();
755
#endif //QT_NO_PHONON_VIDEO
757
emit metaDataChanged(currentGraph()->metadata());
758
emit totalTimeChanged(totalTime());
760
//let's put the next state
763
case Phonon::PausedState:
766
case Phonon::PlayingState:
769
case Phonon::ErrorState:
770
setState(Phonon::ErrorState);
772
case Phonon::StoppedState:
780
// Seeks within the current title: converts the title-relative `time` to an
// absolute media position before delegating to the graph.
// NOTE(review): braces/terminating lines are elided in this extraction.
void MediaObject::seek(qint64 time)
782
//we seek into the current title
783
currentGraph()->absoluteSeek(time
784
#ifndef QT_NO_PHONON_MEDIACONTROLLER
785
+ titleAbsolutePosition(m_currentTitle)
786
#endif //QT_NO_PHONON_MEDIACONTROLLER
790
// Called when an asynchronous seek has completed: re-arms the prefinish /
// aboutToFinish notifications if we seeked back before them, and emits a
// tick so UI widgets (e.g. the seek slider) update immediately.
void MediaObject::seekingFinished(MediaGraph *mg)
792
if (mg == currentGraph()) {
795
if (currentTime() < totalTime() - m_prefinishMark) {
796
m_prefinishMarkSent = false;
799
if (currentTime() < totalTime() - PRELOAD_TIME + m_transitionTime) {
800
m_aboutToFinishSent = false;
803
//this helps the update of the application (seekslider for example)
804
if (m_state == PausedState || m_state == PlayingState) {
805
emit tick(currentTime());
811
// Translates a COM HRESULT into the Phonon error state: fills m_errorString
// (via quartz.dll's AMGetErrorTextW when available), classifies the error as
// fatal/normal/none, and returns true only for fatal errors.
// NOTE(review): interior lines (braces, the FAILED(hr)/severity branches) are
// elided in this extraction.
bool MediaObject::catchComError(HRESULT hr)
814
m_errorString.clear();
815
m_errorType = Phonon::NoError;
819
qWarning("an error occurred 0x%x",hr);
821
// resolved lazily from quartz.dll; may legitimately be unavailable
LPAMGETERRORTEXT getErrorText = (LPAMGETERRORTEXT)QLibrary::resolve(QLatin1String("quartz"), "AMGetErrorTextW");
823
WCHAR buffer[MAX_ERROR_TEXT_LEN];
824
if (getErrorText && getErrorText(hr, buffer, MAX_ERROR_TEXT_LEN)) {
825
m_errorString = QString::fromUtf16((const unsigned short*) buffer);
827
m_errorString = QString::fromLatin1("Unknown error");
829
// append the raw HRESULT unless the text already contains it
const QString comError = QString::number(uint(hr), 16);
830
if (!m_errorString.toLower().contains(comError.toLower())) {
831
m_errorString += QString::fromLatin1(" (0x%1)").arg(comError);
834
m_errorType = Phonon::FatalError;
835
setState(Phonon::ErrorState);
837
m_errorType = Phonon::NormalError;
838
m_nextState = Phonon::ErrorState;
841
m_errorType = Phonon::NoError;
845
return m_errorType == Phonon::FatalError;
849
// Registers `node` with every media graph and makes this MediaObject its
// owner.
// NOTE(review): braces and the 'bool ret'/return statements of the connect/
// disconnect functions below are elided in this extraction.
void MediaObject::grabNode(BackendNode *node)
851
for (int i = 0; i < FILTER_COUNT; ++i) {
852
m_graphs[i]->grabNode(node);
854
node->setMediaObject(this);
857
// Connects source -> sink in every graph; additionally tracks video widgets
// and audio outputs so they can be driven during source switches.
bool MediaObject::connectNodes(BackendNode *source, BackendNode *sink)
860
for (int i = 0; i < FILTER_COUNT; ++i) {
861
ret = ret && m_graphs[i]->connectNodes(source, sink);
864
#ifndef QT_NO_PHONON_VIDEO
865
if (VideoWidget *video = qobject_cast<VideoWidget*>(sink)) {
866
m_videoWidgets += video;
868
#endif //QT_NO_PHONON_VIDEO
869
if (AudioOutput *audio = qobject_cast<AudioOutput*>(sink)) {
870
m_audioOutputs += audio;
876
// Inverse of connectNodes(): disconnects in every graph and drops the sink
// from the tracked widget/output lists.
bool MediaObject::disconnectNodes(BackendNode *source, BackendNode *sink)
879
for (int i = 0; i < FILTER_COUNT; ++i) {
880
ret = ret && m_graphs[i]->disconnectNodes(source, sink);
883
#ifndef QT_NO_PHONON_VIDEO
884
if (VideoWidget *video = qobject_cast<VideoWidget*>(sink)) {
885
m_videoWidgets.removeOne(video);
887
#endif //QT_NO_PHONON_VIDEO
888
if (AudioOutput *audio = qobject_cast<AudioOutput*>(sink)) {
889
m_audioOutputs.removeOne(audio);
895
#ifndef QT_NO_PHONON_VIDEO
896
// Tells every connected video widget that video is available/changed so it
// can recompute its geometry.
void MediaObject::updateVideoGeometry()
898
for (int i = 0; i < m_videoWidgets.count(); ++i) {
899
m_videoWidgets.at(i)->notifyVideoLoaded();
902
#endif //QT_NO_PHONON_VIDEO
904
// Handles the end-of-stream event for `graph`. For the current graph: gives
// the frontend a (last) chance to set a next source via aboutToFinish(), then
// either finishes playback (no next source), switches immediately
// (transition time 0), or schedules the switch after the transition delay.
// For the old graph it just cleans up after a cross-fade.
// NOTE(review): interior lines (braces, the finished()/state handling around
// the paused transition) are elided in this extraction.
void MediaObject::handleComplete(IGraphBuilder *graph)
906
if (graph == currentGraph()->graph()) {
907
if (m_transitionTime >= PRELOAD_TIME || m_aboutToFinishSent == false) {
908
emit aboutToFinish(); //give a chance to the frontend to give a next source
909
m_aboutToFinishSent = true;
912
if (!m_nextSourceReadyToStart) {
913
//this is the last source, we simply finish
914
const qint64 current = currentTime();
915
const OAFilterState currentState = currentGraph()->syncGetRealState();
917
emit tick(current); //this ensures that the end of the seek slider is reached
920
// only finish if the frontend did not seek/restart in response to tick()
if (currentTime() == current && currentGraph()->syncGetRealState() == currentState) {
921
//no seek operation in-between
923
setState(Phonon::PausedState); //we set it here
926
} else if (m_transitionTime == 0) {
928
switchToNextSource(); //let's call the function immediately
929
} else if (m_transitionTime > 0) {
930
//management of the transition (if it is >= 0)
931
QTimer::singleShot(m_transitionTime, this, SLOT(switchToNextSource()));
934
//it is just the end of the previous source (in case of cross-fading)
935
nextGraph()->cleanup();
937
for (int i = 0; i < m_audioOutputs.count(); ++i) {
938
m_audioOutputs.at(i)->setCrossFadingProgress( currentGraph()->index(), 1.); //cross-fading is in any case finished
942
// Worker-thread completion slots: every graph is offered the result and the
// one owning `workId` consumes it.
// NOTE(review): braces are elided in this extraction.
void MediaObject::finishLoading(quint16 workId, HRESULT hr, Graph graph)
944
for(int i = 0; i < FILTER_COUNT; ++i) {
945
m_graphs[i]->finishLoading(workId, hr, graph);
949
void MediaObject::finishSeeking(quint16 workId, qint64 time)
951
for(int i = 0; i < FILTER_COUNT; ++i) {
952
m_graphs[i]->finishSeeking(workId, time);
957
// Central DirectShow event dispatcher (invoked via the worker thread's
// eventReady() signal). Only a few events have functional handling —
// buffering progress, length changes, completion, video size changes; the
// long case list below merely logs the event for debugging.
// NOTE(review): the surrounding switch statement, braces, break statements
// and some case labels (e.g. EC_COMPLETE before handleComplete) are elided in
// this extraction.
void MediaObject::handleEvents(Graph graph, long eventCode, long param1)
959
QString eventDescription;
962
case EC_BUFFERING_DATA:
963
if (graph == currentGraph()->graph()) {
964
m_buffering = param1;
965
emit stateChanged(state(), m_state);
968
case EC_LENGTH_CHANGED:
969
if (graph == currentGraph()->graph()) {
970
emit totalTimeChanged( totalTime() );
975
handleComplete(graph);
978
#ifndef QT_NO_PHONON_VIDEO
979
case EC_VIDEO_SIZE_CHANGED:
980
if (graph == currentGraph()->graph()) {
981
updateVideoGeometry();
984
#endif //QT_NO_PHONON_VIDEO
987
// The remaining cases are debug-only logging of DirectShow event codes.
case EC_ACTIVATE: qDebug() << "EC_ACTIVATE: A video window is being " << (param1 ? "ACTIVATED" : "DEACTIVATED"); break;
988
case EC_BUILT: qDebug() << "EC_BUILT: Send by the Video Control when a graph has been built. Not forwarded to applications."; break;
989
case EC_CLOCK_CHANGED: qDebug() << "EC_CLOCK_CHANGED"; break;
990
case EC_CLOCK_UNSET: qDebug() << "EC_CLOCK_UNSET: The clock provider was disconnected."; break;
991
case EC_CODECAPI_EVENT: qDebug() << "EC_CODECAPI_EVENT: Sent by an encoder to signal an encoding event."; break;
992
case EC_DEVICE_LOST: qDebug() << "EC_DEVICE_LOST: A Plug and Play device was removed or has become available again."; break;
993
case EC_DISPLAY_CHANGED: qDebug() << "EC_DISPLAY_CHANGED: The display mode has changed."; break;
994
case EC_END_OF_SEGMENT: qDebug() << "EC_END_OF_SEGMENT: The end of a segment has been reached."; break;
995
case EC_ERROR_STILLPLAYING: qDebug() << "EC_ERROR_STILLPLAYING: An asynchronous command to run the graph has failed."; break;
996
case EC_ERRORABORT: qDebug() << "EC_ERRORABORT: An operation was aborted because of an error."; break;
997
case EC_EXTDEVICE_MODE_CHANGE: qDebug() << "EC_EXTDEVICE_MODE_CHANGE: Not supported."; break;
998
case EC_FULLSCREEN_LOST: qDebug() << "EC_FULLSCREEN_LOST: The video renderer is switching out of full-screen mode."; break;
999
case EC_GRAPH_CHANGED: qDebug() << "EC_GRAPH_CHANGED: The filter graph has changed."; break;
1000
case EC_NEED_RESTART: qDebug() << "EC_NEED_RESTART: A filter is requesting that the graph be restarted."; break;
1001
case EC_NOTIFY_WINDOW: qDebug() << "EC_NOTIFY_WINDOW: Notifies a filter of the video renderer's window."; break;
1002
case EC_OLE_EVENT: qDebug() << "EC_OLE_EVENT: A filter is passing a text string to the application."; break;
1003
case EC_OPENING_FILE: qDebug() << "EC_OPENING_FILE: The graph is opening a file, or has finished opening a file."; break;
1004
case EC_PALETTE_CHANGED: qDebug() << "EC_PALETTE_CHANGED: The video palette has changed."; break;
1005
case EC_PAUSED: qDebug() << "EC_PAUSED: A pause request has completed."; break;
1006
case EC_PREPROCESS_COMPLETE: qDebug() << "EC_PREPROCESS_COMPLETE: Sent by the WM ASF Writer filter when it completes the pre-processing for multipass encoding."; break;
1007
case EC_QUALITY_CHANGE: qDebug() << "EC_QUALITY_CHANGE: The graph is dropping samples, for quality control."; break;
1008
case EC_REPAINT: qDebug() << "EC_REPAINT: A video renderer requires a repaint."; break;
1009
case EC_SEGMENT_STARTED: qDebug() << "EC_SEGMENT_STARTED: A new segment has started."; break;
1010
case EC_SHUTTING_DOWN: qDebug() << "EC_SHUTTING_DOWN: The filter graph is shutting down, prior to being destroyed."; break;
1011
case EC_SNDDEV_IN_ERROR: qDebug() << "EC_SNDDEV_IN_ERROR: A device error has occurred in an audio capture filter."; break;
1012
case EC_SNDDEV_OUT_ERROR: qDebug() << "EC_SNDDEV_OUT_ERROR: A device error has occurred in an audio renderer filter."; break;
1013
case EC_STARVATION: qDebug() << "EC_STARVATION: A filter is not receiving enough data."; break;
1014
case EC_STATE_CHANGE: qDebug() << "EC_STATE_CHANGE: The filter graph has changed state."; break;
1015
case EC_STEP_COMPLETE: qDebug() << "EC_STEP_COMPLETE: A filter performing frame stepping has stepped the specified number of frames."; break;
1016
case EC_STREAM_CONTROL_STARTED: qDebug() << "EC_STREAM_CONTROL_STARTED: A stream-control start command has taken effect."; break;
1017
case EC_STREAM_CONTROL_STOPPED: qDebug() << "EC_STREAM_CONTROL_STOPPED: A stream-control stop command has taken effect."; break;
1018
case EC_STREAM_ERROR_STILLPLAYING: qDebug() << "EC_STREAM_ERROR_STILLPLAYING: An error has occurred in a stream. The stream is still playing."; break;
1019
case EC_STREAM_ERROR_STOPPED: qDebug() << "EC_STREAM_ERROR_STOPPED: A stream has stopped because of an error."; break;
1020
case EC_TIMECODE_AVAILABLE: qDebug() << "EC_TIMECODE_AVAILABLE: Not supported."; break;
1021
case EC_UNBUILT: qDebug() << "Sent by the Video Control when a graph has been torn down. Not forwarded to applications."; break;
1022
case EC_USERABORT: qDebug() << "EC_USERABORT: Send by the Video Control when a graph has been torn down. Not forwarded to applications."; break;
1023
case EC_VMR_RECONNECTION_FAILED: qDebug() << "EC_VMR_RECONNECTION_FAILED: Sent by the VMR-7 and the VMR-9 when it was unable to accept a dynamic format change request from the upstream decoder."; break;
1024
case EC_VMR_RENDERDEVICE_SET: qDebug() << "EC_VMR_RENDERDEVICE_SET: Sent when the VMR has selected its rendering mechanism."; break;
1025
case EC_VMR_SURFACE_FLIPPED: qDebug() << "EC_VMR_SURFACE_FLIPPED: Sent when the VMR-7's allocator presenter has called the DirectDraw Flip method on the surface being presented."; break;
1026
case EC_WINDOW_DESTROYED: qDebug() << "EC_WINDOW_DESTROYED: The video renderer was destroyed or removed from the graph"; break;
1027
case EC_WMT_EVENT: qDebug() << "EC_WMT_EVENT: Sent by the Windows Media Format SDK when an application uses the ASF Reader filter to play ASF files protected by digital rights management (DRM)."; break;
1028
case EC_WMT_INDEX_EVENT: qDebug() << "EC_WMT_INDEX_EVENT: Sent by the Windows Media Format SDK when an application uses the ASF Writer to index Windows Media Video files."; break;
1030
//documented by Microsoft but not supported in the Platform SDK
1031
// case EC_BANDWIDTHCHANGE : qDebug() << "EC_BANDWIDTHCHANGE: not supported"; break;
1032
// case EC_CONTENTPROPERTY_CHANGED: qDebug() << "EC_CONTENTPROPERTY_CHANGED: not supported."; break;
1033
// case EC_EOS_SOON: qDebug() << "EC_EOS_SOON: not supported"; break;
1034
// case EC_ERRORABORTEX: qDebug() << "EC_ERRORABORTEX: An operation was aborted because of an error."; break;
1035
// case EC_FILE_CLOSED: qDebug() << "EC_FILE_CLOSED: The source file was closed because of an unexpected event."; break;
1036
// case EC_LOADSTATUS: qDebug() << "EC_LOADSTATUS: Notifies the application of progress when opening a network file."; break;
1037
// case EC_MARKER_HIT: qDebug() << "EC_MARKER_HIT: not supported."; break;
1038
// case EC_NEW_PIN: qDebug() << "EC_NEW_PIN: not supported."; break;
1039
// case EC_PLEASE_REOPEN: qDebug() << "EC_PLEASE_REOPEN: The source file has changed."; break;
1040
// case EC_PROCESSING_LATENCY: qDebug() << "EC_PROCESSING_LATENCY: Indicates the amount of time that a component is taking to process each sample."; break;
1041
// case EC_RENDER_FINISHED: qDebug() << "EC_RENDER_FINISHED: Not supported."; break;
1042
// case EC_SAMPLE_LATENCY: qDebug() << "EC_SAMPLE_LATENCY: Specifies how far behind schedule a component is for processing samples."; break;
1043
// case EC_SAMPLE_NEEDED: qDebug() << "EC_SAMPLE_NEEDED: Requests a new input sample from the Enhanced Video Renderer (EVR) filter."; break;
1044
// case EC_SCRUB_TIME: qDebug() << "EC_SCRUB_TIME: Specifies the time stamp for the most recent frame step."; break;
1045
// case EC_STATUS: qDebug() << "EC_STATUS: Contains two arbitrary status strings."; break;
1046
// case EC_VIDEOFRAMEREADY: qDebug() << "EC_VIDEOFRAMEREADY: A video frame is ready for display."; break;
1049
qDebug() << "Unknown event" << eventCode << "(" << param1 << ")";
1059
#ifndef QT_NO_PHONON_MEDIACONTROLLER
1060
//interface management
1061
// This backend only implements the title (track) addon interface.
bool MediaObject::hasInterface(Interface iface) const
1063
return iface == AddonInterface::TitleInterface;
1066
// Dispatches AddonInterface calls; only the title interface is supported
// (see hasInterface()).
// fixed: the parameter '&params' had been mangled to '¶ms' (the byte
// sequence "&para" was collapsed into the pilcrow character), which broke the
// signature and the 'params' uses below.
// NOTE(review): the surrounding switch statements, braces, some case labels
// (title/setTitle) and return statements are elided in this extraction; the
// bare numeric lines are residue of the original line numbers.
QVariant MediaObject::interfaceCall(Interface iface, int command, const QList<QVariant> &params)
1068
if (hasInterface(iface)) {
1072
case TitleInterface:
1075
case availableTitles:
1076
return _iface_availableTitles();
1078
return _iface_currentTitle();
1080
_iface_setCurrentTitle(params.first().toInt());
1082
case autoplayTitles:
1083
return m_autoplayTitles;
1084
case setAutoplayTitles:
1085
m_autoplayTitles = params.first().toBool();
1086
updateStopPosition();
1100
//this is called to set the time for the different titles
1101
// Absolute (whole-media) start position of `title` in ms; out-of-range
// titles are handled by the elided fallback below.
// NOTE(review): braces and the out-of-range return are elided in this
// extraction, as are braces/assignments in the functions below.
qint64 MediaObject::titleAbsolutePosition(int title) const
1103
if (title >= 0 && title < m_titles.count()) {
1104
return m_titles.at(title);
1110
// Stores the per-title absolute positions for the loaded source and
// announces a changed title count.
void MediaObject::setTitles(const QList<qint64> &titles)
1112
//this is called when the source is loaded
1113
const bool emitSignal = m_titles.count() != titles.count();
1116
emit availableTitlesChanged(titles.count());
1118
updateStopPosition();
1122
// Number of playable titles; m_titles also stores the end-of-media marker,
// hence the "- 1".
int MediaObject::_iface_availableTitles() const
1124
return m_titles.count() - 1;
1127
int MediaObject::_iface_currentTitle() const
1129
return m_currentTitle;
1132
// Changes the current title; `bseek` selects whether to seek to the title's
// start (the seek itself is elided in this extraction).
void MediaObject::_iface_setCurrentTitle(int title, bool bseek)
1135
qDebug() << "_iface_setCurrentTitle" << title;
1137
const int oldTitle = m_currentTitle;
1138
m_currentTitle = title;
1139
updateStopPosition();
1141
//let's seek to the beginning of the song
1146
if (oldTitle != title) {
1147
emit titleChanged(title);
1148
emit totalTimeChanged(totalTime());
1153
// With autoplay off, playback must stop at the end of the current title;
// otherwise the graph runs to the end of the media (-1 = no stop position).
void MediaObject::updateStopPosition()
1155
if (!m_autoplayTitles && m_currentTitle < _iface_availableTitles() - 1) {
1156
//stop position is set to the end of the track
1157
currentGraph()->setStopPosition(titleAbsolutePosition(m_currentTitle+1));
1159
//stop position is set to the end
1160
currentGraph()->setStopPosition(-1);
1163
#endif //QT_NO_PHONON_MEDIACONTROLLER
1165
// Replaces a filter in the graph identified by `index` — routed to either the
// current or the preloading ("next") graph.
// NOTE(review): braces and the else keyword are elided in this extraction.
void MediaObject::switchFilters(int index, Filter oldFilter, Filter newFilter)
1167
if (currentGraph()->index() == index) {
1168
currentGraph()->switchFilters(oldFilter, newFilter);
1170
nextGraph()->switchFilters(oldFilter, newFilter);
1181
#include "moc_mediaobject.cpp"