Qt 6.x
The Qt SDK
Loading...
Searching...
No Matches
evrcustompresenter.cpp
Go to the documentation of this file.
1// Copyright (C) 2016 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5
7#include "evrhelpers_p.h"
8#include <private/qwindowsmultimediautils_p.h>
9#include <private/qplatformvideosink_p.h>
10#include <private/qwindowsmfdefs_p.h>
11
12#include <rhi/qrhi.h>
13
14#include <QtCore/qmutex.h>
15#include <QtCore/qvarlengtharray.h>
16#include <QtCore/qrect.h>
17#include <qthread.h>
18#include <qcoreapplication.h>
19#include <qmath.h>
20#include <QtCore/qdebug.h>
21
22#include <mutex>
23
24#include <float.h>
25#include <evcode.h>
26
28
29const static MFRatio g_DefaultFrameRate = { 30, 1 };
30static const DWORD SCHEDULER_TIMEOUT = 5000;
31static const MFTIME ONE_SECOND = 10000000;
32static const LONG ONE_MSEC = 1000;
33
34// Function declarations.
35static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG& hnsSampleTime, const LONGLONG& hnsDuration);
36static HRESULT clearDesiredSampleTime(IMFSample *sample);
37static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect& nrcSource);
39
40static inline LONG MFTimeToMsec(const LONGLONG& time)
41{
42 return (LONG)(time / (ONE_SECOND / ONE_MSEC));
43}
44
45bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
46{
47 if (!evr || !presenter)
48 return false;
49
50 HRESULT result = E_FAIL;
51
52 IMFVideoRenderer *renderer = NULL;
53 if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) {
54 result = renderer->InitializeRenderer(NULL, presenter);
55 renderer->Release();
56 }
57
58 return result == S_OK;
59}
60
62{
63public:
65 : QEvent(QEvent::Type(EVRCustomPresenter::PresentSample))
66 , m_sample(sample)
67 {
68 if (m_sample)
69 m_sample->AddRef();
70 }
71
73 {
74 if (m_sample)
75 m_sample->Release();
76 }
77
78 IMFSample *sample() const { return m_sample; }
79
80private:
81 IMFSample *m_sample;
82};
83
85 : m_presenter(presenter)
86 , m_clock(NULL)
87 , m_threadID(0)
88 , m_schedulerThread(0)
89 , m_threadReadyEvent(0)
90 , m_flushEvent(0)
91 , m_playbackRate(1.0f)
92 , m_perFrameInterval(0)
93 , m_perFrame_1_4th(0)
94 , m_lastSampleTime(0)
95{
96}
97
99{
100 qt_evr_safe_release(&m_clock);
101 for (int i = 0; i < m_scheduledSamples.size(); ++i)
102 m_scheduledSamples[i]->Release();
103 m_scheduledSamples.clear();
104}
105
106void Scheduler::setFrameRate(const MFRatio& fps)
107{
108 UINT64 AvgTimePerFrame = 0;
109
110 // Convert to a duration.
111 MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
112
113 m_perFrameInterval = (MFTIME)AvgTimePerFrame;
114
115 // Calculate 1/4th of this value, because we use it frequently.
116 m_perFrame_1_4th = m_perFrameInterval / 4;
117}
118
120{
121 if (m_schedulerThread)
122 return E_UNEXPECTED;
123
124 HRESULT hr = S_OK;
125 DWORD dwID = 0;
126 HANDLE hObjects[2];
127 DWORD dwWait = 0;
128
129 if (m_clock)
130 m_clock->Release();
131 m_clock = clock;
132 if (m_clock)
133 m_clock->AddRef();
134
135 // Set a high the timer resolution (ie, short timer period).
136 timeBeginPeriod(1);
137
138 // Create an event to wait for the thread to start.
139 m_threadReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
140 if (!m_threadReadyEvent) {
141 hr = HRESULT_FROM_WIN32(GetLastError());
142 goto done;
143 }
144
145 // Create an event to wait for flush commands to complete.
146 m_flushEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
147 if (!m_flushEvent) {
148 hr = HRESULT_FROM_WIN32(GetLastError());
149 goto done;
150 }
151
152 // Create the scheduler thread.
153 m_schedulerThread = CreateThread(NULL, 0, schedulerThreadProc, (LPVOID)this, 0, &dwID);
154 if (!m_schedulerThread) {
155 hr = HRESULT_FROM_WIN32(GetLastError());
156 goto done;
157 }
158
159 // Wait for the thread to signal the "thread ready" event.
160 hObjects[0] = m_threadReadyEvent;
161 hObjects[1] = m_schedulerThread;
162 dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE); // Wait for EITHER of these handles.
163 if (WAIT_OBJECT_0 != dwWait) {
164 // The thread terminated early for some reason. This is an error condition.
165 CloseHandle(m_schedulerThread);
166 m_schedulerThread = NULL;
167
168 hr = E_UNEXPECTED;
169 goto done;
170 }
171
172 m_threadID = dwID;
173
174done:
175 // Regardless success/failure, we are done using the "thread ready" event.
176 if (m_threadReadyEvent) {
177 CloseHandle(m_threadReadyEvent);
178 m_threadReadyEvent = NULL;
179 }
180 return hr;
181}
182
184{
185 if (!m_schedulerThread)
186 return S_OK;
187
188 // Ask the scheduler thread to exit.
189 PostThreadMessage(m_threadID, Terminate, 0, 0);
190
191 // Wait for the thread to exit.
192 WaitForSingleObject(m_schedulerThread, INFINITE);
193
194 // Close handles.
195 CloseHandle(m_schedulerThread);
196 m_schedulerThread = NULL;
197
198 CloseHandle(m_flushEvent);
199 m_flushEvent = NULL;
200
201 // Discard samples.
202 m_mutex.lock();
203 for (int i = 0; i < m_scheduledSamples.size(); ++i)
204 m_scheduledSamples[i]->Release();
205 m_scheduledSamples.clear();
206 m_mutex.unlock();
207
208 // Restore the timer resolution.
209 timeEndPeriod(1);
210
211 return S_OK;
212}
213
215{
216 if (m_schedulerThread) {
217 // Ask the scheduler thread to flush.
218 PostThreadMessage(m_threadID, Flush, 0 , 0);
219
220 // Wait for the scheduler thread to signal the flush event,
221 // OR for the thread to terminate.
222 HANDLE objects[] = { m_flushEvent, m_schedulerThread };
223
224 WaitForMultipleObjects(ARRAYSIZE(objects), objects, FALSE, SCHEDULER_TIMEOUT);
225 }
226
227 return S_OK;
228}
229
231{
232 QMutexLocker locker(&m_mutex);
233 return m_scheduledSamples.count() > 0;
234}
235
236HRESULT Scheduler::scheduleSample(IMFSample *sample, bool presentNow)
237{
238 if (!m_schedulerThread)
239 return MF_E_NOT_INITIALIZED;
240
241 HRESULT hr = S_OK;
242 DWORD dwExitCode = 0;
243
244 GetExitCodeThread(m_schedulerThread, &dwExitCode);
245 if (dwExitCode != STILL_ACTIVE)
246 return E_FAIL;
247
248 if (presentNow || !m_clock) {
249 m_presenter->presentSample(sample);
250 } else {
251 // Queue the sample and ask the scheduler thread to wake up.
252 m_mutex.lock();
253 sample->AddRef();
254 m_scheduledSamples.enqueue(sample);
255 m_mutex.unlock();
256
257 if (SUCCEEDED(hr))
258 PostThreadMessage(m_threadID, Schedule, 0, 0);
259 }
260
261 return hr;
262}
263
265{
266 HRESULT hr = S_OK;
267 LONG wait = 0;
268 IMFSample *sample = NULL;
269
270 // Process samples until the queue is empty or until the wait time > 0.
271 while (!m_scheduledSamples.isEmpty()) {
272 m_mutex.lock();
273 sample = m_scheduledSamples.dequeue();
274 m_mutex.unlock();
275
276 // Process the next sample in the queue. If the sample is not ready
277 // for presentation. the value returned in wait is > 0, which
278 // means the scheduler should sleep for that amount of time.
279
280 hr = processSample(sample, &wait);
281 qt_evr_safe_release(&sample);
282
283 if (FAILED(hr) || wait > 0)
284 break;
285 }
286
287 // If the wait time is zero, it means we stopped because the queue is
288 // empty (or an error occurred). Set the wait time to infinite; this will
289 // make the scheduler thread sleep until it gets another thread message.
290 if (wait == 0)
291 wait = INFINITE;
292
293 *nextSleep = wait;
294 return hr;
295}
296
// Decide whether a single sample is ready for presentation.
// On return, *pNextSleep is 0 if the sample was handled immediately, or a
// sleep time in milliseconds if the sample was re-queued because its
// presentation time has not arrived yet.
HRESULT Scheduler::processSample(IMFSample *sample, LONG *pNextSleep)
{
    HRESULT hr = S_OK;

    LONGLONG hnsPresentationTime = 0;
    LONGLONG hnsTimeNow = 0;
    MFTIME hnsSystemTime = 0;

    bool presentNow = true;
    LONG nextSleep = 0;

    if (m_clock) {
        // Get the sample's time stamp. It is valid for a sample to
        // have no time stamp.
        hr = sample->GetSampleTime(&hnsPresentationTime);

        // Get the clock time. (But if the sample does not have a time stamp,
        // we don't need the clock time.)
        if (SUCCEEDED(hr))
            hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);

        // Calculate the time until the sample's presentation time.
        // A negative value means the sample is late.
        LONGLONG hnsDelta = hnsPresentationTime - hnsTimeNow;
        if (m_playbackRate < 0) {
            // For reverse playback, the clock runs backward. Therefore, the
            // delta is reversed.
            hnsDelta = - hnsDelta;
        }

        if (hnsDelta < - m_perFrame_1_4th) {
            // This sample is late (more than 1/4 frame behind): present it
            // immediately anyway rather than dropping it.
            presentNow = true;
        } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
            // This sample is still too early (more than 3/4 frame ahead).
            // Go to sleep until just before its presentation time.
            nextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));

            // Adjust the sleep time for the clock rate. (The presentation clock runs
            // at m_fRate, but sleeping uses the system clock.)
            if (m_playbackRate != 0)
                nextSleep = (LONG)(nextSleep / qFabs(m_playbackRate));

            // Don't present yet.
            presentNow = false;
        }
    }

    if (presentNow) {
        m_presenter->presentSample(sample);
    } else {
        // The sample is not ready yet. Return it to the front of the queue,
        // taking an extra reference that the queue now owns.
        m_mutex.lock();
        sample->AddRef();
        m_scheduledSamples.prepend(sample);
        m_mutex.unlock();
    }

    *pNextSleep = nextSleep;

    return hr;
}
358
359DWORD WINAPI Scheduler::schedulerThreadProc(LPVOID parameter)
360{
361 Scheduler* scheduler = reinterpret_cast<Scheduler*>(parameter);
362 if (!scheduler)
363 return -1;
364 return scheduler->schedulerThreadProcPrivate();
365}
366
// Worker loop of the scheduler thread. Waits for thread messages
// (Schedule / Flush / Terminate) or for the next sample's presentation
// time, and presents queued samples at the right moment.
// Returns 0 on clean exit, 1 if sample processing failed.
DWORD Scheduler::schedulerThreadProcPrivate()
{
    HRESULT hr = S_OK;
    MSG msg;
    LONG wait = INFINITE;
    bool exitThread = false;

    // Force the system to create a message queue for this thread.
    // (See MSDN documentation for PostThreadMessage.)
    PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);

    // Signal to the scheduler that the thread is ready.
    SetEvent(m_threadReadyEvent);

    while (!exitThread) {
        // Wait for a thread message OR until the wait time expires.
        DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);

        if (result == WAIT_TIMEOUT) {
            // If we timed out, then process the samples in the queue
            hr = processSamplesInQueue(&wait);
            if (FAILED(hr))
                exitThread = true;
        }

        // Drain every pending thread message before waiting again.
        while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
            bool processSamples = true;

            switch (msg.message) {
            case Terminate:
                exitThread = true;
                break;
            case Flush:
                // Flushing: Clear the sample queue and set the event.
                m_mutex.lock();
                for (int i = 0; i < m_scheduledSamples.size(); ++i)
                    m_scheduledSamples[i]->Release();
                m_scheduledSamples.clear();
                m_mutex.unlock();
                wait = INFINITE;
                SetEvent(m_flushEvent);
                break;
            case Schedule:
                // Process as many samples as we can.
                // NOTE(review): processSamples is re-initialized to true for
                // every message, so this guard never skips; kept as-is from
                // the original sample code.
                if (processSamples) {
                    hr = processSamplesInQueue(&wait);
                    if (FAILED(hr))
                        exitThread = true;
                    processSamples = (wait != (LONG)INFINITE);
                }
                break;
            }
        }

    }

    return (SUCCEEDED(hr) ? 0 : 1);
}
425
426
428 : m_initialized(false)
429{
430}
431
433{
434 clear();
435}
436
438{
439 QMutexLocker locker(&m_mutex);
440
441 if (!m_initialized)
442 return MF_E_NOT_INITIALIZED;
443
444 if (m_videoSampleQueue.isEmpty())
445 return MF_E_SAMPLEALLOCATOR_EMPTY;
446
447 // Get a sample from the allocated queue.
448
449 // It doesn't matter if we pull them from the head or tail of the list,
450 // but when we get it back, we want to re-insert it onto the opposite end.
451 // (see ReturnSample)
452
453 IMFSample *taken = m_videoSampleQueue.takeFirst();
454
455 // Give the sample to the caller.
456 *sample = taken;
457 (*sample)->AddRef();
458
459 taken->Release();
460
461 return S_OK;
462}
463
465{
466 QMutexLocker locker(&m_mutex);
467
468 if (!m_initialized)
469 return MF_E_NOT_INITIALIZED;
470
471 m_videoSampleQueue.append(sample);
472 sample->AddRef();
473
474 return S_OK;
475}
476
478{
479 QMutexLocker locker(&m_mutex);
480
481 if (m_initialized)
482 return MF_E_INVALIDREQUEST;
483
484 // Move these samples into our allocated queue.
485 for (auto sample : std::as_const(samples)) {
486 sample->AddRef();
487 m_videoSampleQueue.append(sample);
488 }
489
490 m_initialized = true;
491
492 for (auto sample : std::as_const(samples))
493 sample->Release();
494 samples.clear();
495 return S_OK;
496}
497
499{
500 QMutexLocker locker(&m_mutex);
501
502 for (auto sample : std::as_const(m_videoSampleQueue))
503 sample->Release();
504 m_videoSampleQueue.clear();
505 m_initialized = false;
506
507 return S_OK;
508}
509
510
512 : QObject()
513 , m_sampleFreeCB(this, &EVRCustomPresenter::onSampleFree)
514 , m_refCount(1)
515 , m_renderState(RenderShutdown)
516 , m_scheduler(this)
517 , m_tokenCounter(0)
518 , m_sampleNotify(false)
519 , m_repaint(false)
520 , m_prerolled(false)
521 , m_endStreaming(false)
522 , m_playbackRate(1.0f)
523 , m_presentEngine(new D3DPresentEngine(sink))
524 , m_clock(0)
525 , m_mixer(0)
526 , m_mediaEventSink(0)
527 , m_mediaType(0)
528 , m_videoSink(0)
529 , m_canRenderToSurface(false)
530 , m_positionOffset(0)
531{
532 // Initial source rectangle = (0,0,1,1)
533 m_sourceRect.top = 0;
534 m_sourceRect.left = 0;
535 m_sourceRect.bottom = 1;
536 m_sourceRect.right = 1;
537
538 setSink(sink);
539}
540
542{
543 m_scheduler.flush();
544 m_scheduler.stopScheduler();
545 m_samplePool.clear();
546
547 qt_evr_safe_release(&m_clock);
548 qt_evr_safe_release(&m_mixer);
549 qt_evr_safe_release(&m_mediaEventSink);
550 qt_evr_safe_release(&m_mediaType);
551
552 delete m_presentEngine;
553}
554
556{
557 if (!ppvObject)
558 return E_POINTER;
559 if (riid == IID_IMFGetService) {
560 *ppvObject = static_cast<IMFGetService*>(this);
561 } else if (riid == IID_IMFTopologyServiceLookupClient) {
562 *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
563 } else if (riid == IID_IMFVideoDeviceID) {
564 *ppvObject = static_cast<IMFVideoDeviceID*>(this);
565 } else if (riid == IID_IMFVideoPresenter) {
566 *ppvObject = static_cast<IMFVideoPresenter*>(this);
567 } else if (riid == IID_IMFRateSupport) {
568 *ppvObject = static_cast<IMFRateSupport*>(this);
569 } else if (riid == IID_IUnknown) {
570 *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
571 } else if (riid == IID_IMFClockStateSink) {
572 *ppvObject = static_cast<IMFClockStateSink*>(this);
573 } else {
574 *ppvObject = NULL;
575 return E_NOINTERFACE;
576 }
577 AddRef();
578 return S_OK;
579}
580
// IUnknown::AddRef — atomically increment the COM reference count.
ULONG EVRCustomPresenter::AddRef()
{
    return InterlockedIncrement(&m_refCount);
}
585
// IUnknown::Release — atomically decrement the COM reference count.
// When the count reaches zero the object is destroyed via deleteLater(),
// so deletion happens on the QObject's thread rather than the caller's.
ULONG EVRCustomPresenter::Release()
{
    ULONG uCount = InterlockedDecrement(&m_refCount);
    if (uCount == 0)
        deleteLater();
    return uCount;
}
593
594HRESULT EVRCustomPresenter::GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject)
595{
596 HRESULT hr = S_OK;
597
598 if (!ppvObject)
599 return E_POINTER;
600
601 // The only service GUID that we support is MR_VIDEO_RENDER_SERVICE.
602 if (guidService != MR_VIDEO_RENDER_SERVICE)
603 return MF_E_UNSUPPORTED_SERVICE;
604
605 // First try to get the service interface from the D3DPresentEngine object.
606 hr = m_presentEngine->getService(guidService, riid, ppvObject);
607 if (FAILED(hr))
608 // Next, check if this object supports the interface.
609 hr = QueryInterface(riid, ppvObject);
610
611 return hr;
612}
613
615{
616 if (!deviceID)
617 return E_POINTER;
618
619 *deviceID = IID_IDirect3DDevice9;
620
621 return S_OK;
622}
623
// IMFTopologyServiceLookupClient: called by the EVR so the presenter can
// acquire the interfaces it needs (presentation clock, mixer, event sink).
// Fails with MF_E_INVALIDREQUEST if called while playing or paused.
HRESULT EVRCustomPresenter::InitServicePointers(IMFTopologyServiceLookup *lookup)
{
    if (!lookup)
        return E_POINTER;

    HRESULT hr = S_OK;
    DWORD objectCount = 0;

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    // Do not allow initializing when playing or paused.
    if (isActive())
        return MF_E_INVALIDREQUEST;

    // Drop any interfaces acquired by a previous call.
    qt_evr_safe_release(&m_clock);
    qt_evr_safe_release(&m_mixer);
    qt_evr_safe_release(&m_mediaEventSink);

    // Ask for the clock. Optional, because the EVR might not have a clock.
    // (The result is deliberately ignored; m_clock stays NULL on failure.)
    objectCount = 1;

    lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                          MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),
                          &objectCount
                          );

    // Ask for the mixer. (Required.)
    objectCount = 1;

    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                               MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),
                               &objectCount
                               );

    if (FAILED(hr))
        return hr;

    // Make sure that we can work with this mixer.
    hr = configureMixer(m_mixer);
    if (FAILED(hr))
        return hr;

    // Ask for the EVR's event-sink interface. (Required.)
    objectCount = 1;

    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                               MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),
                               &objectCount
                               );

    // Successful initialization leaves the presenter in the stopped state.
    if (SUCCEEDED(hr))
        m_renderState = RenderStopped;

    return hr;
}
679
681{
682 // Enter the shut-down state.
683 m_mutex.lock();
684
685 m_renderState = RenderShutdown;
686
687 m_mutex.unlock();
688
689 // Flush any samples that were scheduled.
690 flush();
691
692 // Clear the media type and release related resources.
693 setMediaType(NULL);
694
695 // Release all services that were acquired from InitServicePointers.
696 qt_evr_safe_release(&m_clock);
697 qt_evr_safe_release(&m_mixer);
698 qt_evr_safe_release(&m_mediaEventSink);
699
700 return S_OK;
701}
702
704{
705 return m_presentEngine->isValid() && m_canRenderToSurface;
706}
707
709{
710 HRESULT hr = S_OK;
711
712 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
713
714 hr = checkShutdown();
715 if (FAILED(hr))
716 return hr;
717
718 switch (message) {
719 // Flush all pending samples.
720 case MFVP_MESSAGE_FLUSH:
721 hr = flush();
722 break;
723
724 // Renegotiate the media type with the mixer.
725 case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
726 hr = renegotiateMediaType();
727 break;
728
729 // The mixer received a new input sample.
730 case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
731 hr = processInputNotify();
732 break;
733
734 // Streaming is about to start.
735 case MFVP_MESSAGE_BEGINSTREAMING:
736 hr = beginStreaming();
737 break;
738
739 // Streaming has ended. (The EVR has stopped.)
740 case MFVP_MESSAGE_ENDSTREAMING:
741 hr = endStreaming();
742 break;
743
744 // All input streams have ended.
745 case MFVP_MESSAGE_ENDOFSTREAM:
746 // Set the EOS flag.
747 m_endStreaming = true;
748 // Check if it's time to send the EC_COMPLETE event to the EVR.
749 hr = checkEndOfStream();
750 break;
751
752 // Frame-stepping is starting.
753 case MFVP_MESSAGE_STEP:
754 hr = prepareFrameStep(DWORD(param));
755 break;
756
757 // Cancels frame-stepping.
758 case MFVP_MESSAGE_CANCELSTEP:
759 hr = cancelFrameStep();
760 break;
761
762 default:
763 hr = E_INVALIDARG; // Unknown message. This case should never occur.
764 break;
765 }
766
767 return hr;
768}
769
771{
772 HRESULT hr = S_OK;
773
774 if (!mediaType)
775 return E_POINTER;
776
777 *mediaType = NULL;
778
779 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
780
781 hr = checkShutdown();
782 if (FAILED(hr))
783 return hr;
784
785 if (!m_mediaType)
786 return MF_E_NOT_INITIALIZED;
787
788 return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));
789}
790
791HRESULT EVRCustomPresenter::OnClockStart(MFTIME, LONGLONG clockStartOffset)
792{
793 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
794
795 // We cannot start after shutdown.
796 HRESULT hr = checkShutdown();
797 if (FAILED(hr))
798 return hr;
799
800 // Check if the clock is already active (not stopped).
801 if (isActive()) {
802 m_renderState = RenderStarted;
803
804 // If the clock position changes while the clock is active, it
805 // is a seek request. We need to flush all pending samples.
806 if (clockStartOffset != QMM_PRESENTATION_CURRENT_POSITION)
807 flush();
808 } else {
809 m_renderState = RenderStarted;
810
811 // The clock has started from the stopped state.
812
813 // Possibly we are in the middle of frame-stepping OR have samples waiting
814 // in the frame-step queue. Deal with these two cases first:
815 hr = startFrameStep();
816 if (FAILED(hr))
817 return hr;
818 }
819
820 // Now try to get new output samples from the mixer.
821 processOutputLoop();
822
823 return hr;
824}
825
827{
828 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
829
830 HRESULT hr = checkShutdown();
831 if (FAILED(hr))
832 return hr;
833
834 // The EVR calls OnClockRestart only while paused.
835
836 m_renderState = RenderStarted;
837
838 // Possibly we are in the middle of frame-stepping OR we have samples waiting
839 // in the frame-step queue. Deal with these two cases first:
840 hr = startFrameStep();
841 if (FAILED(hr))
842 return hr;
843
844 // Now resume the presentation loop.
845 processOutputLoop();
846
847 return hr;
848}
849
851{
852 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
853
854 HRESULT hr = checkShutdown();
855 if (FAILED(hr))
856 return hr;
857
858 if (m_renderState != RenderStopped) {
859 m_renderState = RenderStopped;
860 flush();
861
862 // If we are in the middle of frame-stepping, cancel it now.
863 if (m_frameStep.state != FrameStepNone)
864 cancelFrameStep();
865 }
866
867 return S_OK;
868}
869
871{
872 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
873
874 // We cannot pause the clock after shutdown.
875 HRESULT hr = checkShutdown();
876
877 if (SUCCEEDED(hr))
878 m_renderState = RenderPaused;
879
880 return hr;
881}
882
884{
885 // Note:
886 // The presenter reports its maximum rate through the IMFRateSupport interface.
887 // Here, we assume that the EVR honors the maximum rate.
888
889 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
890
891 HRESULT hr = checkShutdown();
892 if (FAILED(hr))
893 return hr;
894
895 // If the rate is changing from zero (scrubbing) to non-zero, cancel the
896 // frame-step operation.
897 if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
898 cancelFrameStep();
899 for (auto sample : std::as_const(m_frameStep.samples))
900 sample->Release();
901 m_frameStep.samples.clear();
902 }
903
904 m_playbackRate = rate;
905
906 // Tell the scheduler about the new rate.
907 m_scheduler.setClockRate(rate);
908
909 return S_OK;
910}
911
912HRESULT EVRCustomPresenter::GetSlowestRate(MFRATE_DIRECTION, BOOL, float *rate)
913{
914 if (!rate)
915 return E_POINTER;
916
917 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
918
919 HRESULT hr = checkShutdown();
920
921 if (SUCCEEDED(hr)) {
922 // There is no minimum playback rate, so the minimum is zero.
923 *rate = 0;
924 }
925
926 return S_OK;
927}
928
929HRESULT EVRCustomPresenter::GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate)
930{
931 if (!rate)
932 return E_POINTER;
933
934 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
935
936 float maxRate = 0.0f;
937
938 HRESULT hr = checkShutdown();
939 if (FAILED(hr))
940 return hr;
941
942 // Get the maximum *forward* rate.
943 maxRate = getMaxRate(thin);
944
945 // For reverse playback, it's the negative of maxRate.
946 if (direction == MFRATE_REVERSE)
947 maxRate = -maxRate;
948
949 *rate = maxRate;
950
951 return S_OK;
952}
953
954HRESULT EVRCustomPresenter::IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate)
955{
956 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
957
958 float maxRate = 0.0f;
959 float nearestRate = rate; // If we support rate, that is the nearest.
960
961 HRESULT hr = checkShutdown();
962 if (FAILED(hr))
963 return hr;
964
965 // Find the maximum forward rate.
966 // Note: We have no minimum rate (that is, we support anything down to 0).
967 maxRate = getMaxRate(thin);
968
969 if (qFabs(rate) > maxRate) {
970 // The (absolute) requested rate exceeds the maximum rate.
971 hr = MF_E_UNSUPPORTED_RATE;
972
973 // The nearest supported rate is maxRate.
974 nearestRate = maxRate;
975 if (rate < 0) {
976 // Negative for reverse playback.
977 nearestRate = -nearestRate;
978 }
979 }
980
981 // Return the nearest supported rate.
982 if (nearestSupportedRate)
983 *nearestSupportedRate = nearestRate;
984
985 return hr;
986}
987
989{
990 const std::lock_guard<QRecursiveMutex> locker(m_mutex);
991
992 m_canRenderToSurface = false;
993
994 // check if we can render to the surface (compatible formats)
995 if (m_videoSink) {
996 for (int f = 0; f < QVideoFrameFormat::NPixelFormats; ++f) {
997 // ### set a better preference order
999 if (SUCCEEDED(m_presentEngine->checkFormat(qt_evr_D3DFormatFromPixelFormat(format)))) {
1000 m_canRenderToSurface = true;
1001 break;
1002 }
1003 }
1004 }
1005
1006 // TODO: if media type already set, renegotiate?
1007}
1008
1010{
1011 m_mutex.lock();
1012 m_videoSink = sink;
1013 m_presentEngine->setSink(sink);
1014 m_mutex.unlock();
1015
1017}
1018
1020{
1021 m_mutex.lock();
1022 m_cropRect = cropRect;
1023 m_mutex.unlock();
1024}
1025
// Apply the presenter's current source rectangle to the given mixer.
HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
{
    // Set the zoom rectangle (ie, the source clipping rectangle).
    return setMixerSourceRect(mixer, m_sourceRect);
}
1031
// Negotiate an output media type with the mixer: iterate the mixer's
// proposed output types until one is found that both the mixer and this
// presenter accept, then set it on both sides.
HRESULT EVRCustomPresenter::renegotiateMediaType()
{
    HRESULT hr = S_OK;
    bool foundMediaType = false;

    IMFMediaType *mixerType = NULL;
    IMFMediaType *optimalType = NULL;

    if (!m_mixer)
        return MF_E_INVALIDREQUEST;

    // Loop through all of the mixer's proposed output types.
    DWORD typeIndex = 0;
    while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {
        // Release the previous iteration's types before fetching new ones.
        qt_evr_safe_release(&mixerType);
        qt_evr_safe_release(&optimalType);

        // Step 1. Get the next media type supported by mixer.
        hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);
        if (FAILED(hr))
            break;

        // From now on, if anything in this loop fails, try the next type,
        // until we succeed or the mixer runs out of types.

        // Step 2. Check if we support this media type.
        if (SUCCEEDED(hr))
            hr = isMediaTypeSupported(mixerType);

        // Step 3. Adjust the mixer's type to match our requirements.
        if (SUCCEEDED(hr))
            hr = createOptimalVideoType(mixerType, &optimalType);

        // Step 4. Check if the mixer will accept this media type.
        if (SUCCEEDED(hr))
            hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);

        // Step 5. Try to set the media type on ourselves.
        if (SUCCEEDED(hr))
            hr = setMediaType(optimalType);

        // Step 6. Set output media type on mixer.
        if (SUCCEEDED(hr)) {
            hr = m_mixer->SetOutputType(0, optimalType, 0);

            // If something went wrong, clear the media type.
            if (FAILED(hr))
                setMediaType(NULL);
        }

        if (SUCCEEDED(hr))
            foundMediaType = true;
    }

    qt_evr_safe_release(&mixerType);
    qt_evr_safe_release(&optimalType);

    return hr;
}
1091
1092HRESULT EVRCustomPresenter::flush()
1093{
1094 m_prerolled = false;
1095
1096 // The scheduler might have samples that are waiting for
1097 // their presentation time. Tell the scheduler to flush.
1098
1099 // This call blocks until the scheduler threads discards all scheduled samples.
1100 m_scheduler.flush();
1101
1102 // Flush the frame-step queue.
1103 for (auto sample : std::as_const(m_frameStep.samples))
1104 sample->Release();
1105 m_frameStep.samples.clear();
1106
1107 if (m_renderState == RenderStopped && m_videoSink) {
1108 // Repaint with black.
1109 presentSample(NULL);
1110 }
1111
1112 return S_OK;
1113}
1114
1115HRESULT EVRCustomPresenter::processInputNotify()
1116{
1117 HRESULT hr = S_OK;
1118
1119 // Set the flag that says the mixer has a new sample.
1120 m_sampleNotify = true;
1121
1122 if (!m_mediaType) {
1123 // We don't have a valid media type yet.
1124 hr = MF_E_TRANSFORM_TYPE_NOT_SET;
1125 } else {
1126 // Try to process an output sample.
1127 processOutputLoop();
1128 }
1129 return hr;
1130}
1131
1132HRESULT EVRCustomPresenter::beginStreaming()
1133{
1134 HRESULT hr = S_OK;
1135
1136 // Start the scheduler thread.
1137 hr = m_scheduler.startScheduler(m_clock);
1138
1139 return hr;
1140}
1141
1142HRESULT EVRCustomPresenter::endStreaming()
1143{
1144 HRESULT hr = S_OK;
1145
1146 // Stop the scheduler thread.
1147 hr = m_scheduler.stopScheduler();
1148
1149 return hr;
1150}
1151
1152HRESULT EVRCustomPresenter::checkEndOfStream()
1153{
1154 if (!m_endStreaming) {
1155 // The EVR did not send the MFVP_MESSAGE_ENDOFSTREAM message.
1156 return S_OK;
1157 }
1158
1159 if (m_sampleNotify) {
1160 // The mixer still has input.
1161 return S_OK;
1162 }
1163
1164 if (m_scheduler.areSamplesScheduled()) {
1165 // Samples are still scheduled for rendering.
1166 return S_OK;
1167 }
1168
1169 // Everything is complete. Now we can tell the EVR that we are done.
1170 notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
1171 m_endStreaming = false;
1172
1173 stopSurface();
1174 return S_OK;
1175}
1176
1177HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
1178{
1179 HRESULT hr = S_OK;
1180
1181 // Cache the step count.
1182 m_frameStep.steps += steps;
1183
1184 // Set the frame-step state.
1185 m_frameStep.state = FrameStepWaitingStart;
1186
1187 // If the clock is are already running, we can start frame-stepping now.
1188 // Otherwise, we will start when the clock starts.
1189 if (m_renderState == RenderStarted)
1190 hr = startFrameStep();
1191
1192 return hr;
1193}
1194
// Begin (or resume) a frame-step operation once the clock is running.
// In FrameStepWaitingStart, queued samples are delivered one step at a
// time; in FrameStepNone, any samples left in the frame-step queue are
// delivered for normal presentation.
HRESULT EVRCustomPresenter::startFrameStep()
{
    HRESULT hr = S_OK;
    IMFSample *sample = NULL;

    if (m_frameStep.state == FrameStepWaitingStart) {
        // We have a frame-step request, and are waiting for the clock to start.
        // Set the state to "pending," which means we are waiting for samples.
        m_frameStep.state = FrameStepPending;

        // If the frame-step queue already has samples, process them now.
        while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
            sample = m_frameStep.samples.takeFirst();

            hr = deliverFrameStepSample(sample);
            if (FAILED(hr))
                goto done;

            qt_evr_safe_release(&sample);

            // We break from this loop when:
            // (a) the frame-step queue is empty, or
            // (b) the frame-step operation is complete.
        }
    } else if (m_frameStep.state == FrameStepNone) {
        // We are not frame stepping. Therefore, if the frame-step queue has samples,
        // we need to process them normally.
        while (!m_frameStep.samples.isEmpty()) {
            sample = m_frameStep.samples.takeFirst();

            hr = deliverSample(sample, false);
            if (FAILED(hr))
                goto done;

            qt_evr_safe_release(&sample);
        }
    }

done:
    // Release the in-flight sample on both the success and failure paths.
    qt_evr_safe_release(&sample);
    return hr;
}
1237
// Finish a frame-step: update state, notify the EVR with EC_STEP_COMPLETE
// and, while scrubbing (rate == 0), also report the presented position
// via EC_SCRUB_TIME.
HRESULT EVRCustomPresenter::completeFrameStep(IMFSample *sample)
{
    HRESULT hr = S_OK;
    MFTIME sampleTime = 0;
    MFTIME systemTime = 0;

    // Update our state.
    m_frameStep.state = FrameStepComplete;
    m_frameStep.sampleNoRef = 0;

    // Notify the EVR that the frame-step is complete.
    notifyEvent(EC_STEP_COMPLETE, FALSE, 0); // FALSE = completed (not cancelled)

    // If we are scrubbing (rate == 0), also send the "scrub time" event.
    if (isScrubbing()) {
        // Get the time stamp from the sample.
        hr = sample->GetSampleTime(&sampleTime);
        if (FAILED(hr)) {
            // No time stamp. Use the current presentation time.
            if (m_clock)
                m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);

            hr = S_OK; // (Not an error condition.)
        }

        // EC_SCRUB_TIME carries the 64-bit time split across the two
        // 32-bit event parameters (low DWORD first).
        notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));
    }
    return hr;
}
1267
1268HRESULT EVRCustomPresenter::cancelFrameStep()
1269{
1270 FrameStepState oldState = m_frameStep.state;
1271
1272 m_frameStep.state = FrameStepNone;
1273 m_frameStep.steps = 0;
1274 m_frameStep.sampleNoRef = 0;
1275 // Don't clear the frame-step queue yet, because we might frame step again.
1276
1277 if (oldState > FrameStepNone && oldState < FrameStepComplete) {
1278 // We were in the middle of frame-stepping when it was cancelled.
1279 // Notify the EVR.
1280 notifyEvent(EC_STEP_COMPLETE, TRUE, 0); // TRUE = cancelled
1281 }
1282 return S_OK;
1283}
1284
1285HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)
1286{
1287 HRESULT hr = S_OK;
1288
1289 RECT rcOutput;
1290 ZeroMemory(&rcOutput, sizeof(rcOutput));
1291
1292 MFVideoArea displayArea;
1293 ZeroMemory(&displayArea, sizeof(displayArea));
1294
1295 IMFMediaType *mtOptimal = NULL;
1296
1297 UINT64 size;
1298 int width;
1299 int height;
1300
1301 // Clone the proposed type.
1302
1303 hr = MFCreateMediaType(&mtOptimal);
1304 if (FAILED(hr))
1305 goto done;
1306
1307 hr = proposedType->CopyAllItems(mtOptimal);
1308 if (FAILED(hr))
1309 goto done;
1310
1311 // Modify the new type.
1312
1313 hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);
1314 width = int(HI32(size));
1315 height = int(LO32(size));
1316
1317 if (m_cropRect.isValid()) {
1318 rcOutput.left = m_cropRect.x();
1319 rcOutput.top = m_cropRect.y();
1320 rcOutput.right = m_cropRect.x() + m_cropRect.width();
1321 rcOutput.bottom = m_cropRect.y() + m_cropRect.height();
1322
1323 m_sourceRect.left = float(m_cropRect.x()) / width;
1324 m_sourceRect.top = float(m_cropRect.y()) / height;
1325 m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
1326 m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;
1327
1328 if (m_mixer)
1329 configureMixer(m_mixer);
1330 } else {
1331 rcOutput.left = 0;
1332 rcOutput.top = 0;
1333 rcOutput.right = width;
1334 rcOutput.bottom = height;
1335 }
1336
1337 // Set the geometric aperture, and disable pan/scan.
1338 displayArea = qt_evr_makeMFArea(0, 0, rcOutput.right - rcOutput.left,
1339 rcOutput.bottom - rcOutput.top);
1340
1341 hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);
1342 if (FAILED(hr))
1343 goto done;
1344
1345 hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1346 sizeof(displayArea));
1347 if (FAILED(hr))
1348 goto done;
1349
1350 // Set the pan/scan aperture and the minimum display aperture. We don't care
1351 // about them per se, but the mixer will reject the type if these exceed the
1352 // frame dimentions.
1353 hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1354 sizeof(displayArea));
1355 if (FAILED(hr))
1356 goto done;
1357
1358 hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
1359 sizeof(displayArea));
1360 if (FAILED(hr))
1361 goto done;
1362
1363 // Return the pointer to the caller.
1364 *optimalType = mtOptimal;
1365 (*optimalType)->AddRef();
1366
1367done:
1368 qt_evr_safe_release(&mtOptimal);
1369 return hr;
1370
1371}
1372
// Sets (or clears, when mediaType is NULL) the presenter's media type.
// On a real type change this recreates the video sample pool from the
// present engine and reconfigures the scheduler's frame rate; on any
// failure all resources are released before returning.
HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)
{
    // Note: mediaType can be NULL (to clear the type)

    // Clearing the media type is allowed in any state (including shutdown).
    if (!mediaType) {
        stopSurface();
        qt_evr_safe_release(&m_mediaType);
        releaseResources();
        return S_OK;
    }

    MFRatio fps = { 0, 0 };
    QList<IMFSample*> sampleQueue;

    // Cannot set the media type after shutdown.
    HRESULT hr = checkShutdown();
    if (FAILED(hr))
        goto done;

    // Check if the new type is actually different.
    // Note: This function safely handles NULL input parameters.
    if (qt_evr_areMediaTypesEqual(m_mediaType, mediaType))
        goto done; // Nothing more to do.

    // We're really changing the type. First get rid of the old type.
    qt_evr_safe_release(&m_mediaType);
    releaseResources();

    // Initialize the presenter engine with the new media type.
    // The presenter engine allocates the samples.

    hr = m_presentEngine->createVideoSamples(mediaType, sampleQueue, m_cropRect.size());
    if (FAILED(hr))
        goto done;

    // Mark each sample with our token counter. If this batch of samples becomes
    // invalid, we increment the counter, so that we know they should be discarded.
    for (auto sample : std::as_const(sampleQueue)) {
        hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, m_tokenCounter);
        if (FAILED(hr))
            goto done;
    }

    // Add the samples to the sample pool.
    hr = m_samplePool.initialize(sampleQueue);
    if (FAILED(hr))
        goto done;

    // Set the frame rate on the scheduler.
    if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {
        m_scheduler.setFrameRate(fps);
    } else {
        // NOTE: The mixer's proposed type might not have a frame rate, in which case
        // we'll use an arbitrary default. (Although it's unlikely the video source
        // does not have a frame rate.)
        m_scheduler.setFrameRate(g_DefaultFrameRate);
    }

    // Store the media type (holding our own reference).
    m_mediaType = mediaType;
    m_mediaType->AddRef();

    startSurface();

done:
    // On any failure above, tear down the (possibly half-built) resources.
    if (FAILED(hr))
        releaseResources();
    return hr;
}
1443
1444HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)
1445{
1446 D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;
1447 BOOL compressed = FALSE;
1448 MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
1449 MFVideoArea videoCropArea;
1450 UINT32 width = 0, height = 0;
1451
1452 // Validate the format.
1453 HRESULT hr = qt_evr_getFourCC(proposed, reinterpret_cast<DWORD*>(&d3dFormat));
1454 if (FAILED(hr))
1455 return hr;
1456
1458 if (pixelFormat == QVideoFrameFormat::Format_Invalid)
1459 return MF_E_INVALIDMEDIATYPE;
1460
1461 // Reject compressed media types.
1462 hr = proposed->IsCompressedFormat(&compressed);
1463 if (FAILED(hr))
1464 return hr;
1465
1466 if (compressed)
1467 return MF_E_INVALIDMEDIATYPE;
1468
1469 // The D3DPresentEngine checks whether surfaces can be created using this format
1470 hr = m_presentEngine->checkFormat(d3dFormat);
1471 if (FAILED(hr))
1472 return hr;
1473
1474 // Reject interlaced formats.
1475 hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, reinterpret_cast<UINT32*>(&interlaceMode));
1476 if (FAILED(hr))
1477 return hr;
1478
1479 if (interlaceMode != MFVideoInterlace_Progressive)
1480 return MF_E_INVALIDMEDIATYPE;
1481
1482 hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);
1483 if (FAILED(hr))
1484 return hr;
1485
1486 // Validate the various apertures (cropping regions) against the frame size.
1487 // Any of these apertures may be unspecified in the media type, in which case
1488 // we ignore it. We just want to reject invalid apertures.
1489
1490 if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE,
1491 reinterpret_cast<UINT8*>(&videoCropArea),
1492 sizeof(videoCropArea), nullptr))) {
1493 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
1494 }
1495 if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE,
1496 reinterpret_cast<UINT8*>(&videoCropArea),
1497 sizeof(videoCropArea), nullptr))) {
1498 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
1499 }
1500 if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
1501 reinterpret_cast<UINT8*>(&videoCropArea),
1502 sizeof(videoCropArea), nullptr))) {
1503 hr = qt_evr_validateVideoArea(videoCropArea, width, height);
1504 }
1505 return hr;
1506}
1507
1508void EVRCustomPresenter::processOutputLoop()
1509{
1510 HRESULT hr = S_OK;
1511
1512 // Process as many samples as possible.
1513 while (hr == S_OK) {
1514 // If the mixer doesn't have a new input sample, break from the loop.
1515 if (!m_sampleNotify) {
1516 hr = MF_E_TRANSFORM_NEED_MORE_INPUT;
1517 break;
1518 }
1519
1520 // Try to process a sample.
1521 hr = processOutput();
1522
1523 // NOTE: ProcessOutput can return S_FALSE to indicate it did not
1524 // process a sample. If so, break out of the loop.
1525 }
1526
1527 if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
1528 // The mixer has run out of input data. Check for end-of-stream.
1529 checkEndOfStream();
1530 }
1531}
1532
// Pulls one output sample from the mixer and hands it to the scheduler
// (or to the frame-step logic). Returns S_OK when a sample was processed,
// S_FALSE when no work could be done (no free sample, clock stopped, or
// the mixer returned a handled error), or a failure code.
HRESULT EVRCustomPresenter::processOutput()
{
    HRESULT hr = S_OK;
    DWORD status = 0;
    LONGLONG mixerStartTime = 0, mixerEndTime = 0;
    MFTIME systemTime = 0;
    BOOL repaint = m_repaint; // Temporarily store this state flag.

    MFT_OUTPUT_DATA_BUFFER dataBuffer;
    ZeroMemory(&dataBuffer, sizeof(dataBuffer));

    IMFSample *sample = NULL;

    // If the clock is not running, we present the first sample,
    // and then don't present any more until the clock starts.

    if ((m_renderState != RenderStarted) && !m_repaint && m_prerolled)
        return S_FALSE;

    // Make sure we have a pointer to the mixer.
    if (!m_mixer)
        return MF_E_INVALIDREQUEST;

    // Try to get a free sample from the video sample pool.
    hr = m_samplePool.getSample(&sample);
    if (hr == MF_E_SAMPLEALLOCATOR_EMPTY) // No free samples. Try again when a sample is released.
        return S_FALSE;
    if (FAILED(hr))
        return hr;

    // From now on, we have a valid video sample pointer, where the mixer will
    // write the video data.

    if (m_repaint) {
        // Repaint request. Ask the mixer for the most recent sample.
        setDesiredSampleTime(sample, m_scheduler.lastSampleTime(), m_scheduler.frameDuration());

        m_repaint = false; // OK to clear this flag now.
    } else {
        // Not a repaint request. Clear the desired sample time; the mixer will
        // give us the next frame in the stream.
        clearDesiredSampleTime(sample);

        if (m_clock) {
            // Latency: Record the starting time for ProcessOutput.
            m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);
        }
    }

    // Now we are ready to get an output sample from the mixer.
    dataBuffer.dwStreamID = 0;
    dataBuffer.pSample = sample;
    dataBuffer.dwStatus = 0;

    hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);

    if (FAILED(hr)) {
        // Return the sample to the pool before handling the error, so the
        // sample is never lost.
        HRESULT hr2 = m_samplePool.returnSample(sample);
        if (FAILED(hr2)) {
            hr = hr2;
            goto done;
        }
        // Handle some known error codes from ProcessOutput.
        if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
            // The mixer's format is not set. Negotiate a new format.
            hr = renegotiateMediaType();
        } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
            // There was a dynamic media type change. Clear our media type.
            setMediaType(NULL);
        } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
            // The mixer needs more input.
            // We have to wait for the mixer to get more input.
            m_sampleNotify = false;
        }
    } else {
        // We got an output sample from the mixer.

        if (m_clock && !repaint) {
            // Latency: Record the ending time for the ProcessOutput operation,
            // and notify the EVR of the latency.

            m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);

            LONGLONG latencyTime = mixerEndTime - mixerStartTime;
            notifyEvent(EC_PROCESSING_LATENCY, reinterpret_cast<LONG_PTR>(&latencyTime), 0);
        }

        // Set up notification for when the sample is released.
        hr = trackSample(sample);
        if (FAILED(hr))
            goto done;

        // Schedule the sample.
        if ((m_frameStep.state == FrameStepNone) || repaint) {
            hr = deliverSample(sample, repaint);
            if (FAILED(hr))
                goto done;
        } else {
            // We are frame-stepping (and this is not a repaint request).
            hr = deliverFrameStepSample(sample);
            if (FAILED(hr))
                goto done;
        }

        m_prerolled = true; // We have presented at least one sample now.
    }

done:
    qt_evr_safe_release(&sample);

    // Important: Release any events returned from the ProcessOutput method.
    qt_evr_safe_release(&dataBuffer.pEvents);
    return hr;
}
1648
1649HRESULT EVRCustomPresenter::deliverSample(IMFSample *sample, bool repaint)
1650{
1651 // If we are not actively playing, OR we are scrubbing (rate = 0) OR this is a
1652 // repaint request, then we need to present the sample immediately. Otherwise,
1653 // schedule it normally.
1654
1655 bool presentNow = ((m_renderState != RenderStarted) || isScrubbing() || repaint);
1656
1657 HRESULT hr = m_scheduler.scheduleSample(sample, presentNow);
1658
1659 if (FAILED(hr)) {
1660 // Notify the EVR that we have failed during streaming. The EVR will notify the
1661 // pipeline.
1662
1663 notifyEvent(EC_ERRORABORT, hr, 0);
1664 }
1665
1666 return hr;
1667}
1668
// Routes one mixer output sample through the frame-step state machine:
// samples may be discarded (scrubbing past, or intermediate steps),
// queued for later, or delivered when they are the target frame.
HRESULT EVRCustomPresenter::deliverFrameStepSample(IMFSample *sample)
{
    HRESULT hr = S_OK;
    IUnknown *unk = NULL;

    // For rate 0, discard any sample that ends earlier than the clock time.
    if (isScrubbing() && m_clock && qt_evr_isSampleTimePassed(m_clock, sample)) {
        // Discard this sample.
    } else if (m_frameStep.state >= FrameStepScheduled) {
        // A frame was already submitted. Put this sample on the frame-step queue,
        // in case we are asked to step to the next frame. If frame-stepping is
        // cancelled, this sample will be processed normally.
        sample->AddRef(); // Queue holds its own reference.
        m_frameStep.samples.append(sample);
    } else {
        // We're ready to frame-step.

        // Decrement the number of steps.
        if (m_frameStep.steps > 0)
            m_frameStep.steps--;

        if (m_frameStep.steps > 0) {
            // This is not the last step. Discard this sample.
        } else if (m_frameStep.state == FrameStepWaitingStart) {
            // This is the right frame, but the clock hasn't started yet. Put the
            // sample on the frame-step queue. When the clock starts, the sample
            // will be processed.
            sample->AddRef(); // Queue holds its own reference.
            m_frameStep.samples.append(sample);
        } else {
            // This is the right frame *and* the clock has started. Deliver this sample.
            hr = deliverSample(sample, false);
            if (FAILED(hr))
                goto done;

            // Query for IUnknown so that we can identify the sample later.
            // Per COM rules, an object always returns the same pointer when QI'ed for IUnknown.
            hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
            if (FAILED(hr))
                goto done;

            m_frameStep.sampleNoRef = reinterpret_cast<DWORD_PTR>(unk); // No add-ref.

            // NOTE: We do not AddRef the IUnknown pointer, because that would prevent the
            // sample from invoking the OnSampleFree callback after the sample is presented.
            // We use this IUnknown pointer purely to identify the sample later; we never
            // attempt to dereference the pointer.

            m_frameStep.state = FrameStepScheduled;
        }
    }
done:
    // Balances the QueryInterface above (no-op when unk is NULL).
    qt_evr_safe_release(&unk);
    return hr;
}
1724
1725HRESULT EVRCustomPresenter::trackSample(IMFSample *sample)
1726{
1727 IMFTrackedSample *tracked = NULL;
1728
1729 HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));
1730
1731 if (SUCCEEDED(hr))
1732 hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);
1733
1734 qt_evr_safe_release(&tracked);
1735 return hr;
1736}
1737
// Invalidates all outstanding video samples, empties the scheduler and
// the sample pool, and releases the present engine's resources.
void EVRCustomPresenter::releaseResources()
{
    // Increment the token counter to indicate that all existing video samples
    // are "stale." As these samples get released, we'll dispose of them.
    //
    // Note: The token counter is required because the samples are shared
    // between more than one thread, and they are returned to the presenter
    // through an asynchronous callback (onSampleFree). Without the token, we
    // might accidentally re-use a stale sample after the ReleaseResources
    // method returns.

    m_tokenCounter++;

    // Discard everything queued in the scheduler / frame-step queue.
    flush();

    m_samplePool.clear();

    m_presentEngine->releaseResources();
}
1757
// Async callback invoked when a tracked video sample is released after
// presentation (see trackSample). Completes a pending frame-step if this
// was the stepped sample, then returns the sample to the pool — unless
// its token is stale (see releaseResources) — and resumes output
// processing. On any failure, EC_ERRORABORT is sent to the EVR.
HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)
{
    IUnknown *object = NULL;
    IMFSample *sample = NULL;
    IUnknown *unk = NULL;
    UINT32 token;

    // Get the sample from the async result object.
    HRESULT hr = result->GetObject(&object);
    if (FAILED(hr))
        goto done;

    hr = object->QueryInterface(IID_PPV_ARGS(&sample));
    if (FAILED(hr))
        goto done;

    // If this sample was submitted for a frame-step, the frame step operation
    // is complete.

    if (m_frameStep.state == FrameStepScheduled) {
        // Query the sample for IUnknown and compare it to our cached value.
        hr = sample->QueryInterface(IID_PPV_ARGS(&unk));
        if (FAILED(hr))
            goto done;

        if (m_frameStep.sampleNoRef == reinterpret_cast<DWORD_PTR>(unk)) {
            // Notify the EVR.
            hr = completeFrameStep(sample);
            if (FAILED(hr))
                goto done;
        }

        // Note: Although object is also an IUnknown pointer, it is not
        // guaranteed to be the exact pointer value returned through
        // QueryInterface. Therefore, the second QueryInterface call is
        // required.
    }

    // The pool and token counter are shared with other threads; see the
    // note in releaseResources().
    m_mutex.lock();

    token = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);

    if (token == m_tokenCounter) {
        // Return the sample to the sample pool.
        hr = m_samplePool.returnSample(sample);
        if (SUCCEEDED(hr)) {
            // A free sample is available. Process more data if possible.
            processOutputLoop();
        }
    }

    m_mutex.unlock();

done:
    if (FAILED(hr))
        notifyEvent(EC_ERRORABORT, hr, 0);
    qt_evr_safe_release(&object);
    qt_evr_safe_release(&sample);
    qt_evr_safe_release(&unk);
    return hr;
}
1819
1820float EVRCustomPresenter::getMaxRate(bool thin)
1821{
1822 // Non-thinned:
1823 // If we have a valid frame rate and a monitor refresh rate, the maximum
1824 // playback rate is equal to the refresh rate. Otherwise, the maximum rate
1825 // is unbounded (FLT_MAX).
1826
1827 // Thinned: The maximum rate is unbounded.
1828
1829 float maxRate = FLT_MAX;
1830 MFRatio fps = { 0, 0 };
1831 UINT monitorRateHz = 0;
1832
1833 if (!thin && m_mediaType) {
1834 qt_evr_getFrameRate(m_mediaType, &fps);
1835 monitorRateHz = m_presentEngine->refreshRate();
1836
1837 if (fps.Denominator && fps.Numerator && monitorRateHz) {
1838 // Max Rate = Refresh Rate / Frame Rate
1839 maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
1840 }
1841 }
1842
1843 return maxRate;
1844}
1845
1847{
1848 switch (int(e->type())) {
1849 case StartSurface:
1850 startSurface();
1851 return true;
1852 case StopSurface:
1853 stopSurface();
1854 return true;
1855 case PresentSample:
1856 presentSample(static_cast<PresentSampleEvent *>(e)->sample());
1857 return true;
1858 default:
1859 break;
1860 }
1861 return QObject::event(e);
1862}
1863
1865{
1866 if (thread() != QThread::currentThread()) {
1868 return;
1869 }
1870}
1871
1873{
1874 if (thread() != QThread::currentThread()) {
1876 return;
1877 }
1878}
1879
1881{
1882 if (thread() != QThread::currentThread()) {
1884 return;
1885 }
1886
1887 if (!m_videoSink || !m_presentEngine->videoSurfaceFormat().isValid())
1888 return;
1889
1890 QVideoFrame frame = m_presentEngine->makeVideoFrame(sample);
1891
1892 // Since start/end times are related to a position when the clock is started,
1893 // to have times from the beginning, need to adjust it by adding seeked position.
1894 if (m_positionOffset) {
1895 if (frame.startTime())
1896 frame.setStartTime(frame.startTime() + m_positionOffset);
1897 if (frame.endTime())
1898 frame.setEndTime(frame.endTime() + m_positionOffset);
1899 }
1900
1901 ComPtr<IMFMediaType> inputStreamType;
1902 if (SUCCEEDED(m_mixer->GetInputCurrentType(0, inputStreamType.GetAddressOf()))) {
1903 auto rotation = static_cast<MFVideoRotationFormat>(MFGetAttributeUINT32(inputStreamType.Get(), MF_MT_VIDEO_ROTATION, 0));
1904 switch (rotation) {
1905 case MFVideoRotationFormat_0: frame.setRotationAngle(QVideoFrame::Rotation0); break;
1906 case MFVideoRotationFormat_90: frame.setRotationAngle(QVideoFrame::Rotation90); break;
1907 case MFVideoRotationFormat_180: frame.setRotationAngle(QVideoFrame::Rotation180); break;
1908 case MFVideoRotationFormat_270: frame.setRotationAngle(QVideoFrame::Rotation270); break;
1909 default: frame.setRotationAngle(QVideoFrame::Rotation0);
1910 }
1911 }
1912
1913 m_videoSink->platformVideoSink()->setVideoFrame(frame);
1914}
1915
1917{
1918 m_positionOffset = position * 1000;
1919}
1920
1921HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG &sampleTime, const LONGLONG &duration)
1922{
1923 if (!sample)
1924 return E_POINTER;
1925
1926 HRESULT hr = S_OK;
1927 IMFDesiredSample *desired = NULL;
1928
1929 hr = sample->QueryInterface(IID_PPV_ARGS(&desired));
1930 if (SUCCEEDED(hr))
1931 desired->SetDesiredSampleTimeAndDuration(sampleTime, duration);
1932
1933 qt_evr_safe_release(&desired);
1934 return hr;
1935}
1936
1938{
1939 if (!sample)
1940 return E_POINTER;
1941
1942 HRESULT hr = S_OK;
1943
1944 IMFDesiredSample *desired = NULL;
1945 IUnknown *unkSwapChain = NULL;
1946
1947 // We store some custom attributes on the sample, so we need to cache them
1948 // and reset them.
1949 //
1950 // This works around the fact that IMFDesiredSample::Clear() removes all of the
1951 // attributes from the sample.
1952
1953 UINT32 counter = MFGetAttributeUINT32(sample, MFSamplePresenter_SampleCounter, (UINT32)-1);
1954
1955 hr = sample->QueryInterface(IID_PPV_ARGS(&desired));
1956 if (SUCCEEDED(hr)) {
1957 desired->Clear();
1958
1959 hr = sample->SetUINT32(MFSamplePresenter_SampleCounter, counter);
1960 if (FAILED(hr))
1961 goto done;
1962 }
1963
1964done:
1965 qt_evr_safe_release(&unkSwapChain);
1966 qt_evr_safe_release(&desired);
1967 return hr;
1968}
1969
1970HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &sourceRect)
1971{
1972 if (!mixer)
1973 return E_POINTER;
1974
1975 IMFAttributes *attributes = NULL;
1976
1977 HRESULT hr = mixer->GetAttributes(&attributes);
1978 if (SUCCEEDED(hr)) {
1979 hr = attributes->SetBlob(VIDEO_ZOOM_RECT, reinterpret_cast<const UINT8*>(&sourceRect),
1980 sizeof(sourceRect));
1981 attributes->Release();
1982 }
1983 return hr;
1984}
1985
1987{
1988 GUID majorType;
1989 if (FAILED(type->GetMajorType(&majorType)))
1991 if (majorType != MFMediaType_Video)
1993
1994 GUID subtype;
1995 if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))
1997
1999}
2000
HRESULT createVideoSamples(IMFMediaType *format, QList< IMFSample * > &videoSampleQueue, QSize frameSize)
void setSink(QVideoSink *sink)
QVideoFrame makeVideoFrame(IMFSample *sample)
QVideoFrameFormat videoSurfaceFormat() const
HRESULT getService(REFGUID guidService, REFIID riid, void **ppv)
HRESULT checkFormat(D3DFORMAT format)
void positionChanged(qint64 position)
STDMETHODIMP GetDeviceID(IID *deviceID) override
STDMETHODIMP GetSlowestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override
STDMETHODIMP OnClockStop(MFTIME systemTime) override
STDMETHODIMP ReleaseServicePointers() override
STDMETHODIMP QueryInterface(REFIID riid, void **ppv) override
STDMETHODIMP OnClockRestart(MFTIME systemTime) override
STDMETHODIMP InitServicePointers(IMFTopologyServiceLookup *lookup) override
STDMETHODIMP GetService(REFGUID guidService, REFIID riid, LPVOID *ppvObject) override
void setCropRect(QRect cropRect)
STDMETHODIMP IsRateSupported(BOOL thin, float rate, float *nearestSupportedRate) override
STDMETHODIMP OnClockSetRate(MFTIME systemTime, float rate) override
STDMETHODIMP GetCurrentMediaType(IMFVideoMediaType **mediaType) override
void setSink(QVideoSink *sink)
bool event(QEvent *) override
This virtual function receives events to an object and should return true if the event e was recogniz...
STDMETHODIMP GetFastestRate(MFRATE_DIRECTION direction, BOOL thin, float *rate) override
STDMETHODIMP OnClockStart(MFTIME systemTime, LONGLONG clockStartOffset) override
STDMETHODIMP OnClockPause(MFTIME systemTime) override
STDMETHODIMP ProcessMessage(MFVP_MESSAGE_TYPE message, ULONG_PTR param) override
void presentSample(IMFSample *sample)
EVRCustomPresenter(QVideoSink *sink=0)
IMFSample * sample() const
PresentSampleEvent(IMFSample *sample)
static void postEvent(QObject *receiver, QEvent *event, int priority=Qt::NormalEventPriority)
\inmodule QtCore
Definition qcoreevent.h:45
Type
This enum type defines the valid event types in Qt.
Definition qcoreevent.h:51
Definition qlist.h:74
qsizetype size() const noexcept
Definition qlist.h:386
bool isEmpty() const noexcept
Definition qlist.h:390
value_type takeFirst()
Definition qlist.h:549
qsizetype count() const noexcept
Definition qlist.h:387
void prepend(rvalue_ref t)
Definition qlist.h:456
void append(parameter_type t)
Definition qlist.h:441
void clear()
Definition qlist.h:417
\inmodule QtCore
Definition qmutex.h:317
void unlock() noexcept
Unlocks the mutex.
Definition qmutex.h:293
void lock() noexcept
Locks the mutex.
Definition qmutex.h:290
\inmodule QtCore
Definition qobject.h:90
virtual bool event(QEvent *event)
This virtual function receives events to an object and should return true if the event e was recogniz...
Definition qobject.cpp:1363
QThread * thread() const
Returns the thread in which the object lives.
Definition qobject.cpp:1561
void deleteLater()
\threadsafe
Definition qobject.cpp:2352
virtual void setVideoFrame(const QVideoFrame &frame)
void enqueue(const T &t)
Adds value t to the tail of the queue.
Definition qqueue.h:18
T dequeue()
Removes the head item in the queue and returns it.
Definition qqueue.h:19
\inmodule QtCore\reentrant
Definition qrect.h:30
constexpr int height() const noexcept
Returns the height of the rectangle.
Definition qrect.h:238
constexpr bool isValid() const noexcept
Returns true if the rectangle is valid, otherwise returns false.
Definition qrect.h:169
constexpr int x() const noexcept
Returns the x-coordinate of the rectangle's left edge.
Definition qrect.h:184
constexpr QSize size() const noexcept
Returns the size of the rectangle.
Definition qrect.h:241
constexpr int width() const noexcept
Returns the width of the rectangle.
Definition qrect.h:235
constexpr int y() const noexcept
Returns the y-coordinate of the rectangle's top edge.
Definition qrect.h:187
static QThread * currentThread()
Definition qthread.cpp:966
bool isValid() const
Identifies if a video surface format has a valid pixel format and frame size.
PixelFormat
Enumerates video data types.
static constexpr int NPixelFormats
The QVideoFrame class represents a frame of video data.
Definition qvideoframe.h:26
The QVideoSink class represents a generic sink for video data.
Definition qvideosink.h:22
QPlatformVideoSink * platformVideoSink() const
HRESULT getSample(IMFSample **sample)
HRESULT initialize(QList< IMFSample * > &samples)
HRESULT returnSample(IMFSample *sample)
HRESULT stopScheduler()
Scheduler(EVRCustomPresenter *presenter)
HRESULT scheduleSample(IMFSample *sample, bool presentNow)
const LONGLONG & frameDuration() const
void setFrameRate(const MFRatio &fps)
const LONGLONG & lastSampleTime() const
void setClockRate(float rate)
HRESULT processSamplesInQueue(LONG *nextSleep)
static DWORD WINAPI schedulerThreadProc(LPVOID parameter)
HRESULT processSample(IMFSample *sample, LONG *nextSleep)
HRESULT startScheduler(IMFClock *clock)
#define this
Definition dialogs.cpp:9
double e
direction
static LONG MFTimeToMsec(const LONGLONG &time)
static HRESULT setDesiredSampleTime(IMFSample *sample, const LONGLONG &hnsSampleTime, const LONGLONG &hnsDuration)
static QVideoFrameFormat::PixelFormat pixelFormatFromMediaType(IMFMediaType *type)
static HRESULT setMixerSourceRect(IMFTransform *mixer, const MFVideoNormalizedRect &nrcSource)
static const DWORD SCHEDULER_TIMEOUT
bool qt_evr_setCustomPresenter(IUnknown *evr, EVRCustomPresenter *presenter)
QT_BEGIN_NAMESPACE static const MFRatio g_DefaultFrameRate
static const MFTIME ONE_SECOND
static const LONG ONE_MSEC
static HRESULT clearDesiredSampleTime(IMFSample *sample)
static QT_END_NAMESPACE const GUID MFSamplePresenter_SampleCounter
D3DFORMAT qt_evr_D3DFormatFromPixelFormat(QVideoFrameFormat::PixelFormat format)
bool qt_evr_areMediaTypesEqual(IMFMediaType *type1, IMFMediaType *type2)
QT_BEGIN_NAMESPACE HRESULT qt_evr_getFourCC(IMFMediaType *type, DWORD *fourCC)
bool qt_evr_isSampleTimePassed(IMFClock *clock, IMFSample *sample)
HRESULT qt_evr_validateVideoArea(const MFVideoArea &area, UINT32 width, UINT32 height)
HRESULT qt_evr_getFrameRate(IMFMediaType *pType, MFRatio *pRatio)
MFVideoArea qt_evr_makeMFArea(float x, float y, DWORD width, DWORD height)
static QT_BEGIN_NAMESPACE void qt_evr_safe_release(T **unk)
Token token
Definition keywords.cpp:444
Combined button and popup list for selecting options.
Q_MULTIMEDIA_EXPORT QVideoFrameFormat::PixelFormat pixelFormatFromMediaSubtype(const GUID &subtype)
#define DWORD_PTR
auto qFabs(T v)
Definition qmath.h:48
GLsizei samples
GLint GLsizei GLsizei height
GLenum GLuint GLintptr GLsizeiptr size
[1]
GLfloat GLfloat f
GLint GLsizei width
GLenum type
GLuint GLsizei const GLchar * message
GLenum const GLint * param
GLint GLsizei GLsizei GLenum format
GLuint counter
GLuint GLenum * rate
GLsizei GLenum GLboolean sink
GLuint64EXT * result
[6]
static qreal position(const QQuickItem *item, QQuickAnchors::Anchor anchorLine)
long long qint64
Definition qtypes.h:55
IUIViewSettingsInterop __RPC__in REFIID riid
struct tagMSG MSG
long HRESULT
#define QMM_PRESENTATION_CURRENT_POSITION
QByteArray compressed
QFrame frame
[0]
QSvgRenderer * renderer
[0]
Definition moc.h:24