#include <private/qwindowsmultimediautils_p.h>
#include <private/qplatformvideosink_p.h>
#include <private/qwindowsmfdefs_p.h>

#include <QtCore/qmutex.h>
#include <QtCore/qvarlengtharray.h>
#include <QtCore/qrect.h>
#include <QtCore/qdebug.h>
    if (!evr || !presenter)

    if (SUCCEEDED(evr->QueryInterface(IID_PPV_ARGS(&renderer)))) {

    IMFSample *sample() const { return m_sample; }
    : m_presenter(presenter)
    , m_schedulerThread(0)
    , m_threadReadyEvent(0)
    , m_playbackRate(1.0f)
    , m_perFrameInterval(0)

    for (int i = 0; i < m_scheduledSamples.size(); ++i)
        m_scheduledSamples[i]->Release();
    m_scheduledSamples.clear();
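
    // Scheduler::setFrameRate: convert the nominal frame rate into an average
    // per-frame duration and cache a quarter-frame threshold that the sample
    // scheduling code uses to decide whether a sample is late.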
    UINT64 AvgTimePerFrame = 0;
    MFFrameRateToAverageTimePerFrame(fps.Numerator, fps.Denominator, &AvgTimePerFrame);
    m_perFrameInterval = (MFTIME)AvgTimePerFrame;
    m_perFrame_1_4th = m_perFrameInterval / 4;
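
    // Scheduler::startScheduler: create the "thread ready" and "flush" events
    // plus the worker thread, then wait until the thread either signals
    // readiness or terminates prematurely.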
    if (m_schedulerThread)

    m_threadReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
    if (!m_threadReadyEvent) {
        hr = HRESULT_FROM_WIN32(GetLastError());

    m_flushEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
        hr = HRESULT_FROM_WIN32(GetLastError());

    if (!m_schedulerThread) {
        hr = HRESULT_FROM_WIN32(GetLastError());

    hObjects[0] = m_threadReadyEvent;
    hObjects[1] = m_schedulerThread;
    dwWait = WaitForMultipleObjects(2, hObjects, FALSE, INFINITE);
    if (WAIT_OBJECT_0 != dwWait) {
        CloseHandle(m_schedulerThread);
        m_schedulerThread = NULL;

    if (m_threadReadyEvent) {
        CloseHandle(m_threadReadyEvent);
        m_threadReadyEvent = NULL;
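
    // Scheduler::stopScheduler: post a Terminate message to the worker thread,
    // wait for it to exit, close the thread and flush-event handles, and
    // release any samples that are still queued.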
    if (!m_schedulerThread)

    PostThreadMessage(m_threadID, Terminate, 0, 0);

    WaitForSingleObject(m_schedulerThread, INFINITE);

    CloseHandle(m_schedulerThread);
    m_schedulerThread = NULL;

    CloseHandle(m_flushEvent);

    for (int i = 0; i < m_scheduledSamples.size(); ++i)
        m_scheduledSamples[i]->Release();
    m_scheduledSamples.clear();
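
    // Scheduler::flush: ask the worker thread to drop its queued samples and
    // wait on the flush event (or the thread handle, in case the thread has
    // already exited).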
    if (m_schedulerThread) {
        PostThreadMessage(m_threadID, Flush, 0, 0);

        HANDLE objects[] = { m_flushEvent, m_schedulerThread };

    return m_scheduledSamples.count() > 0;
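
    // Scheduler::scheduleSample: fail if the worker thread is missing or has
    // died; present immediately when asked to (or when there is no clock),
    // otherwise queue the sample and wake the thread with a Schedule message.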
    if (!m_schedulerThread)
        return MF_E_NOT_INITIALIZED;

    DWORD dwExitCode = 0;
    GetExitCodeThread(m_schedulerThread, &dwExitCode);
    if (dwExitCode != STILL_ACTIVE)

    if (presentNow || !m_clock) {

        m_scheduledSamples.enqueue(sample);

        PostThreadMessage(m_threadID, Schedule, 0, 0);

    IMFSample *sample = NULL;

    while (!m_scheduledSamples.isEmpty()) {

        sample = m_scheduledSamples.dequeue();

        if (FAILED(hr) || wait > 0)
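
    // Scheduler::processSample: compare the sample's presentation time with the
    // presentation clock. Late samples and samples due within the next three
    // quarters of a frame are presented immediately; anything further in the
    // future is put back on the queue and a sleep interval, scaled by the
    // playback rate, is returned to the caller.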
    LONGLONG hnsPresentationTime = 0;
    LONGLONG hnsTimeNow = 0;
    MFTIME hnsSystemTime = 0;

    bool presentNow = true;

        hr = sample->GetSampleTime(&hnsPresentationTime);

        hr = m_clock->GetCorrelatedTime(0, &hnsTimeNow, &hnsSystemTime);

        LONGLONG hnsDelta = hnsPresentationTime - hnsTimeNow;
        if (m_playbackRate < 0) {
            hnsDelta = - hnsDelta;

        if (hnsDelta < - m_perFrame_1_4th) {

        } else if (hnsDelta > (3 * m_perFrame_1_4th)) {
            nextSleep = MFTimeToMsec(hnsDelta - (3 * m_perFrame_1_4th));

            if (m_playbackRate != 0)
                nextSleep = (LONG)(nextSleep / qFabs(m_playbackRate));

        m_scheduledSamples.prepend(sample);

    *pNextSleep = nextSleep;

    return scheduler->schedulerThreadProcPrivate();
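
// Scheduler worker thread: force creation of a message queue, signal that the
// thread is ready, then alternate between waiting for posted messages
// (Terminate, Flush, Schedule) and processing the sample queue whenever the
// wait times out.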
DWORD Scheduler::schedulerThreadProcPrivate()

    LONG wait = INFINITE;
    bool exitThread = false;

    PeekMessage(&msg, NULL, WM_USER, WM_USER, PM_NOREMOVE);

    SetEvent(m_threadReadyEvent);

    while (!exitThread) {

        DWORD result = MsgWaitForMultipleObjects(0, NULL, FALSE, wait, QS_POSTMESSAGE);

        if (result == WAIT_TIMEOUT) {

        while (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
            bool processSamples = true;

            switch (msg.message) {

                for (int i = 0; i < m_scheduledSamples.size(); ++i)
                m_scheduledSamples.clear();

                SetEvent(m_flushEvent);

            if (processSamples) {
                processSamples = (wait != (LONG)INFINITE);

    return (SUCCEEDED(hr) ? 0 : 1);
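
// SamplePool: a small free list of IMFSample objects. getSample() hands one
// out, returnSample() puts it back, initialize() seeds the pool from a list
// of samples, and clear() releases everything.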
    : m_initialized(false)

    return MF_E_NOT_INITIALIZED;

    if (m_videoSampleQueue.isEmpty())
        return MF_E_SAMPLEALLOCATOR_EMPTY;

    IMFSample *taken = m_videoSampleQueue.takeFirst();

    return MF_E_NOT_INITIALIZED;

    m_videoSampleQueue.append(sample);

    return MF_E_INVALIDREQUEST;

    for (auto sample : std::as_const(samples)) {

        m_videoSampleQueue.append(sample);

    m_initialized = true;

    for (auto sample : std::as_const(samples))

    for (auto sample : std::as_const(m_videoSampleQueue))

    m_videoSampleQueue.clear();
    m_initialized = false;
    , m_renderState(RenderShutdown)
    , m_sampleNotify(false)
    , m_endStreaming(false)
    , m_playbackRate(1.0f)
    , m_mediaEventSink(0)
    , m_canRenderToSurface(false)
    , m_positionOffset(0)

    m_sourceRect.top = 0;
    m_sourceRect.left = 0;
    m_sourceRect.bottom = 1;
    m_sourceRect.right = 1;

    m_samplePool.clear();

    delete m_presentEngine;
    if (riid == IID_IMFGetService) {
        *ppvObject = static_cast<IMFGetService*>(this);
    } else if (riid == IID_IMFTopologyServiceLookupClient) {
        *ppvObject = static_cast<IMFTopologyServiceLookupClient*>(this);
    } else if (riid == IID_IMFVideoDeviceID) {
        *ppvObject = static_cast<IMFVideoDeviceID*>(this);
    } else if (riid == IID_IMFVideoPresenter) {
        *ppvObject = static_cast<IMFVideoPresenter*>(this);
    } else if (riid == IID_IMFRateSupport) {
        *ppvObject = static_cast<IMFRateSupport*>(this);
    } else if (riid == IID_IUnknown) {
        *ppvObject = static_cast<IUnknown*>(static_cast<IMFGetService*>(this));
    } else if (riid == IID_IMFClockStateSink) {
        *ppvObject = static_cast<IMFClockStateSink*>(this);

        return E_NOINTERFACE;
ULONG EVRCustomPresenter::AddRef()
    return InterlockedIncrement(&m_refCount);

ULONG EVRCustomPresenter::Release()
    ULONG uCount = InterlockedDecrement(&m_refCount);

    if (guidService != MR_VIDEO_RENDER_SERVICE)
        return MF_E_UNSUPPORTED_SERVICE;

    *deviceID = IID_IDirect3DDevice9;
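
// InitServicePointers: query the EVR's service lookup for the presentation
// clock, the mixer and the media event sink, then push the current source
// rectangle to the mixer via configureMixer().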
    DWORD objectCount = 0;

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

        return MF_E_INVALIDREQUEST;

    lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                          MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_clock),

    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                               MR_VIDEO_MIXER_SERVICE, IID_PPV_ARGS(&m_mixer),

    hr = configureMixer(m_mixer);

    hr = lookup->LookupService(MF_SERVICE_LOOKUP_GLOBAL, 0,
                               MR_VIDEO_RENDER_SERVICE, IID_PPV_ARGS(&m_mediaEventSink),

    return m_presentEngine->isValid() && m_canRenderToSurface;
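
// ProcessMessage: dispatch the messages the EVR sends to its presenter:
// flush, media-type invalidation, "input ready" notifications, begin/end of
// streaming, end of stream, and frame-step requests.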
    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    hr = checkShutdown();

    case MFVP_MESSAGE_FLUSH:

    case MFVP_MESSAGE_INVALIDATEMEDIATYPE:
        hr = renegotiateMediaType();

    case MFVP_MESSAGE_PROCESSINPUTNOTIFY:
        hr = processInputNotify();

    case MFVP_MESSAGE_BEGINSTREAMING:
        hr = beginStreaming();

    case MFVP_MESSAGE_ENDSTREAMING:

    case MFVP_MESSAGE_ENDOFSTREAM:
        m_endStreaming = true;
        hr = checkEndOfStream();

    case MFVP_MESSAGE_STEP:
        hr = prepareFrameStep(DWORD(param));

    case MFVP_MESSAGE_CANCELSTEP:
        hr = cancelFrameStep();
    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
    hr = checkShutdown();
        return MF_E_NOT_INITIALIZED;
    return m_mediaType->QueryInterface(IID_PPV_ARGS(mediaType));

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
    hr = startFrameStep();

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
    hr = startFrameStep();

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
    if ((m_playbackRate == 0.0f) && (rate != 0.0f)) {
        for (auto sample : std::as_const(m_frameStep.samples))
        m_frameStep.samples.clear();
    m_playbackRate = rate;

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
    float maxRate = 0.0f;
    maxRate = getMaxRate(thin);

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
    float maxRate = 0.0f;
    float nearestRate = rate;
    maxRate = getMaxRate(thin);
        hr = MF_E_UNSUPPORTED_RATE;
        nearestRate = maxRate;
        nearestRate = -nearestRate;
    if (nearestSupportedRate)
        *nearestSupportedRate = nearestRate;

    const std::lock_guard<QRecursiveMutex> locker(m_mutex);
    m_canRenderToSurface = false;
    m_canRenderToSurface = true;

    m_cropRect = cropRect;

HRESULT EVRCustomPresenter::configureMixer(IMFTransform *mixer)
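
// renegotiateMediaType: iterate the mixer's available output types, skip the
// ones the presenter cannot render, build an "optimal" type from the first
// acceptable candidate and set it as the mixer's output type.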
HRESULT EVRCustomPresenter::renegotiateMediaType()

    bool foundMediaType = false;

    IMFMediaType *mixerType = NULL;
    IMFMediaType *optimalType = NULL;

        return MF_E_INVALIDREQUEST;

    DWORD typeIndex = 0;
    while (!foundMediaType && (hr != MF_E_NO_MORE_TYPES)) {

        hr = m_mixer->GetOutputAvailableType(0, typeIndex++, &mixerType);

        hr = isMediaTypeSupported(mixerType);

        hr = createOptimalVideoType(mixerType, &optimalType);

        hr = m_mixer->SetOutputType(0, optimalType, MFT_SET_TYPE_TEST_ONLY);

        hr = setMediaType(optimalType);

        if (SUCCEEDED(hr)) {
            hr = m_mixer->SetOutputType(0, optimalType, 0);

            foundMediaType = true;
HRESULT EVRCustomPresenter::flush()
    m_prerolled = false;

    m_scheduler.flush();

    for (auto sample : std::as_const(m_frameStep.samples))
    m_frameStep.samples.clear();

HRESULT EVRCustomPresenter::processInputNotify()
    m_sampleNotify = true;

        hr = MF_E_TRANSFORM_TYPE_NOT_SET;

        processOutputLoop();

HRESULT EVRCustomPresenter::beginStreaming()

HRESULT EVRCustomPresenter::endStreaming()

HRESULT EVRCustomPresenter::checkEndOfStream()
    if (!m_endStreaming) {

    if (m_sampleNotify) {

    notifyEvent(EC_COMPLETE, (LONG_PTR)S_OK, 0);
    m_endStreaming = false;

HRESULT EVRCustomPresenter::prepareFrameStep(DWORD steps)
    m_frameStep.steps += steps;

    hr = startFrameStep();

HRESULT EVRCustomPresenter::startFrameStep()
    IMFSample *sample = NULL;

    while (!m_frameStep.samples.isEmpty() && (m_frameStep.state == FrameStepPending)) {
        sample = m_frameStep.samples.takeFirst();

        hr = deliverFrameStepSample(sample);

    while (!m_frameStep.samples.isEmpty()) {
        sample = m_frameStep.samples.takeFirst();

        hr = deliverSample(sample, false);

HRESULT EVRCustomPresenter::completeFrameStep(IMFSample *sample)
    MFTIME sampleTime = 0;
    MFTIME systemTime = 0;

    m_frameStep.sampleNoRef = 0;

    notifyEvent(EC_STEP_COMPLETE, FALSE, 0);

    if (isScrubbing()) {
        hr = sample->GetSampleTime(&sampleTime);

            m_clock->GetCorrelatedTime(0, &sampleTime, &systemTime);

        notifyEvent(EC_SCRUB_TIME, DWORD(sampleTime), DWORD(((sampleTime) >> 32) & 0xffffffff));

HRESULT EVRCustomPresenter::cancelFrameStep()
    m_frameStep.steps = 0;
    m_frameStep.sampleNoRef = 0;

    notifyEvent(EC_STEP_COMPLETE, TRUE, 0);
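
// createOptimalVideoType: copy the proposed mixer type, then apply the crop
// rectangle (when valid) as the pan/scan, geometric and minimum-display
// apertures, update the normalized source rectangle on the mixer, and return
// the adjusted type to the caller.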
HRESULT EVRCustomPresenter::createOptimalVideoType(IMFMediaType *proposedType, IMFMediaType **optimalType)

    ZeroMemory(&rcOutput, sizeof(rcOutput));

    MFVideoArea displayArea;
    ZeroMemory(&displayArea, sizeof(displayArea));

    IMFMediaType *mtOptimal = NULL;

    hr = MFCreateMediaType(&mtOptimal);

    hr = proposedType->CopyAllItems(mtOptimal);

    hr = proposedType->GetUINT64(MF_MT_FRAME_SIZE, &size);

        rcOutput.left = m_cropRect.x();
        rcOutput.top = m_cropRect.y();
        rcOutput.right = m_cropRect.x() + m_cropRect.width();
        rcOutput.bottom = m_cropRect.y() + m_cropRect.height();

        m_sourceRect.left = float(m_cropRect.x()) / width;
        m_sourceRect.top = float(m_cropRect.y()) / height;
        m_sourceRect.right = float(m_cropRect.x() + m_cropRect.width()) / width;
        m_sourceRect.bottom = float(m_cropRect.y() + m_cropRect.height()) / height;

        configureMixer(m_mixer);

        rcOutput.right = width;
        rcOutput.bottom = height;

                                   rcOutput.bottom - rcOutput.top);

    hr = mtOptimal->SetUINT32(MF_MT_PAN_SCAN_ENABLED, FALSE);

    hr = mtOptimal->SetBlob(MF_MT_GEOMETRIC_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
                            sizeof(displayArea));

    hr = mtOptimal->SetBlob(MF_MT_PAN_SCAN_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
                            sizeof(displayArea));

    hr = mtOptimal->SetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, reinterpret_cast<UINT8*>(&displayArea),
                            sizeof(displayArea));

    *optimalType = mtOptimal;
    (*optimalType)->AddRef();
HRESULT EVRCustomPresenter::setMediaType(IMFMediaType *mediaType)

    MFRatio fps = { 0, 0 };

    for (auto sample : std::as_const(sampleQueue)) {

    if (SUCCEEDED(qt_evr_getFrameRate(mediaType, &fps)) && (fps.Numerator != 0) && (fps.Denominator != 0)) {

    m_mediaType = mediaType;
    m_mediaType->AddRef();

HRESULT EVRCustomPresenter::isMediaTypeSupported(IMFMediaType *proposed)

    D3DFORMAT d3dFormat = D3DFMT_UNKNOWN;

    MFVideoInterlaceMode interlaceMode = MFVideoInterlace_Unknown;
    MFVideoArea videoCropArea;

        return MF_E_INVALIDMEDIATYPE;

    hr = proposed->IsCompressedFormat(&compressed);

        return MF_E_INVALIDMEDIATYPE;

    hr = proposed->GetUINT32(MF_MT_INTERLACE_MODE, reinterpret_cast<UINT32*>(&interlaceMode));

    if (interlaceMode != MFVideoInterlace_Progressive)
        return MF_E_INVALIDMEDIATYPE;

    hr = MFGetAttributeSize(proposed, MF_MT_FRAME_SIZE, &width, &height);

    if (SUCCEEDED(proposed->GetBlob(MF_MT_PAN_SCAN_APERTURE,
                                    reinterpret_cast<UINT8*>(&videoCropArea),
                                    sizeof(videoCropArea), nullptr))) {

    if (SUCCEEDED(proposed->GetBlob(MF_MT_GEOMETRIC_APERTURE,
                                    reinterpret_cast<UINT8*>(&videoCropArea),
                                    sizeof(videoCropArea), nullptr))) {

    if (SUCCEEDED(proposed->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
                                    reinterpret_cast<UINT8*>(&videoCropArea),
                                    sizeof(videoCropArea), nullptr))) {
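
// processOutputLoop / processOutput: keep pulling frames from the mixer until
// it reports that it needs more input. Each successful ProcessOutput call
// produces a sample that is either delivered to the scheduler or routed
// through the frame-stepping path, and EC_PROCESSING_LATENCY is reported
// while a presentation clock is available.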
void EVRCustomPresenter::processOutputLoop()

    while (hr == S_OK) {

        if (!m_sampleNotify) {
            hr = MF_E_TRANSFORM_NEED_MORE_INPUT;

        hr = processOutput();

    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {

HRESULT EVRCustomPresenter::processOutput()

    LONGLONG mixerStartTime = 0, mixerEndTime = 0;
    MFTIME systemTime = 0;
    BOOL repaint = m_repaint;

    MFT_OUTPUT_DATA_BUFFER dataBuffer;
    ZeroMemory(&dataBuffer, sizeof(dataBuffer));

    IMFSample *sample = NULL;

    if ((m_renderState != RenderStarted) && !m_repaint && m_prerolled)

        return MF_E_INVALIDREQUEST;

    if (hr == MF_E_SAMPLEALLOCATOR_EMPTY)

        m_clock->GetCorrelatedTime(0, &mixerStartTime, &systemTime);

    dataBuffer.dwStreamID = 0;
    dataBuffer.pSample = sample;
    dataBuffer.dwStatus = 0;

    hr = m_mixer->ProcessOutput(0, 1, &dataBuffer, &status);

    if (hr == MF_E_TRANSFORM_TYPE_NOT_SET) {
        hr = renegotiateMediaType();
    } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
        m_sampleNotify = false;

    if (m_clock && !repaint) {

        m_clock->GetCorrelatedTime(0, &mixerEndTime, &systemTime);

        LONGLONG latencyTime = mixerEndTime - mixerStartTime;
        notifyEvent(EC_PROCESSING_LATENCY, reinterpret_cast<LONG_PTR>(&latencyTime), 0);

    hr = trackSample(sample);

    hr = deliverSample(sample, repaint);

    hr = deliverFrameStepSample(sample);

HRESULT EVRCustomPresenter::deliverSample(IMFSample *sample, bool repaint)

    bool presentNow = ((m_renderState != RenderStarted) || isScrubbing() || repaint);

    notifyEvent(EC_ERRORABORT, hr, 0);
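
// deliverFrameStepSample: samples that arrive while a frame step is pending
// are queued in m_frameStep.samples. Once stepping is active, each sample
// decrements the remaining step count; the sample that completes the step is
// delivered and remembered by its IUnknown address so the step can be
// finished when that sample is released (see onSampleFree below).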
HRESULT EVRCustomPresenter::deliverFrameStepSample(IMFSample *sample)

    IUnknown *unk = NULL;

        m_frameStep.samples.append(sample);

    if (m_frameStep.steps > 0)
        m_frameStep.steps--;

    if (m_frameStep.steps > 0) {

        m_frameStep.samples.append(sample);

    hr = deliverSample(sample, false);

    hr = sample->QueryInterface(IID_PPV_ARGS(&unk));

    m_frameStep.sampleNoRef = reinterpret_cast<DWORD_PTR>(unk);

HRESULT EVRCustomPresenter::trackSample(IMFSample *sample)

    IMFTrackedSample *tracked = NULL;

    HRESULT hr = sample->QueryInterface(IID_PPV_ARGS(&tracked));

    hr = tracked->SetAllocator(&m_sampleFreeCB, NULL);

void EVRCustomPresenter::releaseResources()

    m_samplePool.clear();
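
// onSampleFree: callback invoked when a tracked sample is released by the
// sink. It completes a pending frame step if this was the remembered sample
// and, when the token counter still matches, resumes the output loop; any
// failure is reported through EC_ERRORABORT.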
HRESULT EVRCustomPresenter::onSampleFree(IMFAsyncResult *result)

    IUnknown *object = NULL;
    IMFSample *sample = NULL;
    IUnknown *unk = NULL;

    hr = object->QueryInterface(IID_PPV_ARGS(&sample));

    hr = sample->QueryInterface(IID_PPV_ARGS(&unk));

    if (m_frameStep.sampleNoRef == reinterpret_cast<DWORD_PTR>(unk)) {

        hr = completeFrameStep(sample);

    if (token == m_tokenCounter) {

        if (SUCCEEDED(hr)) {

            processOutputLoop();

    notifyEvent(EC_ERRORABORT, hr, 0);
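
// getMaxRate: without thinning, the fastest supported forward rate is bounded
// by the monitor refresh rate divided by the video frame rate; with thinning
// (or without a media type) the rate is left unbounded (FLT_MAX).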
float EVRCustomPresenter::getMaxRate(bool thin)

    float maxRate = FLT_MAX;
    MFRatio fps = { 0, 0 };
    UINT monitorRateHz = 0;

    if (!thin && m_mediaType) {

        if (fps.Denominator && fps.Numerator && monitorRateHz) {

            maxRate = (float)MulDiv(monitorRateHz, fps.Denominator, fps.Numerator);
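
// Qt-side event handling: when a presented sample reaches the presenter's
// QObject, the frame's start and end times are shifted by m_positionOffset
// (the millisecond position scaled to the microsecond timescale below) and
// the rotation is read from the mixer's current input type via
// MF_MT_VIDEO_ROTATION.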
    switch (int(e->type())) {

    if (m_positionOffset) {
        if (frame.startTime())
            frame.setStartTime(frame.startTime() + m_positionOffset);
        if (frame.endTime())
            frame.setEndTime(frame.endTime() + m_positionOffset);

    ComPtr<IMFMediaType> inputStreamType;
    if (SUCCEEDED(m_mixer->GetInputCurrentType(0, inputStreamType.GetAddressOf()))) {
        auto rotation = static_cast<MFVideoRotationFormat>(MFGetAttributeUINT32(inputStreamType.Get(), MF_MT_VIDEO_ROTATION, 0));

    m_positionOffset = position * 1000;

    IMFDesiredSample *desired = NULL;

    hr = sample->QueryInterface(IID_PPV_ARGS(&desired));

        desired->SetDesiredSampleTimeAndDuration(sampleTime, duration);

    IMFDesiredSample *desired = NULL;
    IUnknown *unkSwapChain = NULL;

    hr = sample->QueryInterface(IID_PPV_ARGS(&desired));
    if (SUCCEEDED(hr)) {

    IMFAttributes *attributes = NULL;

    HRESULT hr = mixer->GetAttributes(&attributes);
    if (SUCCEEDED(hr)) {
        hr = attributes->SetBlob(VIDEO_ZOOM_RECT, reinterpret_cast<const UINT8*>(&sourceRect),
                                 sizeof(sourceRect));
        attributes->Release();

    if (FAILED(type->GetMajorType(&majorType)))

    if (majorType != MFMediaType_Video)

    if (FAILED(type->GetGUID(MF_MT_SUBTYPE, &subtype)))