Qt 6.x
The Qt SDK
Loading...
Searching...
No Matches
qffmpegvideobuffer.cpp
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3
5#include "private/qvideotexturehelper_p.h"
6#include "qffmpeghwaccel_p.h"
7
8extern "C" {
9#include <libavutil/pixdesc.h>
10#include <libavutil/hdr_dynamic_metadata.h>
11#include <libavutil/mastering_display_metadata.h>
12}
13
14static bool isFrameFlipped(const AVFrame& frame) {
15 for (int i = 0; i < AV_NUM_DATA_POINTERS && frame.data[i]; ++i) {
16 if (frame.linesize[i] < 0)
17 return true;
18 }
19
20 return false;
21}
22
// Constructor (member list shows: QFFmpegVideoBuffer(AVFrameUPtr frame)).
// Takes ownership of a decoded AVFrame and records whether it is a hardware
// (GPU) or software (CPU) frame, deriving the Qt pixel format accordingly.
// NOTE(review): this is a documentation-page extraction; the hyperlinked
// listing lines 25-26 (the signature/initializer list) and 37 are missing —
// restore them from the upstream file before compiling.
24
27{
// A frame carrying hw_frames_ctx lives in GPU memory: keep it in hwFrame and
// ask the HW-accel helper for its underlying pixel format.
28 if (frame->hw_frames_ctx) {
29 hwFrame = std::move(frame);
30 m_pixelFormat = toQtPixelFormat(QFFmpeg::HWAccel::format(hwFrame.get()));
31 return;
32 }
33
// Otherwise it is a CPU-side frame; its AVPixelFormat maps directly.
34 swFrame = std::move(frame);
35 m_pixelFormat = toQtPixelFormat(AVPixelFormat(swFrame->format));
36
// NOTE(review): stripped listing line 37 presumably called convertSWFrame()
// to normalize unsupported/flipped formats — confirm against upstream.
38}
39
// Converts the software frame to the pixel format advertised in
// m_pixelFormat (and to top-down row order) when the decoded format differs
// or the frame is stored bottom-up.
// NOTE(review): extraction artifact — the hyperlinked signature line(s)
// (listing 40/42) are missing from this scrape.
41
43{
44 Q_ASSERT(swFrame);
45 bool needsConversion = false;
46 auto pixelFormat = toQtPixelFormat(AVPixelFormat(swFrame->format), &needsConversion);
// Re-convert when the source format doesn't match what we advertised, or
// when any plane has a negative linesize (sws_scale output is top-down).
47 if (pixelFormat != m_pixelFormat || isFrameFlipped(*swFrame)) {
48 AVPixelFormat newFormat = toAVPixelFormat(m_pixelFormat);
49 // convert the format into something we can handle
// NOTE(review): sws_getContext() can return nullptr and the return value of
// av_frame_get_buffer() below is ignored — both worth hardening upstream.
50 SwsContext *c = sws_getContext(swFrame->width, swFrame->height, AVPixelFormat(swFrame->format),
51 swFrame->width, swFrame->height, newFormat,
52 SWS_BICUBIC, nullptr, nullptr, nullptr);
53
// Allocate a destination frame of identical geometry in the target format.
54 auto newFrame = QFFmpeg::makeAVFrame();
55 newFrame->width = swFrame->width;
56 newFrame->height = swFrame->height;
57 newFrame->format = newFormat;
58 av_frame_get_buffer(newFrame.get(), 0);
59
60 sws_scale(c, swFrame->data, swFrame->linesize, 0, swFrame->height, newFrame->data, newFrame->linesize);
// Keep the borrowed 'frame' pointer in sync if it aliased the old swFrame.
61 if (frame == swFrame.get())
62 frame = newFrame.get();
63 swFrame = std::move(newFrame);
64 sws_freeContext(c);
65 }
66}
67
// Installs the RHI texture converter used by mapTextures() (member list:
// void setTextureConverter(const QFFmpeg::TextureConverter &converter)) and
// primes it with this buffer's hardware frame (init(AVFrame*), see the
// member dump).
// NOTE(review): extraction artifact — the signature line and listing line 72
// (a hyperlink, presumably setting m_type) are missing; confirm upstream.
69{
70 textureConverter = converter;
71 textureConverter.init(hwFrame.get());
73}
74
// Maps the frame's AVColorSpace onto QVideoFrameFormat::ColorSpace
// (member list: QVideoFrameFormat::ColorSpace colorSpace() const).
// NOTE(review): extraction artifact — the signature and every
// 'return QVideoFrameFormat::ColorSpace_...' line (listing 88, 90, 92, 95,
// 98) were hyperlinks and are missing; each case group below originally
// ended with one such return. Restore from upstream before compiling.
76{
77 switch (frame->colorspace) {
// First group: formats Qt has no dedicated ColorSpace for; shares the
// default label (return stripped).
78 default:
79 case AVCOL_SPC_UNSPECIFIED:
80 case AVCOL_SPC_RESERVED:
81 case AVCOL_SPC_FCC:
82 case AVCOL_SPC_SMPTE240M:
83 case AVCOL_SPC_YCGCO:
84 case AVCOL_SPC_SMPTE2085:
85 case AVCOL_SPC_CHROMA_DERIVED_NCL:
86 case AVCOL_SPC_CHROMA_DERIVED_CL:
87 case AVCOL_SPC_ICTCP: // BT.2100 ICtCp
89 case AVCOL_SPC_RGB:
91 case AVCOL_SPC_BT709:
93 case AVCOL_SPC_BT470BG: // BT601
94 case AVCOL_SPC_SMPTE170M: // Also BT601
96 case AVCOL_SPC_BT2020_NCL: // Non constant luminence
97 case AVCOL_SPC_BT2020_CL: // Constant luminence
99 }
100}
101
// Maps the frame's AVColorTransferCharacteristic onto
// QVideoFrameFormat::ColorTransfer (member list:
// QVideoFrameFormat::ColorTransfer colorTransfer() const).
// NOTE(review): extraction artifact — the signature and the
// 'return QVideoFrameFormat::ColorTransfer_...' lines (listing 111, 116,
// 118, 120, 122, 124, 126, 130) were hyperlinks and are missing; each case
// group ended with one such return, and listing 130 returned the fallback
// after the switch. Restore from upstream before compiling.
103{
104 switch (frame->color_trc) {
105 case AVCOL_TRC_BT709:
106 // The following three cases have transfer characteristics identical to BT709
107 case AVCOL_TRC_BT1361_ECG:
108 case AVCOL_TRC_BT2020_10:
109 case AVCOL_TRC_BT2020_12:
110 case AVCOL_TRC_SMPTE240M: // almost identical to bt709
112 case AVCOL_TRC_GAMMA22:
113 case AVCOL_TRC_SMPTE428 : // No idea, let's hope for the best...
114 case AVCOL_TRC_IEC61966_2_1: // sRGB, close enough to 2.2...
115 case AVCOL_TRC_IEC61966_2_4: // not quite, but probably close enough
117 case AVCOL_TRC_GAMMA28:
119 case AVCOL_TRC_SMPTE170M:
121 case AVCOL_TRC_LINEAR:
123 case AVCOL_TRC_SMPTE2084:
125 case AVCOL_TRC_ARIB_STD_B67:
127 default:
128 break;
129 }
131}
132
// Maps the frame's AVColorRange onto QVideoFrameFormat::ColorRange
// (member list: QVideoFrameFormat::ColorRange colorRange() const).
// NOTE(review): extraction artifact — the signature and the return lines
// (listing 137, 139, 141) were hyperlinks and are missing; MPEG/JPEG/default
// each returned a distinct ColorRange value. Restore from upstream.
134{
135 switch (frame->color_range) {
136 case AVCOL_RANGE_MPEG:
138 case AVCOL_RANGE_JPEG:
140 default:
142 }
143}
144
// Scans the frame's side data for mastering-display metadata and returns the
// mastering display's maximum luminance; -1 when no such metadata exists.
// NOTE(review): the hyperlinked signature line is missing from this
// extraction — confirm name/return type (presumably float maxNits()) upstream.
146{
147 float maxNits = -1;
148 for (int i = 0; i < frame->nb_side_data; ++i) {
149 AVFrameSideData *sd = frame->side_data[i];
150 // TODO: Longer term we might want to also support HDR10+ dynamic metadata
151 if (sd->type == AV_FRAME_DATA_MASTERING_DISPLAY_METADATA) {
152 auto *data = reinterpret_cast<AVMasteringDisplayMetadata *>(sd->data);
// QFFmpeg::mul returns std::optional (see member dump), guarding against
// overflow/invalid rationals. The 10'000 factor appears to follow the
// 0.0001-nit fixed-point convention of mastering metadata — confirm.
153 auto maybeLum = QFFmpeg::mul(10'000., data->max_luminance);
154 if (maybeLum)
155 maxNits = float(maybeLum.value());
156 }
157 }
158 return maxNits;
159}
160
// Accessor for the mapping mode recorded by map() (member list:
// QVideoFrame::MapMode mapMode() const override).
// NOTE(review): the hyperlinked signature line is missing from this extraction.
162{
163 return m_mode;
164}
165
// Maps the frame's planes into CPU-accessible memory (member list:
// MapData map(QVideoFrame::MapMode mode) override). For hardware frames the
// pixel data is first downloaded from the GPU into swFrame.
// NOTE(review): extraction artifact — the hyperlinked signature, listing 177
// (likely a convertSWFrame() call) and listings 182-183 (presumably
// 'MapData mapData;' plus fetching 'desc' via
// QVideoTextureHelper::textureDescription(pixelFormat()) — see the member
// dump) are missing. Restore from upstream before compiling.
167{
168 if (!swFrame) {
169 Q_ASSERT(hwFrame && hwFrame->hw_frames_ctx);
170 swFrame = QFFmpeg::makeAVFrame();
171 /* retrieve data from GPU to CPU */
172 int ret = av_hwframe_transfer_data(swFrame.get(), hwFrame.get(), 0);
// On transfer failure: warn and return an empty MapData.
173 if (ret < 0) {
174 qWarning() << "Error transferring the data to system memory:" << ret;
175 return {};
176 }
178 }
179
180 m_mode = mode;
181
// Fill per-plane pointers/strides/sizes from the software frame; negative
// strides were already eliminated (see the assert and convertSWFrame()).
184 mapData.nPlanes = desc->nplanes;
185 for (int i = 0; i < mapData.nPlanes; ++i) {
186 Q_ASSERT(swFrame->linesize[i] >= 0);
187
188 mapData.data[i] = swFrame->data[i];
189 mapData.bytesPerLine[i] = swFrame->linesize[i];
190 mapData.size[i] = mapData.bytesPerLine[i]*desc->heightForPlane(swFrame->height, i);
191 }
192 return mapData;
193}
194
// Releases the mapping created by map() (member list: void unmap() override).
// NOTE(review): the hyperlinked signature line is missing from this extraction.
196{
197 // nothing to do here for SW buffers
198}
199
200std::unique_ptr<QVideoFrameTextures> QFFmpegVideoBuffer::mapTextures(QRhi *)
201{
202 if (textures)
203 return {};
204 if (!hwFrame)
205 return {};
206 textures.reset(textureConverter.getTextures(hwFrame.get()));
207 if (!textures) {
208 static thread_local int lastFormat = 0;
209 if (std::exchange(lastFormat, hwFrame->format) != hwFrame->format) // prevent logging spam
210 qWarning() << " failed to get textures for frame; format:" << hwFrame->format
211 << "textureConverter" << (textureConverter.isNull() ? "null" : "not null");
212 }
213 return {};
214}
215
// Returns the RHI texture handle for the given plane, or 0 when mapTextures()
// has not produced a texture set (member list:
// virtual quint64 textureHandle(int plane) const override).
// NOTE(review): the hyperlinked signature line is missing from this extraction.
217{
218 return textures ? textures->textureHandle(plane) : 0;
219}
220
// Returns the Qt pixel format determined in the constructor (member list:
// QVideoFrameFormat::PixelFormat pixelFormat() const).
// NOTE(review): the hyperlinked signature line is missing from this extraction.
222{
223 return m_pixelFormat;
224}
225
// Returns the frame's dimensions in pixels.
// NOTE(review): the hyperlinked signature line is missing from this
// extraction; 'frame' here presumably resolves to the underlying AVFrame
// (member or accessor not visible in this scrape) — confirm upstream.
227{
228 return QSize(frame->width, frame->height);
229}
230
// Maps an FFmpeg AVPixelFormat to the closest QVideoFrameFormat::PixelFormat.
// When no direct mapping exists, *needsConversion (optional, may be null) is
// set to true and a fallback is picked from the pixel format descriptor.
// NOTE(review): extraction artifact — every 'return QVideoFrameFormat::...'
// line inside the switch (listing 241, 243, 245, ...) and after the two
// trailing ifs (listing 292-296) was a hyperlink and is missing; each case
// below originally had its return on the following line. Restore upstream.
231QVideoFrameFormat::PixelFormat QFFmpegVideoBuffer::toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion)
232{
// Optimistically assume a direct mapping; revised below if we fall through.
233 if (needsConversion)
234 *needsConversion = false;
235
236 switch (avPixelFormat) {
237 default:
238 break;
239 case AV_PIX_FMT_NONE:
240 Q_ASSERT(!"Invalid avPixelFormat!");
// RGB family (returns stripped by extraction):
242 case AV_PIX_FMT_ARGB:
244 case AV_PIX_FMT_0RGB:
246 case AV_PIX_FMT_BGRA:
248 case AV_PIX_FMT_BGR0:
250 case AV_PIX_FMT_ABGR:
252 case AV_PIX_FMT_0BGR:
254 case AV_PIX_FMT_RGBA:
256 case AV_PIX_FMT_RGB0:
258
// YUV/grayscale family (returns stripped by extraction):
259 case AV_PIX_FMT_YUV422P:
261 case AV_PIX_FMT_YUV420P:
263 case AV_PIX_FMT_YUV420P10:
265 case AV_PIX_FMT_UYVY422:
267 case AV_PIX_FMT_YUYV422:
269 case AV_PIX_FMT_NV12:
271 case AV_PIX_FMT_NV21:
273 case AV_PIX_FMT_GRAY8:
275 case AV_PIX_FMT_GRAY16:
277
// 10/16-bit biplanar and platform formats (returns stripped by extraction):
278 case AV_PIX_FMT_P010:
280 case AV_PIX_FMT_P016:
282 case AV_PIX_FMT_MEDIACODEC:
284 }
285
// No direct mapping: flag conversion and choose a fallback by descriptor.
286 if (needsConversion)
287 *needsConversion = true;
288
289 const AVPixFmtDescriptor *descriptor = av_pix_fmt_desc_get(avPixelFormat);
290
// Fallbacks (return lines stripped): an RGB-flagged format, then a >8-bit
// format, then the final default — all originally returned here.
291 if (descriptor->flags & AV_PIX_FMT_FLAG_RGB)
293
294 if (descriptor->comp[0].depth > 8)
297}
298
// Inverse mapping: QVideoFrameFormat::PixelFormat to AVPixelFormat
// (member list: static AVPixelFormat toAVPixelFormat(
// QVideoFrameFormat::PixelFormat pixelFormat)). Unknown formats yield
// AV_PIX_FMT_NONE.
// NOTE(review): extraction artifact — the signature and every
// 'case QVideoFrameFormat::Format_...:' label were hyperlinks and are
// missing; only the return statements survive below. Each return originally
// followed one (or more) case labels. Restore from upstream before compiling.
300{
301 switch (pixelFormat) {
302 default:
311 return AV_PIX_FMT_NONE;
313 // We're using the data from the converted QImage here, which is in BGRA.
314 return AV_PIX_FMT_BGRA;
317 return AV_PIX_FMT_ARGB;
319 return AV_PIX_FMT_0RGB;
322 return AV_PIX_FMT_BGRA;
324 return AV_PIX_FMT_BGR0;
326 return AV_PIX_FMT_ABGR;
328 return AV_PIX_FMT_0BGR;
330 return AV_PIX_FMT_RGBA;
332 return AV_PIX_FMT_RGB0;
333
335 return AV_PIX_FMT_YUV422P;
337 return AV_PIX_FMT_YUV420P;
339 return AV_PIX_FMT_YUV420P10;
341 return AV_PIX_FMT_UYVY422;
343 return AV_PIX_FMT_YUYV422;
345 return AV_PIX_FMT_NV12;
347 return AV_PIX_FMT_NV21;
349 return AV_PIX_FMT_GRAY8;
351 return AV_PIX_FMT_GRAY16;
352
354 return AV_PIX_FMT_P010;
356 return AV_PIX_FMT_P016;
357
359 return AV_PIX_FMT_MEDIACODEC;
360 }
361}
362
The QAbstractVideoBuffer class is an abstraction for video data. \inmodule QtMultimedia.
QVideoFrame::HandleType m_type
QVideoFrameFormat::PixelFormat pixelFormat() const
QVideoFrameFormat::ColorSpace colorSpace() const
void unmap() override
Releases the memory mapped by the map() function.
QFFmpegVideoBuffer(AVFrameUPtr frame)
MapData map(QVideoFrame::MapMode mode) override
Independently maps the planes of a video buffer to memory.
QVideoFrameFormat::ColorTransfer colorTransfer() const
QFFmpeg::AVFrameUPtr AVFrameUPtr
void setTextureConverter(const QFFmpeg::TextureConverter &converter)
virtual std::unique_ptr< QVideoFrameTextures > mapTextures(QRhi *) override
virtual quint64 textureHandle(int plane) const override
Returns a texture handle to the data buffer.
static AVPixelFormat toAVPixelFormat(QVideoFrameFormat::PixelFormat pixelFormat)
QVideoFrameFormat::ColorRange colorRange() const
static QVideoFrameFormat::PixelFormat toQtPixelFormat(AVPixelFormat avPixelFormat, bool *needsConversion=nullptr)
~QFFmpegVideoBuffer() override
QVideoFrame::MapMode mapMode() const override
static AVPixelFormat format(AVFrame *frame)
TextureSet * getTextures(AVFrame *frame)
void init(AVFrame *frame)
\inmodule QtGui
Definition qrhi.h:1767
\inmodule QtCore
Definition qsize.h:25
ColorSpace
Enumerates the color space of video frames.
ColorTransfer
\value ColorTransfer_Unknown The color transfer function is unknown.
PixelFormat
Enumerates video data types.
ColorRange
Describes the color range used by the video data.
The QVideoFrame class represents a frame of video data.
Definition qvideoframe.h:26
MapMode
Enumerates how a video buffer's data is mapped to system memory.
Definition qvideoframe.h:36
AVFrameUPtr makeAVFrame()
Definition qffmpeg_p.h:119
std::optional< qint64 > mul(qint64 a, AVRational b)
Definition qffmpeg_p.h:31
Combined button and popup list for selecting options.
const TextureDescription * textureDescription(QVideoFrameFormat::PixelFormat format)
static QDBusError::ErrorType get(const char *name)
static bool isFrameFlipped(const AVFrame &frame)
#define qWarning
Definition qlogging.h:162
return ret
GLenum mode
GLuint const GLuint GLuint const GLuint * textures
GLint GLsizei GLsizei GLenum GLenum GLsizei void * data
const GLubyte * c
static QAbstractVideoBuffer::MapData mapData(const camera_frame_nv12_t &frame, unsigned char *baseAddress)
#define Q_ASSERT(cond)
Definition qrandom.cpp:47
@ desc
unsigned long long quint64
Definition qtypes.h:56
QFrame frame
[0]