/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdatomic.h>
#include <stdint.h>
#include <string.h>

#include <mfx/mfxvideo.h>

#include "config.h"

#if HAVE_PTHREADS
#include <pthread.h>
#endif

#define COBJMACROS
#if CONFIG_VAAPI
#include "hwcontext_vaapi.h"
#endif
#if CONFIG_D3D11VA
#include "hwcontext_d3d11va.h"
#endif
#if CONFIG_DXVA2
#include "hwcontext_dxva2.h"
#endif

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_qsv.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"
#include "time.h"
#include "imgutils.h"

#define QSV_VERSION_ATLEAST(MAJOR, MINOR)   \
    (MFX_VERSION_MAJOR > (MAJOR) ||         \
     MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))

#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))

typedef struct QSVDevicePriv {
    AVBufferRef *child_device_ctx;
} QSVDevicePriv;

typedef struct QSVDeviceContext {
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;

    enum AVHWDeviceType child_device_type;
    enum AVPixelFormat child_pix_fmt;
} QSVDeviceContext;

typedef struct QSVFramesContext {
    mfxSession session_download;
    atomic_int session_download_init;
    mfxSession session_upload;
    atomic_int session_upload_init;
#if HAVE_PTHREADS
    pthread_mutex_t session_lock;
#endif

    AVBufferRef *child_frames_ref;
    mfxFrameSurface1 *surfaces_internal;
    mfxHDLPair *handle_pairs_internal;
    int nb_surfaces_used;

    // used in the frame allocator for non-opaque surfaces
    mfxMemId *mem_ids;
    // used in the opaque alloc request for opaque surfaces
    mfxFrameSurface1 **surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];
    AVFrame realigned_upload_frame;
    AVFrame realigned_download_frame;
} QSVFramesContext;

static const struct {
    enum AVPixelFormat pix_fmt;
    uint32_t fourcc;
} supported_pixel_formats[] = {
    { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
    { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
    { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
    { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
#if CONFIG_VAAPI
    { AV_PIX_FMT_YUYV422,
                       MFX_FOURCC_YUY2 },
    { AV_PIX_FMT_Y210,
                       MFX_FOURCC_Y210 },
#endif
};

extern int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
                                          enum AVHWDeviceType base_dev_type,
                                          void **base_handle);

/**
 * Caller needs to allocate enough space for the base_handle pointer(s).
 **/
int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
                                   enum AVHWDeviceType base_dev_type,
                                   void **base_handle)
{
    mfxHDLPair *handle_pair;
    handle_pair = surf->Data.MemId;
    switch (base_dev_type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        base_handle[0] = handle_pair->first;
        return 0;
#endif
#if CONFIG_D3D11VA
    case AV_HWDEVICE_TYPE_D3D11VA:
        base_handle[0] = handle_pair->first;
        base_handle[1] = handle_pair->second;
        return 0;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        base_handle[0] = handle_pair->first;
        return 0;
#endif
    }
    return AVERROR(EINVAL);
}
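
/* Illustrative usage sketch (not code from this file; variable names are
 * hypothetical): a caller wanting the raw child-device handle behind a QSV
 * surface passes an array large enough for the chosen device type, e.g. two
 * slots for D3D11:
 *
 *     void *handles[2] = { NULL, NULL };
 *     mfxFrameSurface1 *surf = (mfxFrameSurface1 *)frame->data[3];
 *     if (!ff_qsv_get_surface_base_handle(surf, AV_HWDEVICE_TYPE_D3D11VA, handles)) {
 *         // handles[0] holds the texture pointer, handles[1] the pair's second member
 *     }
 */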

static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
        if (supported_pixel_formats[i].pix_fmt == pix_fmt)
            return supported_pixel_formats[i].fourcc;
    }
    return 0;
}

#if CONFIG_D3D11VA
static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
{
    uint32_t bind_flags = 0;

    if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
        bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
    else
        bind_flags = D3D11_BIND_DECODER;

    if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
        bind_flags = D3D11_BIND_RENDER_TARGET;

    return bind_flags;
}
#endif

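/* Replicate the right-most valid pixels and the bottom-most valid row of src
 * into the padding area of dst, which is assumed to be an aligned copy of
 * src. Used before uploading frames whose dimensions are not 16-aligned. */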
static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
{
    const AVPixFmtDescriptor *desc;
    int i, planes_nb = 0;
    if (dst->format != src->format)
        return AVERROR(EINVAL);

    desc = av_pix_fmt_desc_get(dst->format);

    for (i = 0; i < desc->nb_components; i++)
        planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);

    for (i = 0; i < planes_nb; i++) {
        int sheight, dheight, y;
        ptrdiff_t swidth = av_image_get_linesize(src->format,
                                                 src->width,
                                                 i);
        ptrdiff_t dwidth = av_image_get_linesize(dst->format,
                                                 dst->width,
                                                 i);
        const AVComponentDescriptor comp = desc->comp[i];
        if (swidth < 0 || dwidth < 0) {
            av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
            return AVERROR(EINVAL);
        }
        sheight = src->height;
        dheight = dst->height;
        if (i) {
            sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
            dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
        }
        // fill right padding
        for (y = 0; y < sheight; y++) {
            void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
            av_memcpy_backptr(line_ptr,
                              comp.depth > 8 ? 2 : 1,
                              dwidth - swidth);
        }
        // fill bottom padding
        for (y = sheight; y < dheight; y++) {
            memcpy(dst->data[i]+y*dst->linesize[i],
                   dst->data[i]+(sheight-1)*dst->linesize[i],
                   dwidth);
        }
    }
    return 0;
}

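/* Query the implementation and API version of the user-supplied session and,
 * when possible, retrieve the underlying native device handle (VADisplay,
 * ID3D11Device or IDirect3DDeviceManager9). The handle is optional; without
 * it the internal non-opaque surface pool cannot be created. */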
static int qsv_device_init(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDeviceContext *s = ctx->internal->priv;
    int hw_handle_supported = 0;
    mfxHandleType handle_type;
    enum AVHWDeviceType device_type;
    enum AVPixelFormat pix_fmt;
    mfxStatus err;

    err = MFXQueryIMPL(hwctx->session, &s->impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(hwctx->session, &s->ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
#if CONFIG_VAAPI
        handle_type = MFX_HANDLE_VA_DISPLAY;
        device_type = AV_HWDEVICE_TYPE_VAAPI;
        pix_fmt = AV_PIX_FMT_VAAPI;
        hw_handle_supported = 1;
#endif
    } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
#if CONFIG_D3D11VA
        handle_type = MFX_HANDLE_D3D11_DEVICE;
        device_type = AV_HWDEVICE_TYPE_D3D11VA;
        pix_fmt = AV_PIX_FMT_D3D11;
        hw_handle_supported = 1;
#endif
    } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
#if CONFIG_DXVA2
        handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
        device_type = AV_HWDEVICE_TYPE_DXVA2;
        pix_fmt = AV_PIX_FMT_DXVA2_VLD;
        hw_handle_supported = 1;
#endif
    }

    if (hw_handle_supported) {
        err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
        if (err == MFX_ERR_NONE) {
            s->handle_type = handle_type;
            s->child_device_type = device_type;
            s->child_pix_fmt = pix_fmt;
        }
    }
    if (!s->handle) {
        av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }
    return 0;
}

static void qsv_frames_uninit(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;

    if (s->session_download) {
        MFXVideoVPP_Close(s->session_download);
        MFXClose(s->session_download);
    }
    s->session_download = NULL;
    s->session_download_init = 0;

    if (s->session_upload) {
        MFXVideoVPP_Close(s->session_upload);
        MFXClose(s->session_upload);
    }
    s->session_upload = NULL;
    s->session_upload_init = 0;

#if HAVE_PTHREADS
    pthread_mutex_destroy(&s->session_lock);
#endif

    av_freep(&s->mem_ids);
    av_freep(&s->surface_ptrs);
    av_freep(&s->surfaces_internal);
    av_freep(&s->handle_pairs_internal);
    av_frame_unref(&s->realigned_upload_frame);
    av_frame_unref(&s->realigned_download_frame);
    av_buffer_unref(&s->child_frames_ref);
}

static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}

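/* Buffer pool allocator: hand out the preallocated surfaces from
 * surfaces_internal one by one. The returned buffers use a no-op free
 * callback since the surfaces are owned by the frames context. */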
static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;

    if (s->nb_surfaces_used < hwctx->nb_surfaces) {
        s->nb_surfaces_used++;
        return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
                                sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
    }

    return NULL;
}

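/* Create a child hardware frames context (VAAPI, D3D11VA or DXVA2) backed by
 * the device handle retrieved at init time, and wire each child surface into
 * an mfxHDLPair used as the MemId of the corresponding mfxFrameSurface1. */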
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
{
    AVQSVFramesContext *hwctx = ctx->hwctx;
    QSVFramesContext *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    if (!device_priv->handle) {
        av_log(ctx, AV_LOG_ERROR,
               "Cannot create a non-opaque internal surface pool without "
               "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_D3D11VA
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
        AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
        child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    child_frames_ctx->format = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    child_frames_ctx->width = FFALIGN(ctx->width, 16);
    child_frames_ctx->height = FFALIGN(ctx->height, 16);

#if CONFIG_D3D11VA
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
        AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type == 0)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
            child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
        child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++) {
            s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
            s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
            s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
        }
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_D3D11VA
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
        AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++) {
            s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
            if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
                s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
            } else {
                s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
            }
            s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
        }
        if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
            hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        } else {
            hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++) {
            s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
            s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
            s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
        }
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    s->child_frames_ref = child_frames_ref;
    child_frames_ref = NULL;

fail:
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}

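/* Fill the static mfxFrameInfo fields of a surface from the frames context:
 * bit depth, chroma format, 16-aligned Width/Height with the real dimensions
 * as crop, and a nominal 25/1 frame rate. */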
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
{
    const AVPixFmtDescriptor *desc;
    uint32_t fourcc;

    desc = av_pix_fmt_desc_get(ctx->sw_format);
    if (!desc)
        return AVERROR(EINVAL);

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc)
        return AVERROR(EINVAL);

    surf->Info.BitDepthLuma = desc->comp[0].depth;
    surf->Info.BitDepthChroma = desc->comp[0].depth;
    surf->Info.Shift = desc->comp[0].depth > 8;

    if (desc->log2_chroma_w && desc->log2_chroma_h)
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
    else if (desc->log2_chroma_w)
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
    else
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;

    surf->Info.FourCC = fourcc;
    surf->Info.Width = FFALIGN(ctx->width, 16);
    surf->Info.CropW = ctx->width;
    surf->Info.Height = FFALIGN(ctx->height, 16);
    surf->Info.CropH = ctx->height;
    surf->Info.FrameRateExtN = 25;
    surf->Info.FrameRateExtD = 1;
    surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

    return 0;
}

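/* Build the internal fixed-size surface pool: allocate the handle pairs and
 * mfxFrameSurface1 array, initialize every surface, create the child frames
 * context for non-opaque pools and register the buffer pool on top. */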
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int i, ret = 0;

    if (ctx->initial_pool_size <= 0) {
        av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
        return AVERROR(EINVAL);
    }

    s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
                                         sizeof(*s->handle_pairs_internal));
    if (!s->handle_pairs_internal)
        return AVERROR(ENOMEM);

    s->surfaces_internal = av_calloc(ctx->initial_pool_size,
                                     sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
        if (ret < 0)
            return ret;
    }

    if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
        ret = qsv_init_child_ctx(ctx);
        if (ret < 0)
            return ret;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
                                                        ctx, qsv_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}

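/* Minimal mfxFrameAllocator used by the internal upload/download VPP
 * sessions. It only serves external VPP requests by handing back the MemIds
 * of the surfaces already owned by this frames context; Lock/Unlock are not
 * supported since the surfaces are never mapped through the allocator. */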
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    AVHWFramesContext *ctx = pthis;
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;
    mfxFrameInfo *i = &req->Info;
    mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;
    if (i->Width > i1->Width || i->Height > i1->Height ||
        i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
        av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
               "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
               i->Width, i->Height, i->FourCC, i->ChromaFormat,
               i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
        return MFX_ERR_UNSUPPORTED;
    }

    resp->mids = s->mem_ids;
    resp->NumFrameActual = hwctx->nb_surfaces;

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}

static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
    mfxHDLPair *pair_src = (mfxHDLPair*)mid;

    pair_dst->first = pair_src->first;

    if (pair_src->second != (mfxMemId)MFX_INFINITE)
        pair_dst->second = pair_src->second;
    return MFX_ERR_NONE;
}

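/* Create an internal MFX session running a pass-through VPP, used purely for
 * uploading (system -> video memory) or downloading (video -> system memory)
 * surfaces. Failure to open VPP is not fatal; transfers are then reported as
 * unsupported. */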
static int qsv_init_internal_session(AVHWFramesContext *ctx,
                                     mfxSession *session, int upload)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxFrameAllocator frame_allocator = {
        .pthis = ctx,
        .Alloc = frame_alloc,
        .Lock = frame_lock,
        .Unlock = frame_unlock,
        .GetHDL = frame_get_hdl,
        .Free = frame_free,
    };

    mfxVideoParam par;
    mfxStatus err;

    err = MFXInit(device_priv->impl, &device_priv->ver, session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
        return AVERROR_UNKNOWN;
    }

    if (device_priv->handle) {
        err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
                                     device_priv->handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (!opaque) {
        err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    if (opaque) {
        par.ExtParam = s->ext_buffers;
        par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
                                 MFX_IOPATTERN_IN_OPAQUE_MEMORY;
    } else {
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
                                 MFX_IOPATTERN_IN_VIDEO_MEMORY;
    }

    par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
                              MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    par.AsyncDepth = 1;

    par.vpp.In = frames_hwctx->surfaces[0].Info;

    /* Apparently VPP requires the frame rate to be set to some value, otherwise
     * init will fail (probably for the framerate conversion filter). Since we
     * are only doing data upload/download here, we just invent an arbitrary
     * value */
    par.vpp.In.FrameRateExtN = 25;
    par.vpp.In.FrameRateExtD = 1;
    par.vpp.Out = par.vpp.In;

    err = MFXVideoVPP_Init(*session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session. "
               "Surface upload/download will not be possible\n");
        MFXClose(*session);
        *session = NULL;
    }

    return 0;
}

static int qsv_frames_init(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    uint32_t fourcc;
    int i, ret;

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ret = qsv_init_pool(ctx, fourcc);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    if (opaque) {
        s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
                                    sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = frames_hwctx->surfaces + i;

        s->opaque_alloc.In.Surfaces = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
        s->opaque_alloc.In.Type = frames_hwctx->frame_type;

        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
    } else {
        s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
    }

    s->session_download = NULL;
    s->session_upload = NULL;

    s->session_download_init = 0;
    s->session_upload_init = 0;

#if HAVE_PTHREADS
    pthread_mutex_init(&s->session_lock, NULL);
#endif

    return 0;
}

static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format = AV_PIX_FMT_QSV;
    frame->width = ctx->width;
    frame->height = ctx->height;

    return 0;
}

static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
                                    enum AVHWFrameTransferDirection dir,
                                    enum AVPixelFormat **formats)
{
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}

static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
                                  AVHWFramesContext *src_ctx, int flags)
{
    AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
    int i;

    switch (dst_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
                                               sizeof(*dst_hwctx->surface_ids));
            if (!dst_hwctx->surface_ids)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
                dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
        }
        break;
#endif
#if CONFIG_D3D11VA
    case AV_HWDEVICE_TYPE_D3D11VA:
        {
            D3D11_TEXTURE2D_DESC texDesc;
            dst_ctx->initial_pool_size = src_ctx->initial_pool_size;
            AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->texture_infos = av_calloc(src_hwctx->nb_surfaces,
                                                 sizeof(*dst_hwctx->texture_infos));
            if (!dst_hwctx->texture_infos)
                return AVERROR(ENOMEM);
            if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
                dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
                dst_hwctx->texture_infos[i].texture = (ID3D11Texture2D*)pair->first;
                dst_hwctx->texture_infos[i].index = pair->second == (mfxMemId)MFX_INFINITE ? (intptr_t)0 : (intptr_t)pair->second;
            }
            ID3D11Texture2D_GetDesc(dst_hwctx->texture_infos[0].texture, &texDesc);
            dst_hwctx->BindFlags = texDesc.BindFlags;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
                                            sizeof(*dst_hwctx->surfaces));
            if (!dst_hwctx->surfaces)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
                dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
            else
                dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    return 0;
}

static int qsv_map_from(AVHWFramesContext *ctx,
                        AVFrame *dst, const AVFrame *src, int flags)
{
    QSVFramesContext *s = ctx->internal->priv;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
    AVHWFramesContext *child_frames_ctx;
    const AVPixFmtDescriptor *desc;
    uint8_t *child_data;
    AVFrame *dummy;
    int ret = 0;

    if (!s->child_frames_ref)
        return AVERROR(ENOSYS);
    child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;

    switch (child_frames_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
    {
        mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
        /* pair->first is a VASurfaceID* while data[3] of a VAAPI frame holds the
         * VASurfaceID value itself, so dereference here. The cast goes through
         * intptr_t to widen the VASurfaceID (uint) to pointer size and avoid a
         * compile warning. */
        child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
        break;
    }
#endif
#if CONFIG_D3D11VA
    case AV_HWDEVICE_TYPE_D3D11VA:
    {
        mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
        child_data = pair->first;
        break;
    }
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
    {
        mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
        child_data = pair->first;
        break;
    }
#endif
    default:
        return AVERROR(ENOSYS);
    }

    if (dst->format == child_frames_ctx->format) {
        ret = ff_hwframe_map_create(s->child_frames_ref,
                                    dst, src, NULL, NULL);
        if (ret < 0)
            return ret;

        dst->width = src->width;
        dst->height = src->height;

        if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
            mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
            dst->data[0] = pair->first;
            dst->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
        } else {
            dst->data[3] = child_data;
        }

        return 0;
    }

    desc = av_pix_fmt_desc_get(dst->format);
    if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
        // This only supports mapping to software.
        return AVERROR(ENOSYS);
    }

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    dummy->buf[0] = av_buffer_ref(src->buf[0]);
    dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
    if (!dummy->buf[0] || !dummy->hw_frames_ctx)
        goto fail;

    dummy->format = child_frames_ctx->format;
    dummy->width = src->width;
    dummy->height = src->height;

    if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
        mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
        dummy->data[0] = pair->first;
        dummy->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
    } else {
        dummy->data[3] = child_data;
    }

    ret = av_hwframe_map(dst, dummy, flags);

fail:
    av_frame_free(&dummy);

    return ret;
}

static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
                                   const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
    int download = !!src->hw_frames_ctx;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);

    AVFrame *dummy;
    int ret;

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    dummy->format = child_frames_ctx->format;
    dummy->width = src->width;
    dummy->height = src->height;
    dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
    dummy->data[3] = surf->Data.MemId;
    dummy->hw_frames_ctx = s->child_frames_ref;

    ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
                     av_hwframe_transfer_data(dummy, src, 0);

    dummy->buf[0] = NULL;
    dummy->data[3] = NULL;
    dummy->hw_frames_ctx = NULL;

    av_frame_free(&dummy);

    return ret;
}

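/* Point the mfxFrameSurface1 plane pointers at the data planes of a system
 * memory AVFrame for the supported software formats, and propagate pitch and
 * timestamp. */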
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_P010:
        surface->Data.Y = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;

    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;

    case AV_PIX_FMT_BGRA:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
#if CONFIG_VAAPI
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;

    case AV_PIX_FMT_Y210:
        surface->Data.Y16 = (mfxU16 *)frame->data[0];
        surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
        surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
        break;
#endif
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch = frame->linesize[0];
    surface->Data.TimeStamp = frame->pts;

    return 0;
}

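/* Lazily create the upload or download session on first use, exactly once,
 * guarded by session_lock when pthreads are available. */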
static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
{
    QSVFramesContext *s = ctx->internal->priv;
    atomic_int *inited = upload ? &s->session_upload_init : &s->session_download_init;
    mfxSession *session = upload ? &s->session_upload : &s->session_download;
    int ret = 0;

    if (atomic_load(inited))
        return 0;

#if HAVE_PTHREADS
    pthread_mutex_lock(&s->session_lock);
#endif

    if (!atomic_load(inited)) {
        ret = qsv_init_internal_session(ctx, session, upload);
        atomic_store(inited, 1);
    }

#if HAVE_PTHREADS
    pthread_mutex_unlock(&s->session_lock);
#endif

    return ret;
}

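/* Download a QSV surface into a system memory frame via the internal VPP
 * session, falling back to the child frames context when no session is
 * available. Unaligned destinations go through a 16-aligned temporary frame. */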
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    mfxFrameSurface1 out = {{ 0 }};
    mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret = 0;
    /* download to temp frame if the output is not padded as libmfx requires */
    AVFrame *tmp_frame = &s->realigned_download_frame;
    AVFrame *dst_frame;
    int realigned = 0;

    ret = qsv_internal_session_check_init(ctx, 0);
    if (ret < 0)
        return ret;

    /* According to the MSDK spec for mfxFrameInfo, "Width must be a multiple of 16.
     * Height must be a multiple of 16 for progressive frame sequence and a
     * multiple of 32 otherwise.", so align all frames to 16 before downloading. */
    if (dst->height & 15 || dst->linesize[0] & 15) {
        realigned = 1;
        if (tmp_frame->format != dst->format ||
            tmp_frame->width != FFALIGN(dst->linesize[0], 16) ||
            tmp_frame->height != FFALIGN(dst->height, 16)) {
            av_frame_unref(tmp_frame);

            tmp_frame->format = dst->format;
            tmp_frame->width = FFALIGN(dst->linesize[0], 16);
            tmp_frame->height = FFALIGN(dst->height, 16);
            ret = av_frame_get_buffer(tmp_frame, 0);
            if (ret < 0)
                return ret;
        }
    }

    dst_frame = realigned ? tmp_frame : dst;

    if (!s->session_download) {
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst_frame, src);

        av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
        return AVERROR(ENOSYS);
    }

    out.Info = in->Info;
    map_frame_to_surface(dst_frame, &out);

    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    if (realigned) {
        tmp_frame->width = dst->width;
        tmp_frame->height = dst->height;
        ret = av_frame_copy(dst, tmp_frame);
        tmp_frame->width = FFALIGN(dst->linesize[0], 16);
        tmp_frame->height = FFALIGN(dst->height, 16);
        if (ret < 0)
            return ret;
    }

    return 0;
}

static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    mfxFrameSurface1 in = {{ 0 }};
    mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
    mfxFrameInfo tmp_info;

    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret = 0;
    /* make a copy if the input is not padded as libmfx requires */
    AVFrame *tmp_frame = &s->realigned_upload_frame;
    const AVFrame *src_frame;
    int realigned = 0;

    ret = qsv_internal_session_check_init(ctx, 1);
    if (ret < 0)
        return ret;

    /* According to the MSDK spec for mfxFrameInfo, "Width must be a multiple of 16.
     * Height must be a multiple of 16 for progressive frame sequence and a
     * multiple of 32 otherwise.", so align all frames to 16 before uploading. */
    if (src->height & 15 || src->linesize[0] & 15) {
        realigned = 1;
        if (tmp_frame->format != src->format ||
            tmp_frame->width != FFALIGN(src->width, 16) ||
            tmp_frame->height != FFALIGN(src->height, 16)) {
            av_frame_unref(tmp_frame);

            tmp_frame->format = src->format;
            tmp_frame->width = FFALIGN(src->width, 16);
            tmp_frame->height = FFALIGN(src->height, 16);
            ret = av_frame_get_buffer(tmp_frame, 0);
            if (ret < 0)
                return ret;
        }
        ret = av_frame_copy(tmp_frame, src);
        if (ret < 0) {
            av_frame_unref(tmp_frame);
            return ret;
        }
        ret = qsv_fill_border(tmp_frame, src);
        if (ret < 0) {
            av_frame_unref(tmp_frame);
            return ret;
        }

        tmp_info = out->Info;
        out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
        out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
    }

    src_frame = realigned ? tmp_frame : src;

    if (!s->session_upload) {
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst, src_frame);

        av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
        return AVERROR(ENOSYS);
    }

    in.Info = out->Info;
    map_frame_to_surface(src_frame, &in);

    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
        return AVERROR_UNKNOWN;
    }

    if (realigned) {
        out->Info.CropW = tmp_info.CropW;
        out->Info.CropH = tmp_info.CropH;
    }

    return 0;
}

static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
                                AVHWFramesContext *src_ctx, int flags)
{
    QSVFramesContext *s = dst_ctx->internal->priv;
    AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
    int i;

    if (src_ctx->initial_pool_size == 0) {
        av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
               "mapped to QSV frames.\n");
        return AVERROR(EINVAL);
    }

    switch (src_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
            s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
                                                 sizeof(*s->handle_pairs_internal));
            if (!s->handle_pairs_internal)
                return AVERROR(ENOMEM);
            s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
                                             sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
                s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
                s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
        break;
#endif
#if CONFIG_D3D11VA
    case AV_HWDEVICE_TYPE_D3D11VA:
        {
            AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
            s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
                                                 sizeof(*s->handle_pairs_internal));
            if (!s->handle_pairs_internal)
                return AVERROR(ENOMEM);
            s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
                                             sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_ctx->initial_pool_size; i++) {
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
                if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
                    s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
                } else {
                    s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
                }
                s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
            }
            dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
            if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
                dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
            } else {
                dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
            }
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
            s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
                                                 sizeof(*s->handle_pairs_internal));
            if (!s->handle_pairs_internal)
                return AVERROR(ENOMEM);
            s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
                                             sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
                s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
                s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
                dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
            else
                dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    dst_hwctx->surfaces = s->surfaces_internal;

    return 0;
}

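/* Map a child-device frame (VAAPI/D3D11/DXVA2) to QSV by locating the QSV
 * surface in the destination pool that wraps the same native handle. */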
static int qsv_map_to(AVHWFramesContext *dst_ctx,
                      AVFrame *dst, const AVFrame *src, int flags)
{
    AVQSVFramesContext *hwctx = dst_ctx->hwctx;
    int i, err, index = -1;

    for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
        switch(src->format) {
#if CONFIG_VAAPI
        case AV_PIX_FMT_VAAPI:
        {
            mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
            if (*(VASurfaceID*)pair->first == (VASurfaceID)src->data[3]) {
                index = i;
                break;
            }
        }
#endif
#if CONFIG_D3D11VA
        case AV_PIX_FMT_D3D11:
        {
            mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
            if (pair->first == src->data[0]
                && (pair->second == src->data[1]
                    || (pair->second == (mfxMemId)MFX_INFINITE && src->data[1] == (uint8_t *)0))) {
                index = i;
                break;
            }
        }
#endif
#if CONFIG_DXVA2
        case AV_PIX_FMT_DXVA2_VLD:
        {
            mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
            if (pair->first == src->data[3]) {
                index = i;
                break;
            }
        }
#endif
        }
    }
    if (index < 0) {
        av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
               "is not in the mapped frames context.\n");
        return AVERROR(EINVAL);
    }

    err = ff_hwframe_map_create(dst->hw_frames_ctx,
                                dst, src, NULL, NULL);
    if (err)
        return err;

    dst->width = src->width;
    dst->height = src->height;
    dst->data[3] = (uint8_t*)&hwctx->surfaces[index];

    return 0;
}

static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
                                      const void *hwconfig,
                                      AVHWFramesConstraints *constraints)
{
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
        constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
    constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static void qsv_device_free(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDevicePriv *priv = ctx->user_opaque;

    if (hwctx->session)
        MFXClose(hwctx->session);

    av_buffer_unref(&priv->child_device_ctx);
    av_freep(&priv);
}

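/* Translate the device string ("auto", "sw", "hw", "hw2", ... or a numeric
 * mfxIMPL value) into an implementation, and OR in the VIA_D3D11/VIA_D3D9
 * flag matching the chosen child device type for hardware implementations. */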
static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
{
    static const struct {
        const char *name;
        mfxIMPL impl;
    } impl_map[] = {
        { "auto", MFX_IMPL_AUTO },
        { "sw", MFX_IMPL_SOFTWARE },
        { "hw", MFX_IMPL_HARDWARE },
        { "auto_any", MFX_IMPL_AUTO_ANY },
        { "hw_any", MFX_IMPL_HARDWARE_ANY },
        { "hw2", MFX_IMPL_HARDWARE2 },
        { "hw3", MFX_IMPL_HARDWARE3 },
        { "hw4", MFX_IMPL_HARDWARE4 },
    };

    mfxIMPL impl = MFX_IMPL_AUTO_ANY;
    int i;

    if (device) {
        for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
            if (!strcmp(device, impl_map[i].name)) {
                impl = impl_map[i].impl;
                break;
            }
        if (i == FF_ARRAY_ELEMS(impl_map))
            impl = strtol(device, NULL, 0);
    }

    if (impl != MFX_IMPL_SOFTWARE) {
        if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
            impl |= MFX_IMPL_VIA_D3D11;
        else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
            impl |= MFX_IMPL_VIA_D3D9;
    }

    return impl;
}

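/* Create the MFX session for a QSV device on top of an existing child device:
 * pick the native handle, open a session with the requested implementation,
 * query the actual API version, reopen the session with that version and
 * attach the child device handle to it. */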
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
                                        mfxIMPL implementation,
                                        AVHWDeviceContext *child_device_ctx,
                                        int flags)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;

    mfxVersion ver = { { 3, 1 } };
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxStatus err;
    int ret;

    switch (child_device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
            handle_type = MFX_HANDLE_VA_DISPLAY;
            handle = (mfxHDL)child_device_hwctx->display;
        }
        break;
#endif
#if CONFIG_D3D11VA
    case AV_HWDEVICE_TYPE_D3D11VA:
        {
            AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
            handle_type = MFX_HANDLE_D3D11_DEVICE;
            handle = (mfxHDL)child_device_hwctx->device;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
            handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
            handle = (mfxHDL)child_device_hwctx->devmgr;
        }
        break;
#endif
    default:
        ret = AVERROR(ENOSYS);
        goto fail;
    }

    err = MFXInit(implementation, &ver, &hwctx->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
               "%d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    err = MFXQueryVersion(hwctx->session, &ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE,
           "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
           MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);

    MFXClose(hwctx->session);

    err = MFXInit(implementation, &ver, &hwctx->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Error initializing an MFX session: %d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
               "%d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    return 0;

fail:
    if (hwctx->session)
        MFXClose(hwctx->session);
    return ret;
}

static int qsv_device_derive(AVHWDeviceContext *ctx,
                             AVHWDeviceContext *child_device_ctx,
                             AVDictionary *opts, int flags)
{
    mfxIMPL impl;
    impl = choose_implementation("hw_any", child_device_ctx->type);
    return qsv_device_derive_from_child(ctx, impl,
                                        child_device_ctx, flags);
}

static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
                             AVDictionary *opts, int flags)
{
    QSVDevicePriv *priv;
    enum AVHWDeviceType child_device_type;
    AVHWDeviceContext *child_device;
    AVDictionary *child_device_opts;
    AVDictionaryEntry *e;

    mfxIMPL impl;
    int ret;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free = qsv_device_free;

    e = av_dict_get(opts, "child_device_type", NULL, 0);
    if (e) {
        child_device_type = av_hwdevice_find_type_by_name(e->value);
        if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
                   "\"%s\".\n", e->value);
            return AVERROR(EINVAL);
        }
    } else if (CONFIG_VAAPI) {
        child_device_type = AV_HWDEVICE_TYPE_VAAPI;
    } else if (CONFIG_DXVA2) {
        av_log(NULL, AV_LOG_WARNING,
               "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
               "with old commandlines. This behaviour will be removed "
               "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
        child_device_type = AV_HWDEVICE_TYPE_DXVA2;
    } else if (CONFIG_D3D11VA) {
        child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
    } else {
        av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
        return AVERROR(ENOSYS);
    }

    child_device_opts = NULL;
    switch (child_device_type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            // libmfx does not actually implement VAAPI properly, rather it
            // depends on the specific behaviour of a matching iHD driver when
            // used on recent Intel hardware. Set options to the VAAPI device
            // creation so that we should pick a usable setup by default if
            // possible, even when multiple devices and drivers are available.
            av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
            av_dict_set(&child_device_opts, "driver", "iHD", 0);
        }
        break;
#endif
#if CONFIG_D3D11VA
    case AV_HWDEVICE_TYPE_D3D11VA:
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        break;
#endif
    default:
        {
            av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
            return AVERROR(ENOSYS);
        }
        break;
    }

    e = av_dict_get(opts, "child_device", NULL, 0);
    ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
                                 e ? e->value : NULL, child_device_opts, 0);

    av_dict_free(&child_device_opts);
    if (ret < 0)
        return ret;

    child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;

    impl = choose_implementation(device, child_device_type);

    return qsv_device_derive_from_child(ctx, impl, child_device, 0);
}

const HWContextType ff_hwcontext_type_qsv = {
    .type = AV_HWDEVICE_TYPE_QSV,
    .name = "QSV",

    .device_hwctx_size = sizeof(AVQSVDeviceContext),
    .device_priv_size = sizeof(QSVDeviceContext),
    .frames_hwctx_size = sizeof(AVQSVFramesContext),
    .frames_priv_size = sizeof(QSVFramesContext),

    .device_create = qsv_device_create,
    .device_derive = qsv_device_derive,
    .device_init = qsv_device_init,
    .frames_get_constraints = qsv_frames_get_constraints,
    .frames_init = qsv_frames_init,
    .frames_uninit = qsv_frames_uninit,
    .frames_get_buffer = qsv_get_buffer,
    .transfer_get_formats = qsv_transfer_get_formats,
    .transfer_data_to = qsv_transfer_data_to,
    .transfer_data_from = qsv_transfer_data_from,
    .map_to = qsv_map_to,
    .map_from = qsv_map_from,
    .frames_derive_to = qsv_frames_derive_to,
    .frames_derive_from = qsv_frames_derive_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
};