FFmpeg
hwcontext_amf.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "mem.h"
20 #include "buffer.h"
21 #include "pixfmt.h"
22 #include "pixdesc.h"
23 #include "imgutils.h"
24 #include "hwcontext.h"
25 #include "hwcontext_amf.h"
26 #include "hwcontext_internal.h"
27 #include "hwcontext_amf_internal.h"
28 
29 #include "libavutil/thread.h"
30 #include "libavutil/avassert.h"
31 
32 #include <AMF/core/Surface.h>
33 #include <AMF/core/Trace.h>
34 
35 #if CONFIG_VULKAN
36 #include "hwcontext_vulkan.h"
37 #endif
38 #if CONFIG_D3D11VA
40 #endif
41 #if CONFIG_D3D12VA
43 #endif
44 #if CONFIG_DXVA2
45 #define COBJMACROS
47 #endif
48 #ifdef _WIN32
49 #include "compat/w32dlfcn.h"
50 #else
51 #include <dlfcn.h>
52 #endif
53 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
54 
55 static void amf_lock_default(void *opaque)
56 {
57  ff_mutex_lock((AVMutex*)opaque);
58 }
59 
60 static void amf_unlock_default(void *opaque)
61 {
62  ff_mutex_unlock((AVMutex*)opaque);
63 }
64 
65 typedef struct AmfTraceWriter {
66  AMFTraceWriterVtbl *vtblp;
67  void *avctx;
68  AMFTraceWriterVtbl vtbl;
70 
71 static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis,
72  const wchar_t *scope, const wchar_t *message)
73 {
74  AmfTraceWriter *tracer = (AmfTraceWriter*)pThis;
75  av_log(tracer->avctx, AV_LOG_DEBUG, "%ls: %ls", scope, message); // \n is provided from AMF
76 }
77 
/* AMF trace callback: intentionally a no-op — av_log() has no buffering to flush. */
static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
{
}
81 
82 static AmfTraceWriter * amf_writer_alloc(void *avctx)
83 {
84  AmfTraceWriter * writer = av_mallocz(sizeof(AmfTraceWriter));
85  if (!writer)
86  return NULL;
87 
88  writer->vtblp = &writer->vtbl;
89  writer->vtblp->Write = AMFTraceWriter_Write;
90  writer->vtblp->Flush = AMFTraceWriter_Flush;
91  writer->avctx = avctx;
92 
93  return writer;
94 }
95 
96 static void amf_writer_free(void *opaque)
97 {
98  AmfTraceWriter *writer = (AmfTraceWriter *)opaque;
99  av_freep(&writer);
100 }
101 
/**
 * We still need AVHWFramesContext to utilize our hardware memory
 * otherwise, we will receive the error "HW format requires hw_frames_ctx to be non-NULL".
 * (libavfilter\buffersrc.c function query_formats)
 */
typedef struct {
    void *dummy; ///< placeholder; no AMF-specific per-frames state is required
} AMFFramesContext; /* restored: closing line was lost in extraction (name confirmed by sizeof(AMFFramesContext) below) */
111 typedef struct AVAMFFormatMap {
113  enum AMF_SURFACE_FORMAT amf_format;
114 } FormatMap;
115 
116 const FormatMap format_map[] =
117 {
118  { AV_PIX_FMT_NONE, AMF_SURFACE_UNKNOWN },
119  { AV_PIX_FMT_NV12, AMF_SURFACE_NV12 },
120  { AV_PIX_FMT_BGR0, AMF_SURFACE_BGRA },
121  { AV_PIX_FMT_RGB0, AMF_SURFACE_RGBA },
122  { AV_PIX_FMT_BGRA, AMF_SURFACE_BGRA },
123  { AV_PIX_FMT_ARGB, AMF_SURFACE_ARGB },
124  { AV_PIX_FMT_RGBA, AMF_SURFACE_RGBA },
125  { AV_PIX_FMT_GRAY8, AMF_SURFACE_GRAY8 },
126  { AV_PIX_FMT_YUV420P, AMF_SURFACE_YUV420P },
127  { AV_PIX_FMT_YUYV422, AMF_SURFACE_YUY2 },
128  { AV_PIX_FMT_P010, AMF_SURFACE_P010 },
129  { AV_PIX_FMT_X2BGR10, AMF_SURFACE_R10G10B10A2 },
130  { AV_PIX_FMT_RGBAF16, AMF_SURFACE_RGBA_F16},
131 };
132 
133 enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
134 {
135  int i;
136  for (i = 0; i < amf_countof(format_map); i++) {
137  if (format_map[i].av_format == fmt) {
138  return format_map[i].amf_format;
139  }
140  }
141  return AMF_SURFACE_UNKNOWN;
142 }
143 
144 enum AVPixelFormat av_amf_to_av_format(enum AMF_SURFACE_FORMAT fmt)
145 {
146  int i;
147  for (i = 0; i < amf_countof(format_map); i++) {
148  if (format_map[i].amf_format == fmt) {
149  return format_map[i].av_format;
150  }
151  }
152  return AV_PIX_FMT_NONE;
153 }
154 
155 enum AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM av_amf_get_color_profile(enum AVColorRange color_range, enum AVColorSpace color_space)
156 {
157  switch (color_space) {
158  case AVCOL_SPC_SMPTE170M:
159  if (color_range == AVCOL_RANGE_JPEG) {
160  return AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_601;
161  } else {
162  return AMF_VIDEO_CONVERTER_COLOR_PROFILE_601;
163  }
164  break;
165  case AVCOL_SPC_BT709:
166  if (color_range == AVCOL_RANGE_JPEG) {
167  return AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_709;
168  } else {
169  return AMF_VIDEO_CONVERTER_COLOR_PROFILE_709;
170  }
171  break;
173  case AVCOL_SPC_BT2020_CL:
174  if (color_range == AVCOL_RANGE_JPEG) {
175  return AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020;
176  } else {
177  return AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020;
178  }
179  break;
180 
181  default:
182  return AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;
183  }
184 }
185 
186 int av_amf_display_mastering_meta_to_hdrmeta(const AVMasteringDisplayMetadata *display_meta, AMFHDRMetadata *hdrmeta)
187 {
188  if (!display_meta || !hdrmeta)
189  return AVERROR(EINVAL);
190 
191  if (display_meta->has_luminance) {
192  const unsigned int luma_den = 10000;
193  hdrmeta->maxMasteringLuminance =
194  (amf_uint32)(luma_den * av_q2d(display_meta->max_luminance));
195  hdrmeta->minMasteringLuminance =
196  FFMIN((amf_uint32)(luma_den * av_q2d(display_meta->min_luminance)), hdrmeta->maxMasteringLuminance);
197  }
198 
199  if (display_meta->has_primaries) {
200  const unsigned int chroma_den = 50000;
201  hdrmeta->redPrimary[0] =
202  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][0])), chroma_den);
203  hdrmeta->redPrimary[1] =
204  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][1])), chroma_den);
205  hdrmeta->greenPrimary[0] =
206  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][0])), chroma_den);
207  hdrmeta->greenPrimary[1] =
208  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][1])), chroma_den);
209  hdrmeta->bluePrimary[0] =
210  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][0])), chroma_den);
211  hdrmeta->bluePrimary[1] =
212  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][1])), chroma_den);
213  hdrmeta->whitePoint[0] =
214  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[0])), chroma_den);
215  hdrmeta->whitePoint[1] =
216  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[1])), chroma_den);
217  }
218 
219  return 0;
220 }
221 
222 int av_amf_light_metadata_to_hdrmeta(const AVContentLightMetadata *light_meta, AMFHDRMetadata *hdrmeta)
223 {
224  if (!light_meta || !hdrmeta)
225  return AVERROR(EINVAL);
226 
227  hdrmeta->maxContentLightLevel = (amf_uint16)light_meta->MaxCLL;
228  hdrmeta->maxFrameAverageLightLevel = (amf_uint16)light_meta->MaxFALL;
229 
230  return 0;
231 }
232 
233 int av_amf_extract_hdr_metadata(const AVFrame *frame, AMFHDRMetadata *hdrmeta)
234 {
235  AVFrameSideData *sidedata;
236  AVContentLightMetadata *content_light = NULL;
237  AVMasteringDisplayMetadata *mastering_display = NULL;
238 
239  if (!frame || !hdrmeta)
240  return AVERROR(EINVAL);
241 
243  if (sidedata) {
244  mastering_display = (AVMasteringDisplayMetadata *)sidedata->data;
245  if (av_amf_display_mastering_meta_to_hdrmeta(mastering_display, hdrmeta) != 0)
246  mastering_display = NULL;
247  }
248 
250  if (sidedata) {
251  content_light = (AVContentLightMetadata *)sidedata->data;
252  if (av_amf_light_metadata_to_hdrmeta(content_light, hdrmeta) != 0)
253  content_light = NULL;
254  }
255 
256  if (!mastering_display && !content_light)
257  return AVERROR(ENODATA);
258 
259  return 0;
260 }
261 
262 int av_amf_attach_hdr_metadata(AVFrame *frame, const AMFHDRMetadata *hdrmeta) {
263  if (!hdrmeta || !frame)
264  return AVERROR(EINVAL);
265 
266  AVMasteringDisplayMetadata *mastering =
268  const int chroma_den = 50000;
269  const int luma_den = 10000;
270 
271  if (!mastering)
272  return AVERROR(ENOMEM);
273 
274  mastering->display_primaries[0][0] =
275  av_make_q(hdrmeta->redPrimary[0], chroma_den);
276  mastering->display_primaries[0][1] =
277  av_make_q(hdrmeta->redPrimary[1], chroma_den);
278 
279  mastering->display_primaries[1][0] =
280  av_make_q(hdrmeta->greenPrimary[0], chroma_den);
281  mastering->display_primaries[1][1] =
282  av_make_q(hdrmeta->greenPrimary[1], chroma_den);
283 
284  mastering->display_primaries[2][0] =
285  av_make_q(hdrmeta->bluePrimary[0], chroma_den);
286  mastering->display_primaries[2][1] =
287  av_make_q(hdrmeta->bluePrimary[1], chroma_den);
288 
289  mastering->white_point[0] = av_make_q(hdrmeta->whitePoint[0], chroma_den);
290  mastering->white_point[1] = av_make_q(hdrmeta->whitePoint[1], chroma_den);
291 
292  mastering->max_luminance =
293  av_make_q(hdrmeta->maxMasteringLuminance, luma_den);
294  mastering->min_luminance =
295  av_make_q(hdrmeta->maxMasteringLuminance, luma_den);
296 
297  mastering->has_luminance = 1;
298  mastering->has_primaries = 1;
299  if (hdrmeta->maxContentLightLevel) {
300  AVContentLightMetadata *light =
302 
303  if (!light)
304  return AVERROR(ENOMEM);
305 
306  light->MaxCLL = hdrmeta->maxContentLightLevel;
307  light->MaxFALL = hdrmeta->maxFrameAverageLightLevel;
308  }
309 
310  return 0;
311 }
312 
313 static const enum AVPixelFormat supported_formats[] = {
320 #if CONFIG_D3D11VA
322 #endif
323 #if CONFIG_D3D12VA
325 #endif
326 #if CONFIG_DXVA2
328 #endif
329 };
330 
338 };
339 
341  const void *hwconfig,
342  AVHWFramesConstraints *constraints)
343 {
344  int i;
345 
347  sizeof(*constraints->valid_sw_formats));
348  if (!constraints->valid_sw_formats)
349  return AVERROR(ENOMEM);
350 
351  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++)
352  constraints->valid_sw_formats[i] = supported_formats[i];
354 
355  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
356  if (!constraints->valid_hw_formats)
357  return AVERROR(ENOMEM);
358 
359  constraints->valid_hw_formats[0] = AV_PIX_FMT_AMF_SURFACE;
360  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
361 
362  return 0;
363 }
364 
/* No-op AVBuffer free callback: the buffer's data pointer does not own any
 * memory here, so there is deliberately nothing to release. */
static void amf_dummy_free(void *opaque, uint8_t *data)
{

}
369 
370 static AVBufferRef *amf_pool_alloc(void *opaque, size_t size)
371 {
372  AVHWFramesContext *hwfc = (AVHWFramesContext *)opaque;
373  AVBufferRef *buf;
374 
376  if (!buf) {
377  av_log(hwfc, AV_LOG_ERROR, "Failed to create buffer for AMF context.\n");
378  return NULL;
379  }
380  return buf;
381 }
382 
384 {
385  int i;
386 
387  for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
388  if (ctx->sw_format == supported_formats[i])
389  break;
390  }
392  av_log(ctx, AV_LOG_ERROR, "Pixel format '%s' is not supported\n",
393  av_get_pix_fmt_name(ctx->sw_format));
394  return AVERROR(ENOSYS);
395  }
396 
398  av_buffer_pool_init2(sizeof(AMFSurface), ctx,
399  &amf_pool_alloc, NULL);
400 
401  return 0;
402 }
403 
404 
406 {
407  frame->buf[0] = av_buffer_pool_get(ctx->pool);
408  if (!frame->buf[0])
409  return AVERROR(ENOMEM);
410 
411  frame->data[0] = frame->buf[0]->data;
412  frame->format = AV_PIX_FMT_AMF_SURFACE;
413  frame->width = ctx->width;
414  frame->height = ctx->height;
415  return 0;
416 }
417 
420  enum AVPixelFormat **formats)
421 {
422  enum AVPixelFormat *fmts;
423  int i;
424 
426  if (!fmts)
427  return AVERROR(ENOMEM);
429  fmts[i] = supported_transfer_formats[i];
430 
431  *formats = fmts;
432 
433  return 0;
434 }
435 
436 static void amf_free_amfsurface(void *opaque, uint8_t *data)
437 {
438  if(!!data){
439  AMFSurface *surface = (AMFSurface*)(data);
440  surface->pVtbl->Release(surface);
441  }
442 }
443 
445  const AVFrame *src)
446 {
447  AMFSurface* surface = (AMFSurface*)dst->data[0];
448  AMFPlane *plane;
449  uint8_t *dst_data[4];
450  int dst_linesize[4];
451  int planes;
452  int i;
453  int res;
454  int w = FFMIN(dst->width, src->width);
455  int h = FFMIN(dst->height, src->height);
456 
457  if (dst->hw_frames_ctx->data != (uint8_t *)ctx || src->format != ctx->sw_format)
458  return AVERROR(EINVAL);
459 
460  if (!surface) {
461  AVHWDeviceContext *hwdev_ctx = ctx->device_ctx;
462  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hwdev_ctx->hwctx;
463  AMF_SURFACE_FORMAT format = av_av_to_amf_format(ctx->sw_format);
464  res = amf_device_ctx->context->pVtbl->AllocSurface(amf_device_ctx->context, AMF_MEMORY_HOST, format, dst->width, dst->height, &surface);
465  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
466  dst->data[0] = (uint8_t *)surface;
467  dst->buf[1] = av_buffer_create((uint8_t *)surface, sizeof(surface),
469  NULL,
471  AMF_RETURN_IF_FALSE(ctx, !!dst->buf[1], AVERROR(ENOMEM), "av_buffer_create for amf surface failed.");
472  }
473 
474  planes = (int)surface->pVtbl->GetPlanesCount(surface);
475  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
476 
477  for (i = 0; i < planes; i++) {
478  plane = surface->pVtbl->GetPlaneAt(surface, i);
479  dst_data[i] = plane->pVtbl->GetNative(plane);
480  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
481  }
482  av_image_copy2(dst_data, dst_linesize,
483  src->data, src->linesize, src->format,
484  w, h);
485 
486  return 0;
487 }
488 
490  const AVFrame *src)
491 {
492  AMFSurface* surface = (AMFSurface*)src->data[0];
493  AMFPlane *plane;
494  uint8_t *src_data[4];
495  int src_linesize[4];
496  int planes;
497  int i;
498  int w = FFMIN(dst->width, src->width);
499  int h = FFMIN(dst->height, src->height);
500  int ret;
501 
502  if (src->hw_frames_ctx->data != (uint8_t *)ctx || dst->format != ctx->sw_format)
503  return AVERROR(EINVAL);
504 
505  ret = surface->pVtbl->Convert(surface, AMF_MEMORY_HOST);
506  AMF_RETURN_IF_FALSE(ctx, ret == AMF_OK, AVERROR_UNKNOWN, "Convert(amf::AMF_MEMORY_HOST) failed with error %d\n", AVERROR_UNKNOWN);
507 
508  planes = (int)surface->pVtbl->GetPlanesCount(surface);
509  av_assert0(planes < FF_ARRAY_ELEMS(src_data));
510 
511  for (i = 0; i < planes; i++) {
512  plane = surface->pVtbl->GetPlaneAt(surface, i);
513  src_data[i] = plane->pVtbl->GetNative(plane);
514  src_linesize[i] = plane->pVtbl->GetHPitch(plane);
515  }
516  av_image_copy2(dst->data, dst->linesize,
517  src_data, src_linesize, dst->format,
518  w, h);
519  return 0;
520 }
521 
522 
523 
524 static void amf_device_uninit(AVHWDeviceContext *device_ctx)
525 {
526  AVAMFDeviceContext *amf_ctx = device_ctx->hwctx;
527  AMF_RESULT res = AMF_NOT_INITIALIZED;
528  AMFTrace *trace;
529 
530  if (amf_ctx->context) {
531  amf_ctx->context->pVtbl->Terminate(amf_ctx->context);
532  amf_ctx->context->pVtbl->Release(amf_ctx->context);
533  amf_ctx->context = NULL;
534  }
535 
536  if (amf_ctx->factory)
537  res = amf_ctx->factory->pVtbl->GetTrace(amf_ctx->factory, &trace);
538 
539  if (res == AMF_OK) {
540  trace->pVtbl->UnregisterWriter(trace, FFMPEG_AMF_WRITER_ID);
541  }
542 
543  if(amf_ctx->library) {
544  dlclose(amf_ctx->library);
545  amf_ctx->library = NULL;
546  }
547  if (amf_ctx->trace_writer) {
548  amf_writer_free(amf_ctx->trace_writer);
549  }
550 
551  if (amf_ctx->lock_ctx == amf_lock_default) {
552  ff_mutex_destroy((AVMutex*)amf_ctx->lock_ctx);
553  av_freep(&amf_ctx->lock_ctx);
554  amf_ctx->lock = NULL;
555  amf_ctx->unlock = NULL;
556  }
557 
558  amf_ctx->version = 0;
559 }
560 
562 {
563  AVAMFDeviceContext *amf_ctx = ctx->hwctx;
564  AMFContext1 *context1 = NULL;
565  AMF_RESULT res;
566 
567  if (!amf_ctx->lock) {
568  amf_ctx->lock_ctx = av_mallocz(sizeof(AVMutex));
569  if (!amf_ctx->lock_ctx) {
570  return AVERROR(ENOMEM);
571  }
572  ff_mutex_init((AVMutex*)amf_ctx->lock_ctx, NULL);
573  amf_ctx->lock = amf_lock_default;
574  amf_ctx->unlock = amf_unlock_default;
575  }
576 
577 #ifdef _WIN32
578  res = amf_ctx->context->pVtbl->InitDX11(amf_ctx->context, NULL, AMF_DX11_1);
579  if (res == AMF_OK || res == AMF_ALREADY_INITIALIZED) {
580  av_log(ctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D11.\n");
581  } else {
582  res = amf_ctx->context->pVtbl->InitDX9(amf_ctx->context, NULL);
583  if (res == AMF_OK) {
584  av_log(ctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
585  } else {
586 #endif
587  AMFGuid guid = IID_AMFContext1();
588  res = amf_ctx->context->pVtbl->QueryInterface(amf_ctx->context, &guid, (void**)&context1);
589  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext1() failed with error %d\n", res);
590 
591  res = context1->pVtbl->InitVulkan(context1, NULL);
592  context1->pVtbl->Release(context1);
593  if (res != AMF_OK && res != AMF_ALREADY_INITIALIZED) {
594  if (res == AMF_NOT_SUPPORTED)
595  av_log(ctx, AV_LOG_ERROR, "AMF via Vulkan is not supported on the given device.\n");
596  else
597  av_log(ctx, AV_LOG_ERROR, "AMF failed to initialise on the given Vulkan device: %d.\n", res);
598  return AVERROR(ENOSYS);
599  }
600  av_log(ctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via Vulkan.\n");
601 #ifdef _WIN32
602  }
603  }
604 #endif
605 
606  return 0;
607 }
608 
609 static int amf_load_library(AVAMFDeviceContext* amf_ctx, void* avcl)
610 {
611  AMFInit_Fn init_fun;
612  AMFQueryVersion_Fn version_fun;
613  AMF_RESULT res;
614 
615  amf_ctx->library = dlopen(AMF_DLL_NAMEA, RTLD_NOW | RTLD_LOCAL);
616  AMF_RETURN_IF_FALSE(avcl, amf_ctx->library != NULL,
617  AVERROR_UNKNOWN, "DLL %s failed to open\n", AMF_DLL_NAMEA);
618 
619  init_fun = (AMFInit_Fn)dlsym(amf_ctx->library, AMF_INIT_FUNCTION_NAME);
620  AMF_RETURN_IF_FALSE(avcl, init_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_INIT_FUNCTION_NAME);
621 
622  version_fun = (AMFQueryVersion_Fn)dlsym(amf_ctx->library, AMF_QUERY_VERSION_FUNCTION_NAME);
623  AMF_RETURN_IF_FALSE(avcl, version_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_QUERY_VERSION_FUNCTION_NAME);
624 
625  amf_uint64 version;
626  res = version_fun(&version);
627  AMF_RETURN_IF_FALSE(avcl, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_QUERY_VERSION_FUNCTION_NAME, res);
628  amf_ctx->version = version;
629  res = init_fun(AMF_FULL_VERSION, &amf_ctx->factory);
630  AMF_RETURN_IF_FALSE(avcl, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_INIT_FUNCTION_NAME, res);
631  return 0;
632 }
633 
634 static int amf_device_create(AVHWDeviceContext *device_ctx,
635  const char *device,
636  AVDictionary *opts, int flags)
637 {
638  AVAMFDeviceContext *ctx = device_ctx->hwctx;
639  AMFTrace *trace;
640  int ret;
641  if ((ret = amf_load_library(ctx, device_ctx)) == 0) {
642  ret = ctx->factory->pVtbl->GetTrace(ctx->factory, &trace);
643  if (ret == AMF_OK) {
644  int level_ff = av_log_get_level();
645  int level_amf = AMF_TRACE_TRACE;
646  amf_bool enable_log = true;
647  switch(level_ff)
648  {
649  case AV_LOG_QUIET:
650  level_amf = AMF_TRACE_ERROR;
651  enable_log = false;
652  break;
653  case AV_LOG_PANIC:
654  case AV_LOG_FATAL:
655  case AV_LOG_ERROR:
656  level_amf = AMF_TRACE_ERROR;
657  break;
658  case AV_LOG_WARNING:
659  case AV_LOG_INFO:
660  level_amf = AMF_TRACE_WARNING;
661  break;
662  case AV_LOG_VERBOSE:
663  level_amf = AMF_TRACE_INFO;
664  break;
665  case AV_LOG_DEBUG:
666  level_amf = AMF_TRACE_DEBUG;
667  break;
668  case AV_LOG_TRACE:
669  level_amf = AMF_TRACE_TRACE;
670  break;
671  }
672  if(ctx->version == AMF_MAKE_FULL_VERSION(1, 4, 35, 0)){// get around a bug in trace in AMF runtime driver 24.20
673  level_amf = AMF_TRACE_WARNING;
674  }
675 
676  trace->pVtbl->EnableWriter(trace, AMF_TRACE_WRITER_CONSOLE, 0);
677  trace->pVtbl->SetGlobalLevel(trace, level_amf);
678 
679  // connect AMF logger to av_log
680  ctx->trace_writer = amf_writer_alloc(device_ctx);
681  trace->pVtbl->RegisterWriter(trace, FFMPEG_AMF_WRITER_ID, (AMFTraceWriter*)ctx->trace_writer, 1);
682  trace->pVtbl->SetWriterLevel(trace, FFMPEG_AMF_WRITER_ID, level_amf);
683  trace->pVtbl->EnableWriter(trace, FFMPEG_AMF_WRITER_ID, enable_log);
684  trace->pVtbl->SetWriterLevel(trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, level_amf);
685  trace->pVtbl->EnableWriter(trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, enable_log);
686  }
687 
688 
689  ret = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
690  if (ret == AMF_OK) {
691  AMF_ASSIGN_PROPERTY_INT64(ret, ctx->context, L"DeviceSurfaceCacheSize", 50 );
692  return 0;
693  }
694  av_log(device_ctx, AV_LOG_ERROR, "CreateContext() failed with error %d.\n", ret);
695  }
696  amf_device_uninit(device_ctx);
697  return ret;
698 }
699 
700 #if CONFIG_DXVA2
701 static int amf_init_from_dxva2_device(AVAMFDeviceContext * amf_ctx, AVHWDeviceContext *child_device_ctx)
702 {
703  AVDXVA2DeviceContext *hwctx = child_device_ctx->hwctx;
704  IDirect3DDevice9 *device;
705  HANDLE device_handle;
706  HRESULT hr;
707  AMF_RESULT res;
708  int ret;
709 
710  hr = IDirect3DDeviceManager9_OpenDeviceHandle(hwctx->devmgr, &device_handle);
711  if (FAILED(hr)) {
712  av_log(child_device_ctx, AV_LOG_ERROR, "Failed to open device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
713  return AVERROR_EXTERNAL;
714  }
715 
716  hr = IDirect3DDeviceManager9_LockDevice(hwctx->devmgr, device_handle, &device, FALSE);
717  if (SUCCEEDED(hr)) {
718  IDirect3DDeviceManager9_UnlockDevice(hwctx->devmgr, device_handle, FALSE);
719  ret = 0;
720  } else {
721  av_log(child_device_ctx, AV_LOG_ERROR, "Failed to lock device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
723  }
724 
725 
726  IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, device_handle);
727 
728  if (ret < 0)
729  return ret;
730 
731  res = amf_ctx->context->pVtbl->InitDX9(amf_ctx->context, device);
732 
733  IDirect3DDevice9_Release(device);
734 
735  if (res != AMF_OK && res != AMF_ALREADY_INITIALIZED) {
736  if (res == AMF_NOT_SUPPORTED)
737  av_log(child_device_ctx, AV_LOG_ERROR, "AMF via D3D9 is not supported on the given device.\n");
738  else
739  av_log(child_device_ctx, AV_LOG_ERROR, "AMF failed to initialise on given D3D9 device: %d.\n", res);
740  return AVERROR(ENODEV);
741  }
742  av_log(child_device_ctx, AV_LOG_INFO, "AMF via DXVA2.\n");
743  return 0;
744 }
745 #endif
746 
747 #if CONFIG_D3D11VA
748 static int amf_init_from_d3d11_device(AVAMFDeviceContext* amf_ctx, AVHWDeviceContext *child_device_ctx)
749 {
750  AMF_RESULT res;
751  AVD3D11VADeviceContext *hwctx = child_device_ctx->hwctx;
752  res = amf_ctx->context->pVtbl->InitDX11(amf_ctx->context, hwctx->device, AMF_DX11_1);
753  if (res != AMF_OK && res != AMF_ALREADY_INITIALIZED) {
754  if (res == AMF_NOT_SUPPORTED)
755  av_log(child_device_ctx, AV_LOG_ERROR, "AMF via D3D11 is not supported on the given device.\n");
756  else
757  av_log(child_device_ctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D11 device: %d.\n", res);
758  return AVERROR(ENODEV);
759  }
760  av_log(child_device_ctx, AV_LOG_INFO, "AMF via D3D11.\n");
761  return 0;
762 }
763 #endif
764 
765 #if CONFIG_D3D12VA
766 static int amf_init_from_d3d12_device(AVAMFDeviceContext* amf_ctx, AVHWDeviceContext *child_device_ctx)
767 {
768  AVD3D12VADeviceContext *hwctx = child_device_ctx->hwctx;
769  AMF_RESULT res;
770  AMFContext2 *context2 = NULL;
771  AMFGuid guid = IID_AMFContext2();
772  res = amf_ctx->context->pVtbl->QueryInterface(amf_ctx->context, &guid, (void**)&context2);
773  AMF_RETURN_IF_FALSE(child_device_ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext2() failed with error %d\n", res);
774  res = context2->pVtbl->InitDX12(context2, hwctx->device, AMF_DX12);
775  context2->pVtbl->Release(context2);
776  if (res != AMF_OK && res != AMF_ALREADY_INITIALIZED) {
777  if (res == AMF_NOT_SUPPORTED)
778  av_log(child_device_ctx, AV_LOG_ERROR, "AMF via D3D12 is not supported on the given device.\n");
779  else
780  av_log(child_device_ctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D12 device: %d.\n", res);
781  return AVERROR(ENODEV);
782  }
783  av_log(child_device_ctx, AV_LOG_INFO, "AMF via D3D12.\n");
784  return 0;
785 }
786 #endif
787 
788 
789 static int amf_device_derive(AVHWDeviceContext *device_ctx,
790  AVHWDeviceContext *child_device_ctx, AVDictionary *opts,
791  int flags)
792 {
793 #if CONFIG_DXVA2 || CONFIG_D3D11VA
794  AVAMFDeviceContext *amf_ctx = device_ctx->hwctx;
795 #endif
796  int ret;
797 
798  ret = amf_device_create(device_ctx, "", opts, flags);
799  if(ret < 0)
800  return ret;
801 
802  switch (child_device_ctx->type) {
803 #if CONFIG_DXVA2
805  return amf_init_from_dxva2_device(amf_ctx, child_device_ctx);
806 #endif
807 #if CONFIG_D3D11VA
809  return amf_init_from_d3d11_device(amf_ctx, child_device_ctx);
810 #endif
811 #if CONFIG_D3D12VA
813  return amf_init_from_d3d12_device(amf_ctx, child_device_ctx);
814 #endif
815  default:
816  av_log(child_device_ctx, AV_LOG_ERROR, "AMF initialisation from a %s device is not supported.\n",
817  av_hwdevice_get_type_name(child_device_ctx->type));
818  return AVERROR(ENOSYS);
819  }
820 }
821 
824  .name = "AMF",
825 
826  .device_hwctx_size = sizeof(AVAMFDeviceContext),
827  .frames_hwctx_size = sizeof(AMFFramesContext),
828 
829  .device_create = amf_device_create,
830  .device_derive = amf_device_derive,
831  .device_init = amf_device_init,
832  .device_uninit = amf_device_uninit,
833  .frames_get_constraints = amf_frames_get_constraints,
834  .frames_init = amf_frames_init,
835  .frames_get_buffer = amf_get_buffer,
836  .transfer_get_formats = amf_transfer_get_formats,
837  .transfer_data_to = amf_transfer_data_to,
838  .transfer_data_from = amf_transfer_data_from,
839 
840  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_AMF_SURFACE, AV_PIX_FMT_NONE },
841 };
flags
const SwsFlags flags[]
Definition: swscale.c:71
formats
formats
Definition: signature.h:47
AMFFramesContext::dummy
void * dummy
Definition: hwcontext_amf.c:108
AVMasteringDisplayMetadata::has_primaries
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
Definition: mastering_display_metadata.h:62
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:88
FFHWFramesContext::pool_internal
AVBufferPool * pool_internal
Definition: hwcontext_internal.h:101
AVD3D12VADeviceContext::device
ID3D12Device * device
Device used for objects creation and access.
Definition: hwcontext_d3d12va.h:54
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AVMasteringDisplayMetadata::max_luminance
AVRational max_luminance
Max luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:57
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
ff_mutex_init
static int ff_mutex_init(AVMutex *mutex, const void *attr)
Definition: thread.h:187
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
av_frame_get_side_data
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:659
message
Definition: api-threadmessage-test.c:47
AV_LOG_QUIET
#define AV_LOG_QUIET
Print no output.
Definition: log.h:192
thread.h
AV_LOG_PANIC
#define AV_LOG_PANIC
Something went really wrong and we will crash now.
Definition: log.h:197
AVMasteringDisplayMetadata::display_primaries
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
Definition: mastering_display_metadata.h:42
AVMasteringDisplayMetadata::has_luminance
int has_luminance
Flag indicating whether the luminance (min_ and max_) have been set.
Definition: mastering_display_metadata.h:67
amf_writer_alloc
static AmfTraceWriter * amf_writer_alloc(void *avctx)
Definition: hwcontext_amf.c:82
av_amf_attach_hdr_metadata
int av_amf_attach_hdr_metadata(AVFrame *frame, const AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:262
AVContentLightMetadata::MaxCLL
unsigned MaxCLL
Max content light level (cd/m^2).
Definition: mastering_display_metadata.h:111
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
pixdesc.h
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:777
amf_dummy_free
static void amf_dummy_free(void *opaque, uint8_t *data)
Definition: hwcontext_amf.c:365
AVAMFDeviceContext::lock
void(* lock)(void *lock_ctx)
Definition: hwcontext_amf.h:44
data
const char data[16]
Definition: mxf.c:149
amf_device_create
static int amf_device_create(AVHWDeviceContext *device_ctx, const char *device, AVDictionary *opts, int flags)
Definition: hwcontext_amf.c:634
AVDXVA2DeviceContext::devmgr
IDirect3DDeviceManager9 * devmgr
Definition: hwcontext_dxva2.h:40
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:226
AMFTraceWriter_Write
static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis, const wchar_t *scope, const wchar_t *message)
Definition: hwcontext_amf.c:71
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:102
AVDictionary
Definition: dict.c:32
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:169
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:449
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVAMFFormatMap
Definition: hwcontext_amf.c:111
AVCOL_SPC_BT2020_CL
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
Definition: pixfmt.h:712
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
av_amf_display_mastering_meta_to_hdrmeta
int av_amf_display_mastering_meta_to_hdrmeta(const AVMasteringDisplayMetadata *display_meta, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:186
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:444
ff_mutex_unlock
static int ff_mutex_unlock(AVMutex *mutex)
Definition: thread.h:189
AVContentLightMetadata
Content light level needed by to transmit HDR over HDMI (CTA-861.3).
Definition: mastering_display_metadata.h:107
AVAMFDeviceContext::lock_ctx
void * lock_ctx
Definition: hwcontext_amf.h:46
av_amf_to_av_format
enum AVPixelFormat av_amf_to_av_format(enum AMF_SURFACE_FORMAT fmt)
Definition: hwcontext_amf.c:144
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
av_av_to_amf_format
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
Definition: hwcontext_amf.c:133
AVAMFDeviceContext::context
AMFContext * context
Definition: hwcontext_amf.h:41
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
amf_transfer_get_formats
static int amf_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_amf.c:418
avassert.h
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
ffhwframesctx
static FFHWFramesContext * ffhwframesctx(AVHWFramesContext *ctx)
Definition: hwcontext_internal.h:115
AV_LOG_TRACE
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
Definition: log.h:236
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_amf_get_color_profile
enum AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM av_amf_get_color_profile(enum AVColorRange color_range, enum AVColorSpace color_space)
Definition: hwcontext_amf.c:155
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:456
AVMutex
#define AVMutex
Definition: thread.h:184
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:390
AVAMFFormatMap::av_format
enum AVPixelFormat av_format
Definition: hwcontext_amf.c:112
AVMasteringDisplayMetadata::white_point
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
Definition: mastering_display_metadata.h:47
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
amf_frames_get_constraints
static int amf_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_amf.c:340
AVD3D11VADeviceContext::device
ID3D11Device * device
Device used for texture creation and access.
Definition: hwcontext_d3d11va.h:56
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:707
av_q2d
static double av_q2d(AVRational a)
Convert an AVRational to a double.
Definition: rational.h:104
AmfTraceWriter::vtblp
AMFTraceWriterVtbl * vtblp
Definition: hwcontext_amf.c:66
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:42
amf_device_init
static int amf_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_amf.c:561
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:231
amf_transfer_data_to
static int amf_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_amf.c:444
ctx
static AVFormatContext * ctx
Definition: movenc.c:49
FFMPEG_AMF_WRITER_ID
#define FFMPEG_AMF_WRITER_ID
Definition: hwcontext_amf.c:53
hwcontext_amf.h
AVAMFDeviceContext::version
int64_t version
version of AMF runtime
Definition: hwcontext_amf.h:40
amf_writer_free
static void amf_writer_free(void *opaque)
Definition: hwcontext_amf.c:96
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:120
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
color_range
color_range
Definition: vf_selectivecolor.c:43
av_mallocz
#define av_mallocz(s)
Definition: tableprint_vlc.h:31
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:100
if
if(ret)
Definition: filter_design.txt:179
av_log_get_level
int av_log_get_level(void)
Get the current log level.
Definition: log.c:472
opts
static AVDictionary * opts
Definition: movenc.c:51
planes
static const struct @578 planes[]
NULL
#define NULL
Definition: coverity.c:32
amf_pool_alloc
static AVBufferRef * amf_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_amf.c:370
format
New swscale design to change SwsGraph is what coordinates multiple passes These can include cascaded scaling error diffusion and so on Or we could have separate passes for the vertical and horizontal scaling In between each SwsPass lies a fully allocated image buffer Graph passes may have different levels of e g we can have a single threaded error diffusion pass following a multi threaded scaling pass SwsGraph is internally recreated whenever the image format
Definition: swscale-v2.txt:14
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
hwcontext_vulkan.h
AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
@ AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
Mastering display metadata associated with a video frame.
Definition: frame.h:120
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:265
AV_PIX_FMT_D3D12
@ AV_PIX_FMT_D3D12
Hardware surfaces for Direct3D 12.
Definition: pixfmt.h:440
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
AVAMFDeviceContext::trace_writer
void * trace_writer
Definition: hwcontext_amf.h:38
hwcontext_d3d12va.h
amf_get_buffer
static int amf_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_amf.c:405
AVAMFDeviceContext::library
void * library
Definition: hwcontext_amf.h:36
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
AmfTraceWriter
Definition: hwcontext_amf.c:65
ff_mutex_destroy
static int ff_mutex_destroy(AVMutex *mutex)
Definition: thread.h:190
AV_PIX_FMT_X2BGR10
#define AV_PIX_FMT_X2BGR10
Definition: pixfmt.h:614
hwcontext_dxva2.h
AV_HWDEVICE_TYPE_D3D12VA
@ AV_HWDEVICE_TYPE_D3D12VA
Definition: hwcontext.h:40
amf_free_amfsurface
static void amf_free_amfsurface(void *opaque, uint8_t *data)
Definition: hwcontext_amf.c:436
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:87
i
#define i(width, name, range_min, range_max)
Definition: cbs_h264.c:63
av_amf_extract_hdr_metadata
int av_amf_extract_hdr_metadata(const AVFrame *frame, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:233
size
int size
Definition: twinvq_data.h:10344
AVAMFDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_amf.h:35
amf_lock_default
static void amf_lock_default(void *opaque)
Definition: hwcontext_amf.c:55
ff_hwcontext_type_amf
const HWContextType ff_hwcontext_type_amf
Definition: hwcontext_amf.c:822
av_make_q
static AVRational av_make_q(int num, int den)
Create an AVRational.
Definition: rational.h:71
amf_device_uninit
static void amf_device_uninit(AVHWDeviceContext *device_ctx)
Definition: hwcontext_amf.c:524
AVFrameSideData::data
uint8_t * data
Definition: frame.h:284
AMFFramesContext
We still need AVHWFramesContext to utilize our hardware memory otherwise, we will receive the error "...
Definition: hwcontext_amf.c:107
buffer.h
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
av_content_light_metadata_create_side_data
AVContentLightMetadata * av_content_light_metadata_create_side_data(AVFrame *frame)
Allocate a complete AVContentLightMetadata and add it to the frame.
Definition: mastering_display_metadata.c:82
ff_mutex_lock
static int ff_mutex_lock(AVMutex *mutex)
Definition: thread.h:188
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:263
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
version
version
Definition: libkvazaar.c:313
amf_frames_init
static int amf_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_amf.c:383
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:221
AV_PIX_FMT_ARGB
@ AV_PIX_FMT_ARGB
packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
Definition: pixfmt.h:99
AVAMFFormatMap::amf_format
enum AMF_SURFACE_FORMAT amf_format
Definition: hwcontext_amf.c:113
AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
@ AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
Content light level (based on CTA-861.3).
Definition: frame.h:137
AVD3D12VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d12va.h:43
AVAMFDeviceContext::factory
AMFFactory * factory
Definition: hwcontext_amf.h:37
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:711
AVDXVA2DeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_dxva2.h:39
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:32
AVColorSpace
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:700
AVD3D11VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d11va.h:45
amf_transfer_data_from
static int amf_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_amf.c:489
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AVMasteringDisplayMetadata
Mastering display metadata capable of representing the color volume of the display used to master the...
Definition: mastering_display_metadata.h:38
amf_device_derive
static int amf_device_derive(AVHWDeviceContext *device_ctx, AVHWDeviceContext *child_device_ctx, AVDictionary *opts, int flags)
Definition: hwcontext_amf.c:789
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:406
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
ret
ret
Definition: filter_design.txt:187
AV_LOG_FATAL
#define AV_LOG_FATAL
Something went wrong and recovery is not possible.
Definition: log.h:204
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
supported_formats
static enum AVPixelFormat supported_formats[]
Definition: hwcontext_amf.c:313
hwcontext_amf_internal.h
amf_unlock_default
static void amf_unlock_default(void *opaque)
Definition: hwcontext_amf.c:60
AVAMFDeviceContext::unlock
void(* unlock)(void *lock_ctx)
Definition: hwcontext_amf.h:45
amf_load_library
static int amf_load_library(AVAMFDeviceContext *amf_ctx, void *avcl)
Definition: hwcontext_amf.c:609
av_mastering_display_metadata_create_side_data
AVMasteringDisplayMetadata * av_mastering_display_metadata_create_side_data(AVFrame *frame)
Allocate a complete AVMasteringDisplayMetadata and add it to the frame.
Definition: mastering_display_metadata.c:58
av_image_copy2
static void av_image_copy2(uint8_t *const dst_data[4], const int dst_linesizes[4], uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Wrapper around av_image_copy() to workaround the limitation that the conversion from uint8_t * const ...
Definition: imgutils.h:184
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
format_map
const FormatMap format_map[]
Definition: hwcontext_amf.c:116
AmfTraceWriter::avctx
void * avctx
Definition: hwcontext_amf.c:67
L
#define L(x)
Definition: vpx_arith.h:36
AVMasteringDisplayMetadata::min_luminance
AVRational min_luminance
Min luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:52
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:602
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:282
AmfTraceWriter::vtbl
AMFTraceWriterVtbl vtbl
Definition: hwcontext_amf.c:68
w
uint8_t w
Definition: llvidencdsp.c:39
hwcontext_internal.h
AMFTraceWriter_Flush
static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
Definition: hwcontext_amf.c:78
AVContentLightMetadata::MaxFALL
unsigned MaxFALL
Max average light level per frame (cd/m^2).
Definition: mastering_display_metadata.h:116
AV_PIX_FMT_RGBAF16
#define AV_PIX_FMT_RGBAF16
Definition: pixfmt.h:624
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
imgutils.h
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HWContextType
Definition: hwcontext_internal.h:29
av_amf_light_metadata_to_hdrmeta
int av_amf_light_metadata_to_hdrmeta(const AVContentLightMetadata *light_meta, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:222
h
h
Definition: vp9dsp_template.c:2070
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:702
AVColorRange
AVColorRange
Visual content value range.
Definition: pixfmt.h:742
hwcontext_d3d11va.h
src
#define src
Definition: vp8dsp.c:248
supported_transfer_formats
static enum AVPixelFormat supported_transfer_formats[]
Definition: hwcontext_amf.c:331
w32dlfcn.h
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3376