FFmpeg
amfenc.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include "libavutil/avassert.h"
22 #include "libavutil/imgutils.h"
23 #include "libavutil/hwcontext.h"
25 #if CONFIG_D3D11VA
27 #endif
28 #if CONFIG_DXVA2
29 #define COBJMACROS
31 #endif
32 #include "libavutil/mem.h"
33 #include "libavutil/pixdesc.h"
34 #include "libavutil/time.h"
35 
36 #include "amfenc.h"
37 #include "encode.h"
38 
39 #define AMF_AV_FRAME_REF L"av_frame_ref"
40 #define PTS_PROP L"PtsProp"
41 
42 #if CONFIG_D3D11VA
43 #include <d3d11.h>
44 #endif
45 
46 #ifdef _WIN32
47 #include "compat/w32dlfcn.h"
48 #else
49 #include <dlfcn.h>
50 #endif
51 
52 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
53 
54 
58 #if CONFIG_D3D11VA
60 #endif
61 #if CONFIG_DXVA2
63 #endif
74 };
75 
77 
/* Create and configure the AMF encoder component for the codec selected on
 * avctx: build per-instance AMF property names, validate the input pixel
 * format, then ask the AMF factory for the encoder component.
 * Returns 0 on success or a negative AVERROR code on failure.
 * NOTE(review): this extracted view is missing a few lines (e.g. the
 * declarations of `ctx` and `pix_fmt`, and presumably a
 * `next_encoder_index` increment) -- confirm against the full source. */
78 static int amf_init_encoder(AVCodecContext *avctx)
79 {
81  const wchar_t *codec_id = NULL;
82  AMF_RESULT res;
84  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
85  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
86  int alloc_size;
87  wchar_t name[512];
88 
89 
/* Build unique wide-string property names (PTS_PROP / AMF_AV_FRAME_REF plus
 * an encoder index) so properties attached to surfaces do not collide
 * between simultaneously open encoder instances. swprintf() returns the
 * character count excluding the terminator, hence the "+ 1". */
90  alloc_size = swprintf(name, amf_countof(name), L"%s%lld",PTS_PROP, next_encoder_index) + 1;
91  ctx->pts_property_name = av_memdup(name, alloc_size * sizeof(wchar_t));
92  if(!ctx->pts_property_name)
93  return AVERROR(ENOMEM);
94 
95  alloc_size = swprintf(name, amf_countof(name), L"%s%lld",AMF_AV_FRAME_REF, next_encoder_index) + 1;
96  ctx->av_frame_property_name = av_memdup(name, alloc_size * sizeof(wchar_t));
97  if(!ctx->av_frame_property_name)
98  return AVERROR(ENOMEM);
99 
101 
/* Map the FFmpeg codec id onto the AMF component name; anything else is
 * rejected below. */
102  switch (avctx->codec->id) {
103  case AV_CODEC_ID_H264:
104  codec_id = AMFVideoEncoderVCE_AVC;
105  break;
106  case AV_CODEC_ID_HEVC:
107  codec_id = AMFVideoEncoder_HEVC;
108  break;
109  case AV_CODEC_ID_AV1 :
110  codec_id = AMFVideoEncoder_AV1;
111  break;
112  default:
113  break;
114  }
115  AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
116 
/* For HW frames the encoder sees the underlying software format, not the
 * opaque hwaccel format. */
117  if (avctx->hw_frames_ctx)
118  pix_fmt = ((AVHWFramesContext*)avctx->hw_frames_ctx->data)->sw_format;
119  else
120  pix_fmt = avctx->pix_fmt;
121 
/* 10-bit (P010) requires AMF runtime >= 1.4.32 (driver 23.30). */
122  if (pix_fmt == AV_PIX_FMT_P010) {
123  AMF_RETURN_IF_FALSE(ctx, amf_device_ctx->version >= AMF_MAKE_FULL_VERSION(1, 4, 32, 0), AVERROR_UNKNOWN, "10-bit encoder is not supported by AMD GPU drivers versions lower than 23.30.\n");
124  }
125 
126  ctx->format = av_av_to_amf_format(pix_fmt);
127  AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
128  "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
129 
/* Instantiate the encoder component itself. */
130  res = amf_device_ctx->factory->pVtbl->CreateComponent(amf_device_ctx->factory, amf_device_ctx->context, codec_id, &ctx->encoder);
131  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
132 
/* Reset per-stream bookkeeping used by the submit/receive pacing logic. */
133  ctx->submitted_frame = 0;
134  ctx->encoded_frame = 0;
135  ctx->eof = 0;
136 
137  return 0;
138 }
139 
/* ff_amf_encode_close(): common encoder termination for all AMF encoders.
 * Terminates and releases the AMF encoder component, drops the device
 * context reference, frees both FIFOs (releasing any AMF output buffers
 * still queued), and frees the per-instance property-name strings.
 * Always returns 0.
 * NOTE(review): the signature line is absent from this extracted view;
 * per the index it is `int av_cold ff_amf_encode_close(AVCodecContext *avctx)`. */
141 {
142  AMFEncoderContext *ctx = avctx->priv_data;
143 
144  if (ctx->encoder) {
145  ctx->encoder->pVtbl->Terminate(ctx->encoder);
146  ctx->encoder->pVtbl->Release(ctx->encoder);
147  ctx->encoder = NULL;
148  }
149 
150  av_buffer_unref(&ctx->device_ctx_ref);
151  av_fifo_freep2(&ctx->timestamp_list);
152 
153  if (ctx->output_list) {
154  // release remaining AMF output buffers
155  while(av_fifo_can_read(ctx->output_list)) {
156  AMFBuffer* buffer = NULL;
157  av_fifo_read(ctx->output_list, &buffer, 1);
158  if(buffer != NULL)
159  buffer->pVtbl->Release(buffer);
160  }
161  av_fifo_freep2(&ctx->output_list);
162  }
163  av_freep(&ctx->pts_property_name);
164  av_freep(&ctx->av_frame_property_name);
165 
166  return 0;
167 }
168 
169 static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
170  AMFSurface* surface)
171 {
172  AMFPlane *plane;
173  uint8_t *dst_data[4] = {0};
174  int dst_linesize[4] = {0};
175  int planes;
176  int i;
177 
178  planes = (int)surface->pVtbl->GetPlanesCount(surface);
179  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
180 
181  for (i = 0; i < planes; i++) {
182  plane = surface->pVtbl->GetPlaneAt(surface, i);
183  dst_data[i] = plane->pVtbl->GetNative(plane);
184  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
185  }
186  av_image_copy2(dst_data, dst_linesize,
187  frame->data, frame->linesize, frame->format,
188  avctx->width, avctx->height);
189 
190  return 0;
191 }
192 
/* Copy one encoded AMF output buffer into an AVPacket and restore packet
 * timestamps: pts comes from the property attached at submit time, dts from
 * the ordered timestamp FIFO, shifted by a lazily-computed B-frame delay.
 * Returns 0 on success or a negative AVERROR code.
 * NOTE(review): the bodies of the IDR/key-frame branches are missing from
 * this extracted view -- presumably they set AV_PKT_FLAG_KEY on pkt->flags;
 * confirm against the full source. */
193 static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
194 {
195  AMFEncoderContext *ctx = avctx->priv_data;
196  int ret;
197  AMFVariantStruct var = {0};
198  int64_t timestamp = AV_NOPTS_VALUE;
199  int64_t size = buffer->pVtbl->GetSize(buffer);
200 
201  if ((ret = ff_get_encode_buffer(avctx, pkt, size, 0)) < 0) {
202  return ret;
203  }
204  memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);
205 
/* Per-codec detection of key frames from the buffer's output-type property. */
206  switch (avctx->codec->id) {
207  case AV_CODEC_ID_H264:
208  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
209  if(var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
211  }
212  break;
213  case AV_CODEC_ID_HEVC:
214  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
215  if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
217  }
218  break;
219  case AV_CODEC_ID_AV1:
220  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE, &var);
221  if (var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_KEY) {
223  }
224  default:
225  break;
226  }
227 
/* The original pts was attached to the surface at submit time under the
 * per-instance property name; read it back for the packet. */
228  buffer->pVtbl->GetProperty(buffer, ctx->pts_property_name, &var);
229 
230  pkt->pts = var.int64Value; // original pts
231 
232  AMF_RETURN_IF_FALSE(ctx, av_fifo_read(ctx->timestamp_list, &timestamp, 1) >= 0,
233  AVERROR_UNKNOWN, "timestamp_list is empty\n");
234 
235  // calc dts shift if max_b_frames > 0
/* The delay is computed once, from the distance between the oldest queued
 * timestamp and the newest one still in the FIFO. */
236  if ((ctx->max_b_frames > 0 || ((ctx->pa_adaptive_mini_gop == 1) ? true : false)) && ctx->dts_delay == 0) {
237  int64_t timestamp_last = AV_NOPTS_VALUE;
238  size_t can_read = av_fifo_can_read(ctx->timestamp_list);
239 
240  AMF_RETURN_IF_FALSE(ctx, can_read > 0, AVERROR_UNKNOWN,
241  "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
242  av_fifo_peek(ctx->timestamp_list, &timestamp_last, 1, can_read - 1);
243  if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
244  return AVERROR(ERANGE);
245  }
246  ctx->dts_delay = timestamp_last - timestamp;
247  }
248  pkt->dts = timestamp - ctx->dts_delay;
249  return 0;
250 }
251 
252 // amfenc API implementation
/* ff_amf_encode_init(): common encoder initialization for all AMF encoders.
 * Allocates the timestamp and output FIFOs, acquires (or derives/creates)
 * an AMF hardware device context, reconciles the lookahead depth with the
 * surface-queue limit, then creates the encoder component.
 * Returns 0 on success or a negative AVERROR code; on failure the partially
 * initialized state is torn down via ff_amf_encode_close().
 * NOTE(review): the signature line and a couple of statements (the
 * AV_FIFO_FLAG_AUTO_GROW argument on line 261, and the derive call in the
 * hw_device_ctx else-branch) are absent from this extracted view. */
254 {
255  int ret;
256  AMFEncoderContext *ctx = avctx->priv_data;
257  AVHWDeviceContext *hwdev_ctx = NULL;
258 
259  // hardcoded to current HW queue size - will auto-realloc if too small
260  ctx->timestamp_list = av_fifo_alloc2(avctx->max_b_frames + 16, sizeof(int64_t),
262  if (!ctx->timestamp_list) {
263  return AVERROR(ENOMEM);
264  }
265  ctx->output_list = av_fifo_alloc2(2, sizeof(AMFBuffer*), AV_FIFO_FLAG_AUTO_GROW);
266  if (!ctx->output_list)
267  return AVERROR(ENOMEM);
268 
269  ctx->dts_delay = 0;
270 
271  ctx->hwsurfaces_in_queue = 0;
272 
/* Device selection: prefer an explicit hw_device_ctx; if it is not already
 * an AMF device, derive one from it. Otherwise fall back to the frames
 * context's parent device, and finally create a standalone AMF device. */
273  if (avctx->hw_device_ctx) {
274  hwdev_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
275  if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
276  {
277  ctx->device_ctx_ref = av_buffer_ref(avctx->hw_device_ctx);
278  }
279  else {
281  AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
282  }
283  } else if (avctx->hw_frames_ctx) {
284  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
285  if (frames_ctx->device_ref ) {
286  if (frames_ctx->format == AV_PIX_FMT_AMF_SURFACE) {
287  ctx->device_ctx_ref = av_buffer_ref(frames_ctx->device_ref);
288  }
289  else {
290  ret = av_hwdevice_ctx_create_derived(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
291  AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
292  }
293  }
294  }
295  else {
296  ret = av_hwdevice_ctx_create(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
297  AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create hardware device context (AMF) : %s\n", av_err2str(ret));
298  }
299 
/* The surface queue must be able to hold the whole lookahead window plus
 * one in-flight frame, otherwise submission would deadlock. */
300  if (ctx->pa_lookahead_buffer_depth >= ctx->hwsurfaces_in_queue_max) {
301  av_log(avctx, AV_LOG_WARNING,
302  "async_depth (%d) too small for lookahead (%d), increasing to (%d)\n",
303  ctx->hwsurfaces_in_queue_max,
304  ctx->pa_lookahead_buffer_depth,
305  ctx->pa_lookahead_buffer_depth + 1);
306  ctx->hwsurfaces_in_queue_max = ctx->pa_lookahead_buffer_depth + 1;
307  }
308 
309  if ((ret = amf_init_encoder(avctx)) == 0) {
310  return 0;
311  }
312 
/* Failure path: release everything allocated above. */
313  ff_amf_encode_close(avctx);
314  return ret;
315 }
316 
317 static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
318 {
319  AMF_RESULT res;
320  AMFVariantStruct var;
321  res = AMFVariantInit(&var);
322  if (res == AMF_OK) {
323  AMFGuid guid_AMFInterface = IID_AMFInterface();
324  AMFInterface *amf_interface;
325  res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
326 
327  if (res == AMF_OK) {
328  res = AMFVariantAssignInterface(&var, amf_interface);
329  amf_interface->pVtbl->Release(amf_interface);
330  }
331  if (res == AMF_OK) {
332  res = object->pVtbl->SetProperty(object, name, var);
333  }
334  AMFVariantClear(&var);
335  }
336  return res;
337 }
338 
/* Attach a cloned reference of the submitted AVFrame to the AMF surface by
 * smuggling the AVFrame pointer through an int64 property; the reference is
 * released again in amf_release_attached_frame_ref() when the matching
 * output appears. Returns AMF_OK if the property was stored, AMF_FAIL
 * otherwise.
 * NOTE(review): the declaration of `frame_ref` (presumably
 * `AVFrame *frame_ref = av_frame_clone(frame);`, per the cross-reference
 * index) is absent from this extracted view. */
339 static AMF_RESULT amf_store_attached_frame_ref(AMFEncoderContext *ctx, const AVFrame *frame, AMFSurface *surface)
340 {
341  AMF_RESULT res = AMF_FAIL;
342  int64_t data;
344  if (frame_ref) {
345  memcpy(&data, &frame_ref, sizeof(frame_ref)); // store pointer in 8 bytes
346  AMF_ASSIGN_PROPERTY_INT64(res, surface, ctx->av_frame_property_name, data);
347  }
348  return res;
349 }
350 
/* amf_release_attached_frame_ref(): counterpart of
 * amf_store_attached_frame_ref() -- recover the AVFrame pointer stored as an
 * int64 property on an output buffer and free that frame reference.
 * Returns AMF_OK only if the property was present and non-zero.
 * NOTE(review): the signature line (per the index:
 * `static AMF_RESULT amf_release_attached_frame_ref(AMFEncoderContext *ctx,
 * AMFBuffer *buffer)`) and the frame_ref declaration / av_frame_free lines
 * are absent from this extracted view. */
352 {
353  AMFVariantStruct var = {0};
354  AMF_RESULT res = buffer->pVtbl->GetProperty(buffer, ctx->av_frame_property_name, &var);
355  if(res == AMF_OK && var.int64Value){
357  memcpy(&frame_ref, &var.int64Value, sizeof(frame_ref));
359  }
360  return res;
361 }
362 
/* Wrap or copy one input AVFrame into an AMF surface, attach per-frame
 * metadata (pts property, optional HDR10 metadata, forced picture types,
 * AUD insertion), and submit it to the encoder. If the encoder input queue
 * is full the surface is handed back via *surface_resubmit for the caller
 * to resubmit later. Returns 0, AVERROR(EAGAIN) when it is too soon to
 * poll for output, or a negative AVERROR code on failure.
 * Must be called with the AMF device lock held (see
 * amf_submit_frame_locked()).
 * NOTE(review): the DXVA2 and AMF_SURFACE case labels and the
 * amf_store_attached_frame_ref() call are absent from this extracted
 * view. */
363 static int amf_submit_frame(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
364 {
365  AMFEncoderContext *ctx = avctx->priv_data;
366  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
367  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
368  AMFSurface *surface;
369  AMF_RESULT res;
370  int ret;
371  int hw_surface = 0;
/* Number of outputs the encoder may legitimately be holding back:
 * the B-frame reorder depth plus one unless low-delay is requested. */
372  int output_delay = FFMAX(ctx->max_b_frames, 0) + ((avctx->flags & AV_CODEC_FLAG_LOW_DELAY) ? 0 : 1);
373 
374 // prepare surface from frame
375  switch (frame->format) {
376 #if CONFIG_D3D11VA
377  case AV_PIX_FMT_D3D11:
378  {
379  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
380  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
381  int index = (intptr_t)frame->data[1]; // index of the slice in the texture array - set to tell AMF which slice to use
382  av_assert0(frame->hw_frames_ctx && avctx->hw_frames_ctx &&
383  frame->hw_frames_ctx->data == avctx->hw_frames_ctx->data);
384  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
385  res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
386  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
387  hw_surface = 1;
388  }
389  break;
390 #endif
391 #if CONFIG_DXVA2
393  {
394  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
395  res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
396  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
397  hw_surface = 1;
398  }
399  break;
400 #endif
/* NOTE(review): the case label here (presumably AV_PIX_FMT_AMF_SURFACE,
 * given the direct cast of frame->data[0]) is absent from this view. */
402  {
403  surface = (AMFSurface*)frame->data[0];
404  surface->pVtbl->Acquire(surface);
405  hw_surface = 1;
406  }
407  break;
408  default:
/* System-memory input: allocate a host surface and copy the pixels. */
409  {
410  res = amf_device_ctx->context->pVtbl->AllocSurface(amf_device_ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
411  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
412  amf_copy_surface(avctx, frame, surface);
413  }
414  break;
415  }
416  if (hw_surface) {
418  ctx->hwsurfaces_in_queue++;
419  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
420  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
421  }
422  // HDR10 metadata
423  if (frame->color_trc == AVCOL_TRC_SMPTE2084) {
424  AMFBuffer * hdrmeta_buffer = NULL;
425  res = amf_device_ctx->context->pVtbl->AllocBuffer(amf_device_ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
426  if (res == AMF_OK) {
427  AMFHDRMetadata * hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
428  if (av_amf_extract_hdr_metadata(frame, hdrmeta) == 0) {
429  switch (avctx->codec->id) {
430  case AV_CODEC_ID_H264:
431  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_INPUT_HDR_METADATA, hdrmeta_buffer); break;
432  case AV_CODEC_ID_HEVC:
433  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_HEVC_INPUT_HDR_METADATA, hdrmeta_buffer); break;
434  case AV_CODEC_ID_AV1:
435  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_AV1_INPUT_HDR_METADATA, hdrmeta_buffer); break;
436  }
437  res = amf_set_property_buffer(surface, L"av_frame_hdrmeta", hdrmeta_buffer);
438  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_hdrmeta\" with error %d\n", res);
439  }
440  hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
441  }
442  }
443  surface->pVtbl->SetPts(surface, frame->pts);
444 
/* Store the original pts as a property so amf_copy_buffer() can restore it
 * on the matching output packet. */
445  AMF_ASSIGN_PROPERTY_INT64(res, surface, ctx->pts_property_name, frame->pts);
446 
/* Per-codec surface properties: AUD insertion and forced picture types
 * (IDR with headers when forced_idr is set, otherwise plain I). */
447  switch (avctx->codec->id) {
448  case AV_CODEC_ID_H264:
449  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
450  switch (frame->pict_type) {
451  case AV_PICTURE_TYPE_I:
452  if (ctx->forced_idr) {
453  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_SPS, 1);
454  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_PPS, 1);
455  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_IDR);
456  } else {
457  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_I);
458  }
459  break;
460  case AV_PICTURE_TYPE_P:
461  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_P);
462  break;
463  case AV_PICTURE_TYPE_B:
464  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_B);
465  break;
466  }
467  break;
468  case AV_CODEC_ID_HEVC:
469  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
470  switch (frame->pict_type) {
471  case AV_PICTURE_TYPE_I:
472  if (ctx->forced_idr) {
473  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_HEADER, 1);
474  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_IDR);
475  } else {
476  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_I);
477  }
478  break;
479  case AV_PICTURE_TYPE_P:
480  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_P);
481  break;
482  }
483  break;
484  case AV_CODEC_ID_AV1:
485  if (frame->pict_type == AV_PICTURE_TYPE_I) {
486  if (ctx->forced_idr) {
487  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_INSERT_SEQUENCE_HEADER, 1);
488  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_KEY);
489  } else {
490  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_INTRA_ONLY);
491  }
492  }
493  break;
494  default:
495  break;
496  }
497  // submit surface
498  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
499  if (res == AMF_INPUT_FULL) { // handle full queue
500  //store surface for later submission
501  *surface_resubmit = surface;
502  } else {
/* The encoder holds its own reference after a successful SubmitInput. */
503  surface->pVtbl->Release(surface);
504  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
505  ctx->submitted_frame++;
506  ret = av_fifo_write(ctx->timestamp_list, &frame->pts, 1);
507  if (ret < 0)
508  return ret;
509  if(ctx->submitted_frame <= ctx->encoded_frame + output_delay)
510  return AVERROR(EAGAIN); // too soon to poll or wait
511  }
512  return 0;
513 }
514 
515 static int amf_submit_frame_locked(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
516 {
517  int ret;
518  AMFEncoderContext *ctx = avctx->priv_data;
519  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
520  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
521 
522  if (amf_device_ctx->lock)
523  amf_device_ctx->lock(amf_device_ctx->lock_ctx);
524  ret = amf_submit_frame(avctx, frame, surface_resubmit);
525  if (amf_device_ctx->unlock)
526  amf_device_ctx->unlock(amf_device_ctx->lock_ctx);
527 
528  return ret;
529 }
/* Poll the encoder for one output. On success *buffer receives an
 * AMFBuffer reference (caller must Release it); any AVFrame reference that
 * was attached to the matching input surface is freed here, and the
 * HW-surface / encoded-frame counters are updated. Returns the raw
 * QueryOutput() result (AMF_OK, AMF_REPEAT, AMF_EOF, ...).
 * The Release/QueryInterface ordering is deliberate: the data object is
 * released immediately after re-querying it as a buffer. */
530 static AMF_RESULT amf_query_output(AVCodecContext *avctx, AMFBuffer **buffer)
531 {
532  AMFEncoderContext *ctx = avctx->priv_data;
533  AMFData *data = NULL;
534  AMF_RESULT ret = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
535  *buffer = NULL;
536  if (data) {
537  AMFGuid guid = IID_AMFBuffer();
538  data->pVtbl->QueryInterface(data, &guid, (void**)buffer); // query for buffer interface
539  data->pVtbl->Release(data);
/* Only outputs that carried an attached frame ref count against the
 * in-flight HW-surface quota. */
540  if (amf_release_attached_frame_ref(ctx, *buffer) == AMF_OK)
541  ctx->hwsurfaces_in_queue--;
542  ctx->encoded_frame++;
543  }
544  return ret;
545 }
546 
/* ff_amf_receive_packet(): common receive_packet for all AMF encoders.
 * Drains any previously retrieved output first, then pulls a frame from
 * ffmpeg (submitting it or starting a Drain at EOF), then polls the
 * encoder -- blocking while the input queue is full, a drain is pending,
 * or a surface is waiting for resubmission. Returns 0 with a packet,
 * AVERROR(EAGAIN), AVERROR_EOF, or another negative AVERROR code.
 * NOTE(review): the signature line (per the index:
 * `int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)`) and
 * several statements (the declaration of `frame`, some error-path lines)
 * are absent from this extracted view. */
548 {
549  AMFEncoderContext *ctx = avctx->priv_data;
550  AMFSurface *surface = NULL;
551  AMF_RESULT res;
552  int ret;
553  AMF_RESULT res_query;
554  AMFBuffer* buffer = NULL;
556  int block_and_wait;
557  int64_t pts = 0;
/* Outputs the encoder may hold back: B-frame depth plus one unless
 * low-delay is requested. */
558  int output_delay = FFMAX(ctx->max_b_frames, 0) + ((avctx->flags & AV_CODEC_FLAG_LOW_DELAY) ? 0 : 1);
559 
560  if (!ctx->encoder){
562  return AVERROR(EINVAL);
563  }
564  // check if some outputs are available
565  av_fifo_read(ctx->output_list, &buffer, 1);
566  if (buffer != NULL) { // return already retrieved output
567  ret = amf_copy_buffer(avctx, avpkt, buffer);
568  buffer->pVtbl->Release(buffer);
569  return ret;
570  }
571 
572  ret = ff_encode_get_frame(avctx, frame);
573  if(ret < 0){
574  if(ret != AVERROR_EOF){
576  if(ret == AVERROR(EAGAIN)){
/* No new input and the encoder cannot have produced output yet. */
577  if(ctx->submitted_frame <= ctx->encoded_frame + output_delay) // too soon to poll
578  return ret;
579  }
580  }
581  }
582  if(ret != AVERROR(EAGAIN)){
583  if (!frame->buf[0]) { // submit drain
584  if (!ctx->eof) { // submit drain one time only
585  if(!ctx->delayed_drain) {
586  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
587  if (res == AMF_INPUT_FULL) {
588  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in receive loop
589  } else {
590  if (res == AMF_OK) {
591  ctx->eof = 1; // drain started
592  }
593  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
594  }
595  }
596  }
597  } else { // submit frame
/* On AMF_INPUT_FULL the surface comes back via `surface` and is
 * resubmitted after an output has been pulled below. */
598  ret = amf_submit_frame_locked(avctx, frame, &surface);
599  if(ret < 0){
601  return ret;
602  }
603  pts = frame->pts;
604  }
605  }
607 
/* Poll loop: keep waiting while a drain is pending, EOF output has not
 * arrived, the HW-surface quota is exhausted, or a surface still needs
 * resubmission. */
608  do {
609  block_and_wait = 0;
610  // poll data
611  res_query = amf_query_output(avctx, &buffer);
612  if (buffer) {
613  ret = amf_copy_buffer(avctx, avpkt, buffer);
614  buffer->pVtbl->Release(buffer);
615 
616  AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);
617 
618  if (ctx->delayed_drain) { // try to resubmit drain
619  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
620  if (res != AMF_INPUT_FULL) {
621  ctx->delayed_drain = 0;
622  ctx->eof = 1; // drain started
623  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
624  } else {
625  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL- should not happen\n");
626  }
627  }
628  } else if (ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max) || surface) {
629  block_and_wait = 1;
630  // Only sleep if the driver doesn't support waiting in QueryOutput()
631  // or if we already have output data so we will skip calling it.
632  if (!ctx->query_timeout_supported || avpkt->data || avpkt->buf) {
633  av_usleep(1000);
634  }
635  }
636  } while (block_and_wait);
637 
638  if (res_query == AMF_EOF) {
639  ret = AVERROR_EOF;
640  } else if (buffer == NULL) {
641  ret = AVERROR(EAGAIN);
642  } else {
643  if(surface) {
644  // resubmit surface
645  do {
646  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
647  if (res != AMF_INPUT_FULL)
648  break;
649 
650  if (!ctx->query_timeout_supported)
651  av_usleep(1000);
652 
653  // Need to free up space in the encoder queue.
654  // The number of retrieved outputs is limited currently to 21
/* Extra outputs pulled while making room are parked in output_list and
 * returned on subsequent calls. */
655  amf_query_output(avctx, &buffer);
656  if (buffer != NULL) {
657  ret = av_fifo_write(ctx->output_list, &buffer, 1);
658  if (ret < 0)
659  return ret;
660  }
661  } while(res == AMF_INPUT_FULL);
662 
663  surface->pVtbl->Release(surface);
664  if (res == AMF_INPUT_FULL) {
665  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed SubmitInput returned AMF_INPUT_FULL- should not happen\n");
666  } else {
667  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
668 
669  ret = av_fifo_write(ctx->timestamp_list, &pts, 1);
670 
671  ctx->submitted_frame++;
672 
673  if (ret < 0)
674  return ret;
675  }
676  }
677  ret = 0;
678  }
679  return ret;
680 }
681 
/* Hardware configurations advertised by the AMF encoders: D3D11 and DXVA2
 * frames/devices where compiled in, plus native AMF surfaces; the list is
 * NULL-terminated.
 * NOTE(review): the array declaration line (per the index:
 * `const AVCodecHWConfigInternal *const ff_amfenc_hw_configs[]`) and two
 * entries (lines 689 and 692) are absent from this extracted view. */
683 #if CONFIG_D3D11VA
684  HW_CONFIG_ENCODER_FRAMES(D3D11, D3D11VA),
685  HW_CONFIG_ENCODER_DEVICE(NONE, D3D11VA),
686 #endif
687 #if CONFIG_DXVA2
688  HW_CONFIG_ENCODER_FRAMES(DXVA2_VLD, DXVA2),
690 #endif
691  HW_CONFIG_ENCODER_FRAMES(AMF_SURFACE, AMF),
693  NULL,
694 };
amf_release_attached_frame_ref
static AMF_RESULT amf_release_attached_frame_ref(AMFEncoderContext *ctx, AMFBuffer *buffer)
Definition: amfenc.c:351
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
int64_t
long long int64_t
Definition: coverity.c:34
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
av_fifo_peek
int av_fifo_peek(const AVFifo *f, void *buf, size_t nb_elems, size_t offset)
Read data from a FIFO without modifying FIFO state.
Definition: fifo.c:255
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
pixdesc.h
AVPacket::data
uint8_t * data
Definition: packet.h:588
encode.h
data
const char data[16]
Definition: mxf.c:149
amf_set_property_buffer
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
Definition: amfenc.c:317
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:102
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:169
amf_copy_surface
static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame, AMFSurface *surface)
Definition: amfenc.c:169
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
NONE
#define NONE
Definition: vf_drawvg.c:261
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:643
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
av_memdup
void * av_memdup(const void *p, size_t size)
Duplicate a buffer with av_malloc().
Definition: mem.c:304
AMF_AV_FRAME_REF
#define AMF_AV_FRAME_REF
Definition: amfenc.c:39
AVCodecContext::codec
const struct AVCodec * codec
Definition: avcodec.h:448
av_fifo_write
int av_fifo_write(AVFifo *f, const void *buf, size_t nb_elems)
Write data into a FIFO.
Definition: fifo.c:188
ff_amf_encode_close
int av_cold ff_amf_encode_close(AVCodecContext *avctx)
Common encoder termination function.
Definition: amfenc.c:140
ff_amf_encode_init
int ff_amf_encode_init(AVCodecContext *avctx)
Common encoder initialization function.
Definition: amfenc.c:253
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:496
val
static double val(void *priv, double ch)
Definition: aeval.c:77
AV_CODEC_FLAG_LOW_DELAY
#define AV_CODEC_FLAG_LOW_DELAY
Force low delay.
Definition: avcodec.h:314
pts
static int64_t pts
Definition: transcode_aac.c:644
av_av_to_amf_format
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
Definition: hwcontext_amf.c:133
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
avassert.h
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:106
av_fifo_read
int av_fifo_read(AVFifo *f, void *buf, size_t nb_elems)
Read data from a FIFO.
Definition: fifo.c:240
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
AMFEncoderContext
AMF encoder context.
Definition: amfenc.h:40
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
amf_submit_frame
static int amf_submit_frame(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
Definition: amfenc.c:363
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:42
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
amf_init_encoder
static int amf_init_encoder(AVCodecContext *avctx)
Definition: amfenc.c:78
ctx
static AVFormatContext * ctx
Definition: movenc.c:49
amf_query_output
static AMF_RESULT amf_query_output(AVCodecContext *avctx, AMFBuffer **buffer)
Definition: amfenc.c:530
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:483
hwcontext_amf.h
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:410
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
AV_CODEC_ID_H264
@ AV_CODEC_ID_H264
Definition: codec_id.h:79
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:100
if
if(ret)
Definition: filter_design.txt:179
AVPacket::buf
AVBufferRef * buf
A reference to the reference-counted buffer where the packet data is stored.
Definition: packet.h:571
NULL
#define NULL
Definition: coverity.c:32
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_CODEC_ID_AV1
@ AV_CODEC_ID_AV1
Definition: codec_id.h:284
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:129
ff_amf_receive_packet
int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
Encoding one frame - common function for all AMF encoders.
Definition: amfenc.c:547
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:278
av_fifo_can_read
size_t av_fifo_can_read(const AVFifo *f)
Definition: fifo.c:87
amf_copy_buffer
static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
Definition: amfenc.c:193
planes
static const struct @562 planes[]
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:265
time.h
PTS_PROP
#define PTS_PROP
Definition: amfenc.c:40
amf_submit_frame_locked
static int amf_submit_frame_locked(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
Definition: amfenc.c:515
index
int index
Definition: gxfenc.c:90
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:683
AV_PIX_FMT_X2BGR10
#define AV_PIX_FMT_X2BGR10
Definition: pixfmt.h:614
hwcontext_dxva2.h
HW_CONFIG_ENCODER_DEVICE
#define HW_CONFIG_ENCODER_DEVICE(format, device_type_)
Definition: hwconfig.h:95
i
#define i(width, name, range_min, range_max)
Definition: cbs_h264.c:63
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
av_amf_extract_hdr_metadata
int av_amf_extract_hdr_metadata(const AVFrame *frame, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:233
ff_amf_pix_fmts
enum AVPixelFormat ff_amf_pix_fmts[]
Supported formats.
Definition: amfenc.c:55
size
int size
Definition: twinvq_data.h:10344
AVAMFDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_amf.h:35
AV_NOPTS_VALUE
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:247
AVCodecHWConfigInternal
Definition: hwconfig.h:25
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:587
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:594
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:263
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
AVCodec::id
enum AVCodecID id
Definition: codec.h:186
AV_PIX_FMT_ARGB
@ AV_PIX_FMT_ARGB
packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
Definition: pixfmt.h:99
HW_CONFIG_ENCODER_FRAMES
#define HW_CONFIG_ENCODER_FRAMES(format, device_type_)
Definition: hwconfig.h:98
av_hwdevice_ctx_create_derived
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
Definition: hwcontext.c:718
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:581
hw_device_ctx
static AVBufferRef * hw_device_ctx
Definition: hw_decode.c:45
ff_amfenc_hw_configs
const AVCodecHWConfigInternal *const ff_amfenc_hw_configs[]
Definition: amfenc.c:682
amf_store_attached_frame_ref
static AMF_RESULT amf_store_attached_frame_ref(AMFEncoderContext *ctx, const AVFrame *frame, AMFSurface *surface)
Definition: amfenc.c:339
AV_CODEC_ID_HEVC
@ AV_CODEC_ID_HEVC
Definition: codec_id.h:228
AVCodecContext::hw_device_ctx
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:1487
AVCodecContext::height
int height
Definition: avcodec.h:600
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:639
AVCodecContext::hw_frames_ctx
AVBufferRef * hw_frames_ctx
A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames.
Definition: avcodec.h:1465
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
frame_ref
static int frame_ref(AVFrame *dst, const AVFrame *src)
Definition: swscale.c:1333
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:615
av_fifo_alloc2
AVFifo * av_fifo_alloc2(size_t nb_elems, size_t elem_size, unsigned int flags)
Allocate and initialize an AVFifo with a given element size.
Definition: fifo.c:47
AVCodecContext
main external API structure.
Definition: avcodec.h:439
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:280
buffer
the frame and frame reference mechanism is intended to as much as expensive copies of that data while still allowing the filters to produce correct results The data is stored in buffers represented by AVFrame structures Several references can point to the same frame buffer
Definition: filter_design.txt:49
ff_get_encode_buffer
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
Definition: encode.c:105
av_image_copy2
static void av_image_copy2(uint8_t *const dst_data[4], const int dst_linesizes[4], uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Wrapper around av_image_copy() to workaround the limitation that the conversion from uint8_t * const ...
Definition: imgutils.h:184
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
L
#define L(x)
Definition: vpx_arith.h:36
amfenc.h
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:602
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:279
AVERROR_ENCODER_NOT_FOUND
#define AVERROR_ENCODER_NOT_FOUND
Encoder not found.
Definition: error.h:56
mem.h
AVCodecContext::max_b_frames
int max_b_frames
maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 re...
Definition: avcodec.h:777
ff_encode_get_frame
int ff_encode_get_frame(AVCodecContext *avctx, AVFrame *frame)
Called by encoders to get the next frame for encoding.
Definition: encode.c:204
AVPacket
This structure stores compressed data.
Definition: packet.h:565
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:466
next_encoder_index
static int64_t next_encoder_index
Definition: amfenc.c:76
AV_PIX_FMT_RGBAF16
#define AV_PIX_FMT_RGBAF16
Definition: pixfmt.h:624
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:600
imgutils.h
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
av_fifo_freep2
void av_fifo_freep2(AVFifo **f)
Free an AVFifo and reset pointer to NULL.
Definition: fifo.c:286
pkt
static AVPacket * pkt
Definition: demux_decode.c:55
hwcontext_d3d11va.h
AV_FIFO_FLAG_AUTO_GROW
#define AV_FIFO_FLAG_AUTO_GROW
Automatically resize the FIFO on writes, so that the data fits.
Definition: fifo.h:63
w32dlfcn.h
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3376