   30 #include <dxgidebug.h> 
   57     HANDLE d3dlib, dxgilib;
 
   59     d3dlib  = dlopen("d3d11.dll", 0);
 
   60     dxgilib = dlopen("dxgi.dll", 0);
 
   61     if (!d3dlib || !dxgilib)
 
   64     mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");
 
   96     WaitForSingleObjectEx(ctx, INFINITE, FALSE);
 
  110         ID3D11Texture2D_Release(frames_hwctx->texture);
 
  113     if (s->staging_texture)
 
  114         ID3D11Texture2D_Release(s->staging_texture);
 
  115     s->staging_texture = NULL;
 
  121                                           const void *hwconfig,
 
  125     int nb_sw_formats = 0;
 
  135         UINT format_support = 0;
 
  137         if (SUCCEEDED(hr) && (format_support & D3D11_FORMAT_SUPPORT_TEXTURE2D))
 
  154     ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
 
  165         ID3D11Texture2D_Release(tex);
 
  169     if (s->nb_surfaces <= s->nb_surfaces_used) {
 
  171                                                    s->nb_surfaces_used + 1,
 
  174             ID3D11Texture2D_Release(tex);
 
  177         s->nb_surfaces = s->nb_surfaces_used + 1;
 
  182     s->nb_surfaces_used++;
 
  189         ID3D11Texture2D_Release(tex);
 
  203     ID3D11Texture2D *tex;
 
  204     D3D11_TEXTURE2D_DESC texDesc = {
 
  206         .Height     = ctx->height,
 
  209         .SampleDesc = { .Count = 1 },
 
  211         .Usage      = D3D11_USAGE_DEFAULT,
 
  216     hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
 
  230     D3D11_TEXTURE2D_DESC  texDesc;
 
  235     ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);
 
  237     if (s->nb_surfaces_used >= texDesc.ArraySize) {
 
  242     ID3D11Texture2D_AddRef(hwctx->texture);
 
  254     D3D11_TEXTURE2D_DESC texDesc;
 
  268     texDesc = (D3D11_TEXTURE2D_DESC){
 
  270         .Height     = ctx->height,
 
  273         .SampleDesc = { .Count = 1 },
 
  274         .ArraySize  = ctx->initial_pool_size,
 
  275         .Usage      = D3D11_USAGE_DEFAULT,
 
  281         D3D11_TEXTURE2D_DESC texDesc2;
 
  282         ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);
 
  284         if (texDesc.Width != texDesc2.Width ||
 
  285             texDesc.Height != texDesc2.Height ||
 
  286             texDesc.Format != texDesc2.Format) {
 
  290     } else if (!(texDesc.BindFlags & D3D11_BIND_RENDER_TARGET) && texDesc.ArraySize > 0) {
 
  291         hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
 
  301     s->nb_surfaces = ctx->initial_pool_size;
 
  305     if (!ctx->internal->pool_internal)
 
  321     frame->data[0] = (uint8_t *)desc->texture;
 
  341     fmts[0] = ctx->sw_format;
 
  345     if (s->format == DXGI_FORMAT_420_OPAQUE)
 
  358     D3D11_TEXTURE2D_DESC texDesc = {
 
  360         .Height         = ctx->height,
 
  363         .SampleDesc     = { .Count = 1 },
 
  365         .Usage          = D3D11_USAGE_STAGING,
 
  366         .CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
 
  369     hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
 
  380                               D3D11_TEXTURE2D_DESC *desc,
 
  381                               D3D11_MAPPED_SUBRESOURCE *map)
 
  385     for (i = 0; i < 4; i++)
 
  386         linesize[i] = map->RowPitch;
 
  389                            (uint8_t*)map->pData, linesize);
 
  401     ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
 
  403     ID3D11Resource *staging;
 
  406     uint8_t *map_data[4];
 
  408     D3D11_TEXTURE2D_DESC desc;
 
  409     D3D11_MAPPED_SUBRESOURCE map;
 
  412     if (frame->hw_frames_ctx->data != (uint8_t *)ctx || other->format != ctx->sw_format)
 
  417     if (!s->staging_texture) {
 
  423     staging = (ID3D11Resource *)s->staging_texture;
 
  425     ID3D11Texture2D_GetDesc(s->staging_texture, &desc);
 
  428         ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
 
  433                                      staging, 0, D3D11_MAP_READ, 0, &map);
 
  440                       ctx->sw_format, w, h);
 
  442         ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
 
  445                                      staging, 0, D3D11_MAP_WRITE, 0, &map);
 
  452                       ctx->sw_format, w, h);
 
  454         ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
 
  456         ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
 
  457                                                   texture, index, 0, 0, 0,
 
  475     if (!device_hwctx->lock) {
 
  477         if (device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {
 
  492         hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
 
  499         hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
 
  512     if (device_hwctx->device) {
 
  513         ID3D11Device_Release(device_hwctx->device);
 
  533         CloseHandle(device_hwctx->lock_ctx);
 
  534         device_hwctx->lock_ctx = INVALID_HANDLE_VALUE;
 
  545     IDXGIAdapter           *pAdapter = NULL;
 
  546     ID3D10Multithread      *pMultithread;
 
  547     UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
 
  553     if (!LoadLibrary("d3d11_1sdklayers.dll"))
 
  558         creationFlags |= D3D11_CREATE_DEVICE_DEBUG;
 
  568         IDXGIFactory2 *pDXGIFactory;
 
  571             int adapter = atoi(device);
 
  572             if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
 
  574             IDXGIFactory2_Release(pDXGIFactory);
 
  579         DXGI_ADAPTER_DESC desc;
 
  580         hr = IDXGIAdapter2_GetDesc(pAdapter, &desc);
 
  587     hr = mD3D11CreateDevice(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
 
  590         IDXGIAdapter_Release(pAdapter);
 
  596     hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread, (void **)&pMultithread);
 
  598         ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
 
  599         ID3D10Multithread_Release(pMultithread);
 
  602 #if !HAVE_UWP && HAVE_DXGIDEBUG_H 
  604         HANDLE dxgidebug_dll = LoadLibrary("dxgidebug.dll");
 
  606             HRESULT (WINAPI  * pf_DXGIGetDebugInterface)(const GUID *riid, void **ppDebug)
 
  607                 = (void *)GetProcAddress(dxgidebug_dll, "DXGIGetDebugInterface");
 
  608             if (pf_DXGIGetDebugInterface) {
 
  609                 IDXGIDebug *dxgi_debug = NULL;
 
  610                 hr = pf_DXGIGetDebugInterface(&IID_IDXGIDebug, (void**)&dxgi_debug);
 
  611                 if (SUCCEEDED(hr) && dxgi_debug)
 
  612                     IDXGIDebug_ReportLiveObjects(dxgi_debug, DXGI_DEBUG_ALL, DXGI_DEBUG_RLO_ALL);
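
The fragments above come from d3d11va_device_create() and d3d11va_device_init(): d3d11.dll and dxgi.dll are loaded at runtime, D3D11CreateDevice is resolved with GetProcAddress(), the optional debug layer (d3d11_1sdklayers.dll) is enabled when present, and live-object reporting is requested through dxgidebug.dll. A minimal sketch of how an application reaches this code through the public libavutil API; the helper name create_default_d3d11_device is illustrative, not part of FFmpeg:

    #include <libavutil/hwcontext.h>

    /* Ask libavutil for a D3D11VA device context. Internally this calls
     * d3d11va_device_create(), which loads d3d11.dll and invokes
     * D3D11CreateDevice() as shown in the listing, followed by
     * d3d11va_device_init(). */
    static int create_default_d3d11_device(AVBufferRef **device_ref)
    {
        /* The third argument selects the DXGI adapter by index as a string
         * (see the atoi(device) call at line 571); NULL picks the default. */
        return av_hwdevice_ctx_create(device_ref, AV_HWDEVICE_TYPE_D3D11VA,
                                      NULL, NULL, 0);
    }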
 
  
static AVBufferRef * d3d11va_alloc_single(AVHWFramesContext *ctx)
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
AVPixelFormat
Pixel format.
static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
This structure describes decoded (raw) audio or video data.
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
static void fill_texture_ptrs(uint8_t *data[4], int linesize[4], AVHWFramesContext *ctx, D3D11_TEXTURE2D_DESC *desc, D3D11_MAPPED_SUBRESOURCE *map)
HRESULT(WINAPI * PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory)
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
static const struct @298 supported_formats[]
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
const HWContextType ff_hwcontext_type_d3d11va
static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
ID3D11Texture2D * texture
The texture in which the frame is located.
@ AV_HWDEVICE_TYPE_D3D11VA
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
static void d3d11va_default_unlock(void *ctx)
int av_image_fill_pointers(uint8_t *data[4], enum AVPixelFormat pix_fmt, int height, uint8_t *ptr, const int linesizes[4])
Fill plane data pointers for an image with pixel format pix_fmt and height height.
static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
static int ff_thread_once(char *control, void(*routine)(void))
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
static av_cold void load_functions(void)
static int device_init(AVFormatContext *ctx, int *width, int *height, uint32_t pixelformat)
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
ID3D11VideoContext * video_context
If unset, this will be set from the device_context field on init.
ID3D11Device * device
Device used for texture creation and access.
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
static AVOnce functions_loaded
static enum AVPixelFormat pix_fmts[]
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
#define av_realloc_f(p, o, n)
AVD3D11FrameDescriptor * texture_infos
In case if texture structure member above is not NULL contains the same texture pointer for all eleme...
static void d3d11va_default_lock(void *ctx)
ID3D11VideoDevice * video_device
If unset, this will be set from the device field on init.
static AVBufferRef * wrap_texture_buf(AVHWFramesContext *ctx, ID3D11Texture2D *tex, int index)
void(* unlock)(void *lock_ctx)
This struct is allocated as AVHWFramesContext.hwctx.
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
static int d3d11va_frames_init(AVHWFramesContext *ctx)
static PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
static void free_texture(void *opaque, uint8_t *data)
ID3D11Texture2D * texture
The canonical texture used for pool allocation.
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
#define AV_LOG_INFO
Standard information.
#define i(width, name, range_min, range_max)
#define av_malloc_array(a, b)
This struct is allocated as AVHWDeviceContext.hwctx.
intptr_t index
The index into the array texture element representing the frame, or 0 if the texture is not an array ...
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
static int d3d11va_create_staging_texture(AVHWFramesContext *ctx)
void(* lock)(void *lock_ctx)
Callbacks for locking.
AVHWFrameTransferDirection
This struct describes a set or pool of "hardware" frames (i.e.
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
static PFN_D3D11_CREATE_DEVICE mD3D11CreateDevice
static AVBufferRef * d3d11va_pool_alloc(void *opaque, size_t size)
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
enum AVPixelFormat pix_fmt
static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
ID3D11Texture2D * staging_texture
A reference to a data buffer.
static int d3d11va_device_init(AVHWDeviceContext *hwdev)
const VDPAUPixFmtMap * map
D3D11 frame descriptor for pool allocation.
#define flags(name, subs,...)
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
ID3D11DeviceContext * device_context
If unset, this will be set from the device field on init.
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
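
The pool and transfer paths listed above (d3d11va_frames_init(), d3d11va_get_buffer(), d3d11va_transfer_data()) are normally reached through the generic av_hwframe_*() API. A sketch of allocating a small pool of NV12 array-texture surfaces and downloading one frame to system memory, assuming device_ref was created as in the earlier example; download_one_frame is an illustrative name and error handling is kept minimal:

    #include <libavutil/hwcontext.h>
    #include <libavutil/frame.h>
    #include <libavutil/error.h>

    static int download_one_frame(AVBufferRef *device_ref, int width, int height)
    {
        AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
        AVHWFramesContext *frames_ctx;
        AVFrame *hw = av_frame_alloc(), *sw = av_frame_alloc();
        int ret = AVERROR(ENOMEM);

        if (!frames_ref || !hw || !sw)
            goto done;

        frames_ctx = (AVHWFramesContext *)frames_ref->data;
        frames_ctx->format            = AV_PIX_FMT_D3D11; /* hardware surface format */
        frames_ctx->sw_format         = AV_PIX_FMT_NV12;  /* underlying DXGI format  */
        frames_ctx->width             = width;
        frames_ctx->height            = height;
        frames_ctx->initial_pool_size = 4;  /* becomes texDesc.ArraySize (line 274) */

        ret = av_hwframe_ctx_init(frames_ref);          /* d3d11va_frames_init()  */
        if (ret < 0)
            goto done;

        ret = av_hwframe_get_buffer(frames_ref, hw, 0); /* d3d11va_get_buffer()   */
        if (ret < 0)
            goto done;

        sw->format = AV_PIX_FMT_NV12;                   /* must equal sw_format (line 412) */
        ret = av_hwframe_transfer_data(sw, hw, 0);      /* d3d11va_transfer_data(), via the
                                                           staging texture */
    done:
        av_frame_free(&hw);
        av_frame_free(&sw);
        av_buffer_unref(&frames_ref);
        return ret;
    }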
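
The lock, unlock and lock_ctx callbacks documented above default to the event-based d3d11va_default_lock()/d3d11va_default_unlock() pair installed at lines 475-477 and torn down at lines 533-534. An application that already owns an ID3D11Device can wrap it in an AVHWDeviceContext and supply its own synchronization instead; a sketch under the assumption that the device was created with video support, with wrap_existing_device, my_lock and my_unlock as illustrative names and a CRITICAL_SECTION the caller has already initialized:

    #define COBJMACROS
    #include <windows.h>
    #include <d3d11.h>
    #include <libavutil/hwcontext.h>
    #include <libavutil/hwcontext_d3d11va.h>
    #include <libavutil/error.h>

    static void my_lock(void *lock_ctx)   { EnterCriticalSection(lock_ctx); }
    static void my_unlock(void *lock_ctx) { LeaveCriticalSection(lock_ctx); }

    static int wrap_existing_device(AVBufferRef **out, ID3D11Device *device,
                                    CRITICAL_SECTION *cs)
    {
        AVBufferRef *ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_D3D11VA);
        AVHWDeviceContext *ctx;
        AVD3D11VADeviceContext *hwctx;
        int ret;

        if (!ref)
            return AVERROR(ENOMEM);

        ctx   = (AVHWDeviceContext *)ref->data;
        hwctx = ctx->hwctx;

        ID3D11Device_AddRef(device);     /* released in d3d11va_device_uninit() (line 513) */
        hwctx->device   = device;
        hwctx->lock     = my_lock;       /* used instead of d3d11va_default_lock()   */
        hwctx->unlock   = my_unlock;     /* used instead of d3d11va_default_unlock() */
        hwctx->lock_ctx = cs;            /* handed back to both callbacks            */

        ret = av_hwdevice_ctx_init(ref); /* runs d3d11va_device_init() */
        if (ret < 0) {
            av_buffer_unref(&ref);
            return ret;
        }
        *out = ref;
        return 0;
    }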