#include <dxgidebug.h>
HANDLE d3dlib, dxgilib;

d3dlib  = dlopen("d3d11.dll", 0);
dxgilib = dlopen("dxgi.dll", 0);
if (!d3dlib || !dxgilib)
    return;

mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");
WaitForSingleObjectEx(ctx, INFINITE, FALSE);
ID3D11Texture2D_Release(frames_hwctx->texture);

if (s->staging_texture)
    ID3D11Texture2D_Release(s->staging_texture);
s->staging_texture = NULL;
const void *hwconfig,

int nb_sw_formats = 0;

UINT format_support = 0;

if (SUCCEEDED(hr) && (format_support & D3D11_FORMAT_SUPPORT_TEXTURE2D))
ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
ID3D11Texture2D_Release(tex);

ID3D11Texture2D_Release(tex);
ID3D11Texture2D *tex;
D3D11_TEXTURE2D_DESC texDesc = {
    .Height     = ctx->height,
    .SampleDesc = { .Count = 1 },
    .Usage      = D3D11_USAGE_DEFAULT,
};

hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
D3D11_TEXTURE2D_DESC texDesc;

ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);

if (s->nb_surfaces_used >= texDesc.ArraySize) {
    ...
}

ID3D11Texture2D_AddRef(hwctx->texture);
D3D11_TEXTURE2D_DESC texDesc;

texDesc = (D3D11_TEXTURE2D_DESC){
    .Height     = ctx->height,
    .SampleDesc = { .Count = 1 },
    .ArraySize  = ctx->initial_pool_size,
    .Usage      = D3D11_USAGE_DEFAULT,
};

if (hwctx->texture) {
    D3D11_TEXTURE2D_DESC texDesc2;
    ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);

    if (texDesc.Width  != texDesc2.Width  ||
        texDesc.Height != texDesc2.Height ||
        texDesc.Format != texDesc2.Format) {
        ...
    }
} else if (texDesc.ArraySize > 0) {
    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
fmts[0] = ctx->sw_format;

if (s->format == DXGI_FORMAT_420_OPAQUE)
D3D11_TEXTURE2D_DESC texDesc = {
    .Height         = ctx->height,
    .SampleDesc     = { .Count = 1 },
    .Usage          = D3D11_USAGE_STAGING,
    .CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
};

hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
D3D11_TEXTURE2D_DESC *desc,
D3D11_MAPPED_SUBRESOURCE *map)

for (i = 0; i < 4; i++)
    linesize[i] = map->RowPitch;
ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
ID3D11Resource *staging;
D3D11_TEXTURE2D_DESC desc;
D3D11_MAPPED_SUBRESOURCE map;

if (!s->staging_texture) {
    ...
}
staging = (ID3D11Resource *)s->staging_texture;

ID3D11Texture2D_GetDesc(s->staging_texture, &desc);

/* download path: copy the frame's array slice into the staging texture,
   map it for reading and copy the mapped planes out */
ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context, ...);
hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                             staging, 0, D3D11_MAP_READ, 0, &map);
av_image_copy(..., ctx->sw_format, w, h);
ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);

/* upload path: map the staging texture for writing, copy the source planes
   in, then copy the staging texture into the destination array slice */
hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                             staging, 0, D3D11_MAP_WRITE, 0, &map);
av_image_copy(..., ctx->sw_format, w, h);
ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                          texture, index, 0, 0, 0, ...);
if (!device_hwctx->lock) {
    ...
    if (device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {

hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
                                        (void **)&device_hwctx->video_device);

hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
                                        (void **)&device_hwctx->video_context);

if (device_hwctx->device) {
    ID3D11Device_Release(device_hwctx->device);

CloseHandle(device_hwctx->lock_ctx);
device_hwctx->lock_ctx = INVALID_HANDLE_VALUE;
IDXGIAdapter *pAdapter = NULL;
ID3D10Multithread *pMultithread;
UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;

if (!LoadLibrary("d3d11_1sdklayers.dll"))
    ...

creationFlags |= D3D11_CREATE_DEVICE_DEBUG;

IDXGIFactory2 *pDXGIFactory;

int adapter = atoi(device);
if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
    ...
IDXGIFactory2_Release(pDXGIFactory);

DXGI_ADAPTER_DESC desc;
hr = IDXGIAdapter2_GetDesc(pAdapter, &desc);

hr = mD3D11CreateDevice(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE,
                        NULL, creationFlags, NULL, 0, ...);
IDXGIAdapter_Release(pAdapter);

hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread,
                                 (void **)&pMultithread);
if (SUCCEEDED(hr)) {
    ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
    ID3D10Multithread_Release(pMultithread);
}
#if !HAVE_UWP && HAVE_DXGIDEBUG_H
HANDLE dxgidebug_dll = LoadLibrary("dxgidebug.dll");
HRESULT (WINAPI * pf_DXGIGetDebugInterface)(const GUID *riid, void **ppDebug)
    = (void *)GetProcAddress(dxgidebug_dll, "DXGIGetDebugInterface");
if (pf_DXGIGetDebugInterface) {
    IDXGIDebug *dxgi_debug = NULL;
    hr = pf_DXGIGetDebugInterface(&IID_IDXGIDebug, (void **)&dxgi_debug);
    if (SUCCEEDED(hr) && dxgi_debug)
        IDXGIDebug_ReportLiveObjects(dxgi_debug, DXGI_DEBUG_ALL, DXGI_DEBUG_RLO_ALL);
static AVBufferRef * d3d11va_alloc_single(AVHWFramesContext *ctx)
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
AVPixelFormat
Pixel format.
static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
This structure describes decoded (raw) audio or video data.
UINT MiscFlags
D3D11_TEXTURE2D_DESC.MiscFlags used for texture creation.
static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
AVBufferRef * av_buffer_create(uint8_t *data, int size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
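By way of illustration only (not part of this file; the helper names are hypothetical), av_buffer_create() is what lets a refcounted AVBuffer wrap memory the caller already owns and run a custom callback when the last reference is dropped, the same pattern used here to wrap a texture:
#include <libavutil/buffer.h>
#include <libavutil/mem.h>

/* Hypothetical example: wrap an existing array in a refcounted AVBuffer. */
static void free_array(void *opaque, uint8_t *data)
{
    av_free(data);                    /* runs when the last reference is dropped */
}

static AVBufferRef *wrap_array(int size)
{
    uint8_t *mem = av_malloc(size);
    AVBufferRef *ref;

    if (!mem)
        return NULL;
    ref = av_buffer_create(mem, size, free_array, NULL, 0);
    if (!ref)
        av_free(mem);
    return ref;
}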
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
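As a rough usage sketch (not from this file), the constraints that d3d11va_frames_get_constraints() fills in are normally reached from application code through av_hwdevice_get_hwframe_constraints():
#include <stdio.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixdesc.h>

/* Sketch: print the software formats a D3D11VA device reports as usable. */
static int print_d3d11_sw_formats(void)
{
    AVBufferRef *dev = NULL;
    AVHWFramesConstraints *c;
    int err = av_hwdevice_ctx_create(&dev, AV_HWDEVICE_TYPE_D3D11VA, NULL, NULL, 0);
    if (err < 0)
        return err;

    c = av_hwdevice_get_hwframe_constraints(dev, NULL);
    if (c) {
        for (const enum AVPixelFormat *p = c->valid_sw_formats;
             p && *p != AV_PIX_FMT_NONE; p++)
            printf("%s\n", av_get_pix_fmt_name(*p));
        av_hwframe_constraints_free(&c);
    }
    av_buffer_unref(&dev);
    return 0;
}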
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
AVFormatInternal * internal
An opaque field for libavformat internal usage.
static void fill_texture_ptrs(uint8_t *data[4], int linesize[4], AVHWFramesContext *ctx, D3D11_TEXTURE2D_DESC *desc, D3D11_MAPPED_SUBRESOURCE *map)
HRESULT(WINAPI * PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory)
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
UINT BindFlags
D3D11_TEXTURE2D_DESC.BindFlags used for texture creation.
const HWContextType ff_hwcontext_type_d3d11va
static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
@ AV_HWDEVICE_TYPE_D3D11VA
static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
static void d3d11va_default_unlock(void *ctx)
int av_image_fill_pointers(uint8_t *data[4], enum AVPixelFormat pix_fmt, int height, uint8_t *ptr, const int linesizes[4])
Fill plane data pointers for an image with pixel format pix_fmt and height height.
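For illustration (assumed helper, not part of the file): fill_texture_ptrs() relies on av_image_fill_pointers() to turn a single mapped base pointer plus a pitch into per-plane pointers, roughly like this:
#include <libavutil/pixfmt.h>
#include <libavutil/imgutils.h>

/* Sketch: derive NV12 plane pointers from one mapped base pointer where every
 * plane shares the same pitch (as with D3D11_MAPPED_SUBRESOURCE.RowPitch). */
static int map_planes_nv12(uint8_t *data[4], uint8_t *base, int pitch, int height)
{
    int linesize[4];
    for (int i = 0; i < 4; i++)
        linesize[i] = pitch;
    return av_image_fill_pointers(data, AV_PIX_FMT_NV12, height, base, linesize);
}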
static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
static int ff_thread_once(char *control, void(*routine)(void))
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
static av_cold void load_functions(void)
static int device_init(AVFormatContext *ctx, int *width, int *height, uint32_t pixelformat)
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
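A tiny sketch of the lookup pattern (hypothetical helper): a non-NULL return from av_dict_get() is how an options dictionary is typically probed, e.g. for a "debug" flag passed to device creation.
#include <libavutil/dict.h>

/* Sketch: treat the mere presence of a key as a boolean option. */
static int opt_is_set(const AVDictionary *opts, const char *key)
{
    return av_dict_get(opts, key, NULL, 0) != NULL;
}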
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
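Illustrative only: a buffer drawn with av_buffer_pool_get() goes back to the pool, not to the allocator, when its last reference is released.
#include <libavutil/buffer.h>
#include <libavutil/error.h>

/* Sketch: borrow one buffer from a pool and give it back. */
static int use_pool_once(AVBufferPool *pool)
{
    AVBufferRef *buf = av_buffer_pool_get(pool);
    if (!buf)
        return AVERROR(ENOMEM);
    /* ... use buf->data / buf->size ... */
    av_buffer_unref(&buf);   /* recycled by the pool for the next get */
    return 0;
}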
ID3D11VideoContext * video_context
If unset, this will be set from the device_context field on init.
ID3D11Device * device
Device used for texture creation and access.
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
static AVOnce functions_loaded
static enum AVPixelFormat pix_fmts[]
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
static void d3d11va_default_lock(void *ctx)
ID3D11VideoDevice * video_device
If unset, this will be set from the device field on init.
void(* unlock)(void *lock_ctx)
This struct is allocated as AVHWFramesContext.hwctx.
static AVBufferRef * wrap_texture_buf(ID3D11Texture2D *tex, int index)
static AVBufferRef * d3d11va_pool_alloc(void *opaque, int size)
static int d3d11va_frames_init(AVHWFramesContext *ctx)
static PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
static void free_texture(void *opaque, uint8_t *data)
ID3D11Texture2D * texture
The canonical texture used for pool allocation.
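A rough usage sketch (not from this file, error handling trimmed): when texture is left NULL, frames init allocates an array texture of initial_pool_size slices itself; a caller that already owns a compatible array texture could assign it to this field before av_hwframe_ctx_init().
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_d3d11va.h>

/* Sketch: set up a D3D11 frames context; hwctx->texture is left NULL so the
 * pool texture is created internally. */
static AVBufferRef *make_d3d11_frames(AVBufferRef *device_ref, int w, int h, int nb)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext *frames;
    AVD3D11VAFramesContext *hwctx;

    if (!frames_ref)
        return NULL;
    frames = (AVHWFramesContext *)frames_ref->data;
    hwctx  = frames->hwctx;

    frames->format            = AV_PIX_FMT_D3D11;
    frames->sw_format         = AV_PIX_FMT_NV12;
    frames->width             = w;
    frames->height            = h;
    frames->initial_pool_size = nb;
    hwctx->BindFlags          = D3D11_BIND_DECODER;   /* example binding */

    if (av_hwframe_ctx_init(frames_ref) < 0)
        av_buffer_unref(&frames_ref);
    return frames_ref;
}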
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
#define AV_LOG_INFO
Standard information.
static const struct @291 supported_formats[]
#define i(width, name, range_min, range_max)
#define av_malloc_array(a, b)
This struct is allocated as AVHWDeviceContext.hwctx.
AVBufferPool * av_buffer_pool_init2(int size, void *opaque, AVBufferRef *(*alloc)(void *opaque, int size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
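A minimal sketch of the same pattern used here (hypothetical names): av_buffer_pool_init2() differs from av_buffer_pool_init() in that the allocator receives an opaque pointer, which is how d3d11va_pool_alloc() gets back to its frames context.
#include <libavutil/buffer.h>

/* Sketch: a pool whose allocator can consult caller context via `opaque`. */
static AVBufferRef *ctx_alloc(void *opaque, int size)
{
    /* `opaque` could point at a device/frames context; unused here. */
    return av_buffer_alloc(size);
}

static AVBufferPool *make_pool(void *ctx, int size)
{
    return av_buffer_pool_init2(size, ctx, ctx_alloc, NULL);
}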
static int d3d11va_create_staging_texture(AVHWFramesContext *ctx)
void(* lock)(void *lock_ctx)
Callbacks for locking.
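Sketch of the caller side (assumed application code): a user that shares the ID3D11Device with its own threads can install lock/unlock callbacks so device-context access is serialized against the application; when they are left unset, a default mutex-based lock is created on init, as the fragments above show.
#include <windows.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_d3d11va.h>

/* Sketch: route the hwcontext's locking through an application-owned
 * CRITICAL_SECTION. `cs` must outlive the device context. */
static void app_lock(void *lock_ctx)   { EnterCriticalSection(lock_ctx); }
static void app_unlock(void *lock_ctx) { LeaveCriticalSection(lock_ctx); }

static void install_app_lock(AVHWDeviceContext *dev_ctx, CRITICAL_SECTION *cs)
{
    AVD3D11VADeviceContext *hwctx = dev_ctx->hwctx;
    hwctx->lock     = app_lock;
    hwctx->unlock   = app_unlock;
    hwctx->lock_ctx = cs;
}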
AVHWFrameTransferDirection
This struct describes a set or pool of "hardware" frames (i.e.
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
static PFN_D3D11_CREATE_DEVICE mD3D11CreateDevice
#define FF_ARRAY_ELEMS(a)
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
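Illustrative sketch (hypothetical helper): av_image_copy() is the plane-by-plane copy used between a mapped staging texture and an ordinary software frame during transfers.
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>

/* Sketch: copy a software frame's planes into previously mapped plane
 * pointers (e.g. ones derived from a mapped staging texture). */
static void copy_into_mapped(uint8_t *map_data[4], int map_linesize[4],
                             const AVFrame *src)
{
    av_image_copy(map_data, map_linesize,
                  (const uint8_t **)src->data, src->linesize,
                  (enum AVPixelFormat)src->format, src->width, src->height);
}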
enum AVPixelFormat pix_fmt
static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
ID3D11Texture2D * staging_texture
A reference to a data buffer.
static int d3d11va_device_init(AVHWDeviceContext *hwdev)
const VDPAUPixFmtMap * map
D3D11 frame descriptor for pool allocation.
#define flags(name, subs,...)
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
ID3D11DeviceContext * device_context
If unset, this will be set from the device field on init.
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.