void *spv_opaque = NULL;
const int plane = s->alpha ? 3 : 0;
spv = ff_vk_spirv_init();
VK_SHADER_STAGE_COMPUTE_BIT,
(const char *[]) { "GL_KHR_shader_subgroup_ballot" }, 1,
GLSLC(0, layout(push_constant, std430) uniform pushConstants { );
GLSLC(1,     float threshold; );
VK_SHADER_STAGE_COMPUTE_BIT);
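On the C side the push-constant block above is mirrored by a plain struct that is registered with ff_vk_shader_add_push_const() (signature further down). A hedged sketch; the struct name is illustrative and only the single float member is taken from the GLSL above:

/* Hedged sketch: C-side mirror of the std430 push-constant block. */
typedef struct BlackDetectPushData {
    float threshold;
} BlackDetectPushData;

static int add_push_const_sketch(FFVulkanShader *shd)
{
    return ff_vk_shader_add_push_const(shd, 0, sizeof(BlackDetectPushData),
                                       VK_SHADER_STAGE_COMPUTE_BIT);
}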
.type       = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
.mem_quali  = "readonly",
.stages     = VK_SHADER_STAGE_COMPUTE_BIT,
.name        = "sum_buffer",
.type        = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
.stages      = VK_SHADER_STAGE_COMPUTE_BIT,
.buf_content = "uint slice_sum[];",
GLSLC(0, shared uint wg_sum; );
GLSLC(1, barrier(); );
GLSLC(1, const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); );
GLSLF(1, if (!IS_WITHIN(pos, imageSize(input_img[%d]))) ,plane);
GLSLF(1,     float value = imageLoad(input_img[%d], pos).x; ,plane);
GLSLC(1, uvec4 isblack = subgroupBallot(value <= threshold); );
GLSLC(1, if (subgroupElect()) );
GLSLC(1, barrier(); );
GLSLC(1, if (gl_LocalInvocationIndex == 0u) );
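For reference, a standalone CPU version of what the compute shader above counts; this is an illustration, not part of the filter. The GPU code performs the same value <= threshold test per invocation and reduces the result with subgroupBallot() and the shared wg_sum counter instead of a serial loop:

/* Standalone CPU reference: count pixels at or below the black threshold
 * in one 8-bit grayscale plane. */
#include <stddef.h>
#include <stdint.h>

static uint64_t count_black_pixels(const uint8_t *plane, ptrdiff_t linesize,
                                   int w, int h, uint8_t threshold)
{
    uint64_t n = 0;
    for (int y = 0; y < h; y++)
        for (int x = 0; x < w; x++)
            n += plane[y * linesize + x] <= threshold;
    return n;
}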
if ((black_end - s->black_start) >= s->black_min_duration_time / av_q2d(inlink->time_base)) {
    "black_start:%s black_end:%s black_duration:%s\n",
uint64_t nb_black_pixels = 0;
ratio = (double) nb_black_pixels / (link->w * link->h);
"frame:%"PRId64" picture_black_ratio:%f pts:%s t:%s type:%c\n",
if (ratio >= s->picture_black_ratio_th) {
    s->black_start = in->pts;
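A hedged sketch of the evaluation step these fragments come from: the per-slice counters written by the shader are summed, normalised by the frame area and compared against picture_black_ratio_th. The types involved (BlackDetectBuf, BlackDetectVulkanContext) appear in the symbol list below, but the body and the AV_NOPTS_VALUE sentinel are assumptions:

/* Hedged sketch, not the filter's verbatim evaluate(). */
static void evaluate_sketch(AVFilterLink *link, AVFrame *in,
                            const BlackDetectBuf *sum,
                            BlackDetectVulkanContext *s)
{
    uint64_t nb_black_pixels = 0;
    for (size_t i = 0; i < FF_ARRAY_ELEMS(sum->slice_sum); i++)
        nb_black_pixels += sum->slice_sum[i];

    double ratio = (double) nb_black_pixels / (link->w * link->h);

    if (ratio >= s->picture_black_ratio_th) {
        if (s->black_start == AV_NOPTS_VALUE) /* assumption: sentinel used */
            s->black_start = in->pts;         /* a black run starts here */
    }
}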
VkImageMemoryBarrier2 img_bar[4];
const int depth = desc->comp[0].depth;
const int ymin  = 16  << (depth - 8);
const int ymax  = 235 << (depth - 8);
const int imax  = (1 << depth) - 1;
push_data.threshold = (s->pixel_black_th * (ymax - ymin) + ymin) / imax;
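A standalone worked example (not filter code) of the threshold mapping above for 8-bit limited-range luma: the pixel_black_th option is expressed relative to the 16..235 luma range and then normalised to the 0..1 values the shader reads from the storage image:

#include <stdio.h>

int main(void)
{
    const int    depth          = 8;
    const int    ymin           = 16  << (depth - 8);   /* 16  */
    const int    ymax           = 235 << (depth - 8);   /* 235 */
    const int    imax           = (1 << depth) - 1;     /* 255 */
    const double pixel_black_th = 0.10;                 /* example option value */

    double threshold = (pixel_black_th * (ymax - ymin) + ymin) / imax;
    printf("normalised threshold: %f\n", threshold);    /* ~0.1486 */
    return 0;
}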
VK_BUFFER_USAGE_TRANSFER_DST_BIT |
VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
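These usage and memory-property flags are arguments of ff_vk_get_pooled_buffer() (signature further down), which hands back an AVBufferRef wrapping an FFVkBuffer. A hedged sketch of the surrounding call; the sizeof(BlackDetectBuf) size and the s->vkctx / s->sum_buf_pool member names are assumptions based on the symbol list:

/* Hedged sketch, not the filter's verbatim allocation. */
AVBufferRef *sum_ref = NULL;
int err = ff_vk_get_pooled_buffer(&s->vkctx, &s->sum_buf_pool, &sum_ref,
                                  VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                                  VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
                                  NULL,                   /* create_pNext */
                                  sizeof(BlackDetectBuf), /* assumption */
                                  VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
                                  VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                                  VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
if (err < 0)
    return err;
FFVkBuffer *sum_vk = (FFVkBuffer *)sum_ref->data; /* the FFVkBuffer lives in data */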
VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT));
VK_IMAGE_LAYOUT_GENERAL, VK_NULL_HANDLE);
VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_GENERAL,
VK_QUEUE_FAMILY_IGNORED);
vk->CmdPipelineBarrier2(exec->buf, &(VkDependencyInfo) {
        .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
        .pBufferMemoryBarriers = &(VkBufferMemoryBarrier2) {
            .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
            .srcStageMask = VK_PIPELINE_STAGE_2_NONE,
            .dstStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT,
            .dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .buffer = sum_vk->buf,
            .size = sum_vk->size,
        .bufferMemoryBarrierCount = 1,
vk->CmdFillBuffer(exec->buf, sum_vk->buf, 0, sum_vk->size, 0x0);
vk->CmdPipelineBarrier2(exec->buf, &(VkDependencyInfo) {
        .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
        .pImageMemoryBarriers = img_bar,
        .imageMemoryBarrierCount = nb_img_bar,
        .pBufferMemoryBarriers = &(VkBufferMemoryBarrier2) {
            .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
            .srcStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT,
            .dstStageMask = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
            .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
            .dstAccessMask = VK_ACCESS_2_SHADER_STORAGE_READ_BIT |
                             VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .buffer = sum_vk->buf,
            .size = sum_vk->size,
        .bufferMemoryBarrierCount = 1,
sum_vk, 0, sum_vk->size,
VK_FORMAT_UNDEFINED));
0, sizeof(push_data), &push_data);
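The two fragments above are the tails of descriptor and push-constant updates. A hedged sketch of the complete calls, following the ff_vk_shader_update_desc_buffer() and ff_vk_shader_update_push_const() signatures listed further down (the set/binding indices are assumptions):

/* Hedged sketch, not the filter's verbatim code. */
int err = ff_vk_shader_update_desc_buffer(&s->vkctx, exec, &s->shd,
                                          0 /* set */, 1 /* binding: sum_buffer */,
                                          0 /* elem */, sum_vk, 0, sum_vk->size,
                                          VK_FORMAT_UNDEFINED);
if (err < 0)
    return err;
ff_vk_shader_update_push_const(&s->vkctx, exec, &s->shd,
                               VK_SHADER_STAGE_COMPUTE_BIT,
                               0, sizeof(push_data), &push_data);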
vk->CmdDispatch(exec->buf,
                FFALIGN(in->width,  s->shd.lg_size[0]) / s->shd.lg_size[0],
                FFALIGN(in->height, s->shd.lg_size[1]) / s->shd.lg_size[1],
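The FFALIGN(x, a) / a pattern above is a ceiling division of the frame size by the shader's local group size. A standalone illustration with example numbers (not filter code):

#include <stdio.h>

#define FFALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
    int width = 1920, height = 1080;
    int lg_x  = 32,   lg_y   = 32;   /* example local group size */

    printf("dispatch %d x %d workgroups\n",
           FFALIGN(width,  lg_x) / lg_x,    /* 60 */
           FFALIGN(height, lg_y) / lg_y);   /* 34 */
    return 0;
}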
vk->CmdPipelineBarrier2(exec->buf, &(VkDependencyInfo) {
        .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
        .pBufferMemoryBarriers = &(VkBufferMemoryBarrier2) {
            .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
            .srcStageMask = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
            .dstStageMask = VK_PIPELINE_STAGE_2_HOST_BIT,
            .srcAccessMask = VK_ACCESS_2_SHADER_STORAGE_READ_BIT |
                             VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT,
            .dstAccessMask = VK_ACCESS_HOST_READ_BIT,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .buffer = sum_vk->buf,
            .size = sum_vk->size,
        .bufferMemoryBarrierCount = 1,
#define OFFSET(x) offsetof(BlackDetectVulkanContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
{ "black_min_duration", "set minimum detected black duration in seconds",
  OFFSET(black_min_duration_time), AV_OPT_TYPE_DOUBLE, {.dbl=2}, 0, DBL_MAX, FLAGS },
{ "picture_black_ratio_th", "set the picture black ratio threshold",
  OFFSET(picture_black_ratio_th), AV_OPT_TYPE_DOUBLE, {.dbl=.98}, 0, 1, FLAGS },
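A hedged usage sketch for the two options above: they can be passed in the usual key=value form when the filter is instantiated (this assumes a Vulkan hardware device has already been attached to the graph or filter):

#include "libavfilter/avfilter.h"

static int create_blackdetect_sketch(AVFilterGraph *graph, AVFilterContext **out)
{
    const AVFilter *f = avfilter_get_by_name("blackdetect_vulkan");
    if (!f)
        return AVERROR_FILTER_NOT_FOUND;
    return avfilter_graph_create_filter(out, f, "blackdetect",
                                        "black_min_duration=2:picture_black_ratio_th=0.98",
                                        NULL, graph);
}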
.p.name       = "blackdetect_vulkan",
.p.priv_class = &blackdetect_vulkan_class,
static av_cold int init_filter(AVFilterContext *ctx)
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Filter: the word "frame" indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Format: for each input and each output, the list of supported formats. For video that means pixel format; for audio that means channel layout and sample format. These lists are references to shared objects: when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection, and when a single format is eventually chosen for a link amongst the remaining ones, all references to the list are updated. That means that if a filter requires that its input and output have the same format amongst a supported list, all it has to do is use a reference to the same list of formats. query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references, ownership and permissions.
void ff_vk_shader_free(FFVulkanContext *s, FFVulkanShader *shd)
Free a shader.
int ff_vk_shader_init(FFVulkanContext *s, FFVulkanShader *shd, const char *name, VkPipelineStageFlags stage, const char *extensions[], int nb_extensions, int lg_x, int lg_y, int lg_z, uint32_t required_subgroup_size)
Initialize a shader object, with a specific set of extensions, type+bind, local group size,...
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
uint8_t * data
The data buffer.
int ff_vk_exec_pool_init(FFVulkanContext *s, AVVulkanDeviceQueueFamily *qf, FFVkExecPool *pool, int nb_contexts, int nb_queries, VkQueryType query_type, int query_64bit, const void *query_create_pnext)
Allocates an execution pool (freed with ff_vk_exec_pool_free()).
The exact code depends on how similar the blocks are and how related they are to the rest of the filter, and needs to apply these operations to the correct inlink or outlink if there are several. Macros are available to factor that out when no extra processing is needed.
int64_t current_pts
Current timestamp of the link, as defined by the most recent frame(s), in link time_base units.
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g. extended_data.
#define FILTER_INPUTS(array)
This structure describes decoded (raw) audio or video data.
const FFFilter ff_vf_blackdetect_vulkan
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
int ff_vk_filter_init(AVFilterContext *avctx)
General lavfi IO functions.
@ AVCOL_RANGE_JPEG
Full range content.
FFVkExecContext * ff_vk_exec_get(FFVulkanContext *s, FFVkExecPool *pool)
Retrieve an execution context from an execution pool.
void ff_vk_uninit(FFVulkanContext *s)
Frees main context.
void(* uninit)(struct FFVkSPIRVCompiler **ctx)
const char * name
Filter name.
A link between two filters.
static int config_output(AVFilterLink *outlink)
@ AV_PIX_FMT_VULKAN
Vulkan hardware images.
int ff_vk_exec_add_dep_frame(FFVulkanContext *s, FFVkExecContext *e, AVFrame *f, VkPipelineStageFlagBits2 wait_stage, VkPipelineStageFlagBits2 signal_stage)
Link properties exposed to filter code, but not external callers.
int av_pix_fmt_count_planes(enum AVPixelFormat pix_fmt)
void * priv
private data for use by the filter
#define u(width, name, range_min, range_max)
AVVulkanDeviceQueueFamily * qf
void ff_vk_shader_update_img_array(FFVulkanContext *s, FFVkExecContext *e, FFVulkanShader *shd, AVFrame *f, VkImageView *views, int set, int binding, VkImageLayout layout, VkSampler sampler)
Update a descriptor in a buffer with an image array.
int ff_vk_shader_register_exec(FFVulkanContext *s, FFVkExecPool *pool, FFVulkanShader *shd)
Register a shader with an exec pool.
int ff_vk_shader_add_descriptor_set(FFVulkanContext *s, FFVulkanShader *shd, FFVulkanDescriptorSetBinding *desc, int nb, int singular, int print_to_shader_only)
Add a descriptor set to a shader.
A filter pad used for either input or output.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
@ AV_OPT_TYPE_DOUBLE
Underlying C type is double.
static double av_q2d(AVRational a)
Convert an AVRational to a double.
void ff_vk_exec_wait(FFVulkanContext *s, FFVkExecContext *e)
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
#define AV_PIX_FMT_FLAG_ALPHA
The pixel format has an alpha channel.
void ff_vk_exec_pool_free(FFVulkanContext *s, FFVkExecPool *pool)
#define FILTER_OUTPUTS(array)
const char * ff_vk_shader_rep_fmt(enum AVPixelFormat pix_fmt, enum FFVkShaderRepFormat rep_fmt)
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
AVBufferPool * sum_buf_pool
AVFilterLink ** inputs
array of pointers to input links
void av_buffer_pool_uninit(AVBufferPool **ppool)
Mark the pool as being available for freeing.
int ff_vk_filter_config_output(AVFilterLink *outlink)
static void blackdetect_vulkan_uninit(AVFilterContext *avctx)
uint64_t flags
Combination of AV_PIX_FMT_FLAG_...
AVFILTER_DEFINE_CLASS(blackdetect_vulkan)
static FilterLink * ff_filter_link(AVFilterLink *link)
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically propagated through it.
double black_min_duration_time
enum AVPictureType pict_type
Picture type of the frame.
int(* init)(AVBSFContext *ctx)
#define av_ts2timestr(ts, tb)
Convenience macro, the return value should be used only directly in function arguments but never stand-alone.
double picture_black_ratio_th
void ff_vk_shader_update_push_const(FFVulkanContext *s, FFVkExecContext *e, FFVulkanShader *shd, VkShaderStageFlagBits stage, int offset, size_t size, void *src)
Update push constant in a shader.
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
#define AV_PIX_FMT_FLAG_RGB
The pixel format contains RGB-like data (as opposed to YUV/grayscale).
uint32_t slice_sum[SLICES]
#define AVFILTER_FLAG_HWDEVICE
The filter can create hardware frames using AVFilterContext.hw_device_ctx.
#define AV_NUM_DATA_POINTERS
#define AV_NOPTS_VALUE
Undefined timestamp value.
AVRational time_base
Time base for the timestamps in this frame.
AVFilterContext * src
source filter
int(* compile_shader)(FFVulkanContext *s, struct FFVkSPIRVCompiler *ctx, FFVulkanShader *shd, uint8_t **data, size_t *size, const char *entrypoint, void **opaque)
#define AVERROR_EXTERNAL
Generic error in an external library.
int ff_vk_shader_update_desc_buffer(FFVulkanContext *s, FFVkExecContext *e, FFVulkanShader *shd, int set, int bind, int elem, FFVkBuffer *buf, VkDeviceSize offset, VkDeviceSize len, VkFormat fmt)
Update a descriptor in a buffer with a buffer.
static void report_black_region(AVFilterContext *ctx, int64_t black_end)
#define AV_LOG_INFO
Standard information.
char av_get_picture_type_char(enum AVPictureType pict_type)
Return a single letter to describe the given picture type pict_type.
static void uninit(AVBSFContext *ctx)
int ff_vk_exec_start(FFVulkanContext *s, FFVkExecContext *e)
Start/submit/wait an execution.
#define i(width, name, range_min, range_max)
void ff_vk_frame_barrier(FFVulkanContext *s, FFVkExecContext *e, AVFrame *pic, VkImageMemoryBarrier2 *bar, int *nb_bar, VkPipelineStageFlags src_stage, VkPipelineStageFlags dst_stage, VkAccessFlagBits new_access, VkImageLayout new_layout, uint32_t new_qf)
It is the only field you need to keep, assuming you have a context. There is some magic you don't need to care about around this; just leave it at its default value.
int ff_vk_shader_link(FFVulkanContext *s, FFVulkanShader *shd, uint8_t *spirv, size_t spirv_len, const char *entrypoint)
Link a shader into an executable.
const char * name
Pad name.
static const AVFilterPad blackdetect_vulkan_outputs[]
void(* free_shader)(struct FFVkSPIRVCompiler *ctx, void **opaque)
void ff_vk_exec_bind_shader(FFVulkanContext *s, FFVkExecContext *e, FFVulkanShader *shd)
Bind a shader.
static int blackdetect_vulkan_filter_frame(AVFilterLink *link, AVFrame *in)
int ff_vk_create_imageviews(FFVulkanContext *s, FFVkExecContext *e, VkImageView views[AV_NUM_DATA_POINTERS], AVFrame *f, enum FFVkShaderRepFormat rep_fmt)
Create an imageview and add it as a dependency to an execution.
int ff_vk_shader_add_push_const(FFVulkanShader *shd, int offset, int size, VkShaderStageFlagBits stage)
Add/update push constants for execution.
AVVulkanDeviceQueueFamily * ff_vk_qf_find(FFVulkanContext *s, VkQueueFlagBits dev_family, VkVideoCodecOperationFlagBitsKHR vid_ops)
Chooses an appropriate QF.
enum AVPixelFormat input_format
#define AV_PIX_FMT_FLAG_XYZ
The pixel format contains XYZ-like data (as opposed to YUV/RGB/grayscale).
static const AVFilterPad blackdetect_vulkan_inputs[]
AVDictionary * metadata
metadata.
#define AV_PIX_FMT_FLAG_PLANAR
At least one pixel component is not in the first data plane.
static void evaluate(AVFilterLink *link, AVFrame *in, const BlackDetectBuf *sum)
static const AVOption blackdetect_vulkan_options[]
int ff_vk_filter_config_input(AVFilterLink *inlink)
AVFilter p
The public AVFilter.
A reference to a data buffer.
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes of an image. It also stores the subsampling factors and flags.
void ff_vk_exec_discard_deps(FFVulkanContext *s, FFVkExecContext *e)
static const int16_t alpha[]
@ AV_OPT_TYPE_BOOL
Underlying C type is int.
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
int ff_vk_exec_submit(FFVulkanContext *s, FFVkExecContext *e)
#define av_ts2str(ts)
Convenience macro, the return value should be used only directly in function arguments but never stand-alone.
#define FILTER_SINGLE_PIXFMT(pix_fmt_)
int ff_vk_get_pooled_buffer(FFVulkanContext *ctx, AVBufferPool **buf_pool, AVBufferRef **buf, VkBufferUsageFlags usage, void *create_pNext, size_t size, VkMemoryPropertyFlagBits mem_props)
Initialize a pool and create AVBufferRefs containing FFVkBuffer.
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.