avfilter: add pad opencl filter

Paul B Mahol 2020-02-06 17:41:08 +01:00
parent 6d37ca8a4e
commit 1c6a91990d
9 changed files with 516 additions and 1 deletion


@@ -35,6 +35,7 @@ version <next>:
- xfade video filter
- xfade_opencl filter
- afirsrc audio filter source
- pad_opencl filter
version 4.2:

configure

@@ -3535,6 +3535,7 @@ overlay_qsv_filter_deps="libmfx"
overlay_qsv_filter_select="qsvvpp"
overlay_vulkan_filter_deps="vulkan libglslang"
owdenoise_filter_deps="gpl"
pad_opencl_filter_deps="opencl"
pan_filter_deps="swresample"
perspective_filter_deps="gpl"
phase_filter_deps="gpl"


@@ -21172,6 +21172,83 @@ The inputs have same memory layout for color channels, the overlay has addition
@end itemize
@section pad_opencl
Add padding to the input image, and place the original input at the
provided @var{x}, @var{y} coordinates.
It accepts the following options:
@table @option
@item width, w
@item height, h
Specify an expression for the size of the output image with the
padding added. If the value for @var{width} or @var{height} is 0, the
corresponding input size is used for the output.
The @var{width} expression can reference the value set by the
@var{height} expression, and vice versa.
The default value of @var{width} and @var{height} is 0.
@item x
@item y
Specify the offsets to place the input image at within the padded area,
with respect to the top/left border of the output image.
The @var{x} expression can reference the value set by the @var{y}
expression, and vice versa.
The default value of @var{x} and @var{y} is 0.
If @var{x} or @var{y} evaluate to a negative number, they'll be changed
so the input image is centered on the padded area.
@item color
Specify the color of the padded area. For the syntax of this option,
check the @ref{color syntax,,"Color" section in the ffmpeg-utils
manual,ffmpeg-utils}.
@item aspect
Pad to an aspect instead of a resolution (see the examples below).
@end table
The values for the @var{width}, @var{height}, @var{x}, and @var{y}
options are expressions containing the following constants:
@table @option
@item in_w
@item in_h
The input video width and height.
@item iw
@item ih
These are the same as @var{in_w} and @var{in_h}.
@item out_w
@item out_h
The output width and height (the size of the padded area), as
specified by the @var{width} and @var{height} expressions.
@item ow
@item oh
These are the same as @var{out_w} and @var{out_h}.
@item x
@item y
The x and y offsets as specified by the @var{x} and @var{y}
expressions, or NAN if not yet specified.
@item a
same as @var{iw} / @var{ih}
@item sar
input sample aspect ratio
@item dar
input display aspect ratio; it is the same as (@var{iw} / @var{ih}) * @var{sar}
@end table
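The following example invocations are illustrative; they assume an
OpenCL device is available as @code{0.0} and that the @code{nv12}
software format is supported by that device.
Pad the input up to 1280x720 and center it on a green background:
@example
-init_hw_device opencl=gpu:0.0 -filter_hw_device gpu -i INPUT -vf "format=nv12,hwupload,pad_opencl=1280:720:(ow-iw)/2:(oh-ih)/2:color=green,hwdownload,format=nv12" OUTPUT
@end example
Pad the input to a 16:9 aspect, keeping the image centered:
@example
-init_hw_device opencl=gpu:0.0 -filter_hw_device gpu -i INPUT -vf "format=nv12,hwupload,pad_opencl=x=-1:y=-1:aspect=16/9,hwdownload,format=nv12" OUTPUT
@end example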
@section prewitt_opencl
Apply the Prewitt operator (@url{https://en.wikipedia.org/wiki/Prewitt_operator}) to input video stream.


@@ -329,6 +329,7 @@ OBJS-$(CONFIG_OVERLAY_QSV_FILTER) += vf_overlay_qsv.o framesync.o
OBJS-$(CONFIG_OVERLAY_VULKAN_FILTER) += vf_overlay_vulkan.o vulkan.o
OBJS-$(CONFIG_OWDENOISE_FILTER) += vf_owdenoise.o
OBJS-$(CONFIG_PAD_FILTER) += vf_pad.o
OBJS-$(CONFIG_PAD_OPENCL_FILTER) += vf_pad_opencl.o opencl.o opencl/pad.o
OBJS-$(CONFIG_PALETTEGEN_FILTER) += vf_palettegen.o
OBJS-$(CONFIG_PALETTEUSE_FILTER) += vf_paletteuse.o framesync.o
OBJS-$(CONFIG_PERMS_FILTER) += f_perms.o


@@ -313,6 +313,7 @@ extern AVFilter ff_vf_overlay_qsv;
extern AVFilter ff_vf_overlay_vulkan;
extern AVFilter ff_vf_owdenoise;
extern AVFilter ff_vf_pad;
extern AVFilter ff_vf_pad_opencl;
extern AVFilter ff_vf_palettegen;
extern AVFilter ff_vf_paletteuse;
extern AVFilter ff_vf_perms;

libavfilter/opencl/pad.cl (new file)

@@ -0,0 +1,36 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
const sampler_t sampler = CLK_NORMALIZED_COORDS_FALSE |
CLK_FILTER_NEAREST;
__kernel void pad (
__read_only image2d_t src,
__write_only image2d_t dst,
float4 color,
int2 xy)
{
int2 size_src = get_image_dim(src);
int2 loc = (int2)(get_global_id(0), get_global_id(1));
// Coordinates of this output pixel within the source image (may fall outside it).
int2 src_pos = (int2)(get_global_id(0) - xy.x, get_global_id(1) - xy.y);
// Use the pad color outside the area covered by the source image,
// otherwise copy the corresponding source pixel.
float4 pixel = loc.x >= size_src.x + xy.x ||
               loc.y >= size_src.y + xy.y ||
               loc.x < xy.x ||
               loc.y < xy.y ? color : read_imagef(src, sampler, src_pos);
write_imagef(dst, loc, pixel);
}


@@ -27,6 +27,7 @@ extern const char *ff_opencl_source_deshake;
extern const char *ff_opencl_source_neighbor;
extern const char *ff_opencl_source_nlmeans;
extern const char *ff_opencl_source_overlay;
extern const char *ff_opencl_source_pad;
extern const char *ff_opencl_source_tonemap;
extern const char *ff_opencl_source_transpose;
extern const char *ff_opencl_source_unsharp;


@@ -30,7 +30,7 @@
#include "libavutil/version.h"
#define LIBAVFILTER_VERSION_MAJOR 7
-#define LIBAVFILTER_VERSION_MINOR 75
+#define LIBAVFILTER_VERSION_MINOR 76
#define LIBAVFILTER_VERSION_MICRO 100

libavfilter/vf_pad_opencl.c (new file)

@@ -0,0 +1,397 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/colorspace.h"
#include "libavutil/eval.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "avfilter.h"
#include "drawutils.h"
#include "formats.h"
#include "internal.h"
#include "opencl.h"
#include "opencl_source.h"
#include "video.h"
static const char *const var_names[] = {
"in_w", "iw",
"in_h", "ih",
"out_w", "ow",
"out_h", "oh",
"x",
"y",
"a",
"sar",
"dar",
NULL
};
enum var_name {
VAR_IN_W, VAR_IW,
VAR_IN_H, VAR_IH,
VAR_OUT_W, VAR_OW,
VAR_OUT_H, VAR_OH,
VAR_X,
VAR_Y,
VAR_A,
VAR_SAR,
VAR_DAR,
VARS_NB
};
typedef struct PadOpenCLContext {
OpenCLFilterContext ocf;
int initialized;
int is_rgb;
int is_packed;
int hsub, vsub;
char *w_expr;
char *h_expr;
char *x_expr;
char *y_expr;
AVRational aspect;
cl_command_queue command_queue;
cl_kernel kernel_pad;
int w, h;
int x, y;
uint8_t pad_rgba[4];
uint8_t pad_color[4];
cl_float4 pad_color_float;
cl_int2 pad_pos;
} PadOpenCLContext;
static int pad_opencl_init(AVFilterContext *avctx, AVFrame *input_frame)
{
PadOpenCLContext *ctx = avctx->priv;
AVHWFramesContext *input_frames_ctx = (AVHWFramesContext *)input_frame->hw_frames_ctx->data;
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(input_frames_ctx->sw_format);
uint8_t rgba_map[4];
cl_int cle;
int err;
ff_fill_rgba_map(rgba_map, input_frames_ctx->sw_format);
ctx->is_rgb = !!(desc->flags & AV_PIX_FMT_FLAG_RGB);
ctx->is_packed = !(desc->flags & AV_PIX_FMT_FLAG_PLANAR);
ctx->hsub = desc->log2_chroma_w;
ctx->vsub = desc->log2_chroma_h;
err = ff_opencl_filter_load_program(avctx, &ff_opencl_source_pad, 1);
if (err < 0)
goto fail;
ctx->command_queue = clCreateCommandQueue(
ctx->ocf.hwctx->context,
ctx->ocf.hwctx->device_id,
0,
&cle
);
if (ctx->is_rgb) {
ctx->pad_color[rgba_map[0]] = ctx->pad_rgba[0];
ctx->pad_color[rgba_map[1]] = ctx->pad_rgba[1];
ctx->pad_color[rgba_map[2]] = ctx->pad_rgba[2];
ctx->pad_color[rgba_map[3]] = ctx->pad_rgba[3];
} else {
ctx->pad_color[0] = RGB_TO_Y_BT709(ctx->pad_rgba[0], ctx->pad_rgba[1], ctx->pad_rgba[2]);
ctx->pad_color[1] = RGB_TO_U_BT709(ctx->pad_rgba[0], ctx->pad_rgba[1], ctx->pad_rgba[2], 0);
ctx->pad_color[2] = RGB_TO_V_BT709(ctx->pad_rgba[0], ctx->pad_rgba[1], ctx->pad_rgba[2], 0);
ctx->pad_color[3] = ctx->pad_rgba[3];
}
CL_FAIL_ON_ERROR(AVERROR(EIO), "Failed to create OpenCL command queue %d.\n", cle);
ctx->kernel_pad = clCreateKernel(ctx->ocf.program, "pad", &cle);
CL_FAIL_ON_ERROR(AVERROR(EIO), "Failed to create pad kernel: %d.\n", cle);
for (int i = 0; i < 4; ++i) {
ctx->pad_color_float.s[i] = (float)ctx->pad_color[i] / 255.0;
}
ctx->pad_pos.s[0] = ctx->x;
ctx->pad_pos.s[1] = ctx->y;
ctx->initialized = 1;
return 0;
fail:
if (ctx->command_queue)
clReleaseCommandQueue(ctx->command_queue);
if (ctx->kernel_pad)
clReleaseKernel(ctx->kernel_pad);
return err;
}
static int filter_frame(AVFilterLink *link, AVFrame *input_frame)
{
AVFilterContext *avctx = link->dst;
AVFilterLink *outlink = avctx->outputs[0];
PadOpenCLContext *pad_ctx = avctx->priv;
AVFrame *output_frame = NULL;
int err;
cl_int cle;
size_t global_work[2];
cl_mem src, dst;
if (!input_frame->hw_frames_ctx)
return AVERROR(EINVAL);
if (!pad_ctx->initialized) {
err = pad_opencl_init(avctx, input_frame);
if (err < 0)
goto fail;
}
output_frame = ff_get_video_buffer(outlink, outlink->w, outlink->h);
if (!output_frame) {
err = AVERROR(ENOMEM);
goto fail;
}
for (int p = 0; p < FF_ARRAY_ELEMS(output_frame->data); p++) {
cl_float4 pad_color_float;
cl_int2 pad_pos;
if (pad_ctx->is_packed) {
pad_color_float = pad_ctx->pad_color_float;
} else {
// Planar formats use single-channel images, so only the first color
// component is read by the kernel for these planes.
pad_color_float.s[0] = pad_ctx->pad_color_float.s[p];
pad_color_float.s[1] = pad_color_float.s[2] = pad_color_float.s[3] = 0.0f;
}
// Chroma planes (1 and 2) are subsampled; scale the pad offsets accordingly.
if (p > 0 && p < 3) {
pad_pos.s[0] = pad_ctx->pad_pos.s[0] >> pad_ctx->hsub;
pad_pos.s[1] = pad_ctx->pad_pos.s[1] >> pad_ctx->vsub;
} else {
pad_pos.s[0] = pad_ctx->pad_pos.s[0];
pad_pos.s[1] = pad_ctx->pad_pos.s[1];
}
src = (cl_mem)input_frame->data[p];
dst = (cl_mem)output_frame->data[p];
if (!dst)
break;
CL_SET_KERNEL_ARG(pad_ctx->kernel_pad, 0, cl_mem, &src);
CL_SET_KERNEL_ARG(pad_ctx->kernel_pad, 1, cl_mem, &dst);
CL_SET_KERNEL_ARG(pad_ctx->kernel_pad, 2, cl_float4, &pad_color_float);
CL_SET_KERNEL_ARG(pad_ctx->kernel_pad, 3, cl_int2, &pad_pos);
err = ff_opencl_filter_work_size_from_image(avctx, global_work, output_frame, p, 16);
if (err < 0)
goto fail;
cle = clEnqueueNDRangeKernel(pad_ctx->command_queue, pad_ctx->kernel_pad, 2, NULL,
global_work, NULL, 0, NULL, NULL);
CL_FAIL_ON_ERROR(AVERROR(EIO), "Failed to enqueue pad kernel: %d.\n", cle);
}
// Run queued kernel
cle = clFinish(pad_ctx->command_queue);
CL_FAIL_ON_ERROR(AVERROR(EIO), "Failed to finish command queue: %d.\n", cle);
err = av_frame_copy_props(output_frame, input_frame);
if (err < 0)
goto fail;
av_frame_free(&input_frame);
return ff_filter_frame(outlink, output_frame);
fail:
clFinish(pad_ctx->command_queue);
av_frame_free(&input_frame);
av_frame_free(&output_frame);
return err;
}
static av_cold void pad_opencl_uninit(AVFilterContext *avctx)
{
PadOpenCLContext *ctx = avctx->priv;
cl_int cle;
if (ctx->kernel_pad) {
cle = clReleaseKernel(ctx->kernel_pad);
if (cle != CL_SUCCESS)
av_log(avctx, AV_LOG_ERROR, "Failed to release "
"kernel: %d.\n", cle);
}
if (ctx->command_queue) {
cle = clReleaseCommandQueue(ctx->command_queue);
if (cle != CL_SUCCESS)
av_log(avctx, AV_LOG_ERROR, "Failed to release "
"command queue: %d.\n", cle);
}
ff_opencl_filter_uninit(avctx);
}
static int pad_opencl_config_output(AVFilterLink *outlink)
{
AVFilterContext *avctx = outlink->src;
AVFilterLink *inlink = avctx->inputs[0];
PadOpenCLContext *ctx = avctx->priv;
AVRational adjusted_aspect = ctx->aspect;
double var_values[VARS_NB], res;
int err, ret;
char *expr;
var_values[VAR_IN_W] = var_values[VAR_IW] = inlink->w;
var_values[VAR_IN_H] = var_values[VAR_IH] = inlink->h;
var_values[VAR_OUT_W] = var_values[VAR_OW] = NAN;
var_values[VAR_OUT_H] = var_values[VAR_OH] = NAN;
var_values[VAR_A] = (double) inlink->w / inlink->h;
var_values[VAR_SAR] = inlink->sample_aspect_ratio.num ?
(double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den : 1;
var_values[VAR_DAR] = var_values[VAR_A] * var_values[VAR_SAR];
if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->w_expr),
var_names, var_values,
NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
return ret;
ctx->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = res;
if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->h_expr),
var_names, var_values,
NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
return ret;
ctx->h = var_values[VAR_OUT_H] = var_values[VAR_OH] = res;
if (!ctx->h)
var_values[VAR_OUT_H] = var_values[VAR_OH] = ctx->h = inlink->h;
/* evaluate the width again, as it may depend on the evaluated output height */
if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->w_expr),
var_names, var_values,
NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
return ret;
ctx->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = res;
if (!ctx->w)
var_values[VAR_OUT_W] = var_values[VAR_OW] = ctx->w = inlink->w;
if (adjusted_aspect.num && adjusted_aspect.den) {
adjusted_aspect = av_div_q(adjusted_aspect, inlink->sample_aspect_ratio);
if (ctx->h < av_rescale(ctx->w, adjusted_aspect.den, adjusted_aspect.num)) {
ctx->h = var_values[VAR_OUT_H] = var_values[VAR_OH] = av_rescale(ctx->w, adjusted_aspect.den, adjusted_aspect.num);
} else {
ctx->w = var_values[VAR_OUT_W] = var_values[VAR_OW] = av_rescale(ctx->h, adjusted_aspect.num, adjusted_aspect.den);
}
}
/* evaluate x and y */
if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->x_expr),
var_names, var_values,
NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
return ret;
ctx->x = var_values[VAR_X] = res;
if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->y_expr),
var_names, var_values,
NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
return ret;
ctx->y = var_values[VAR_Y] = res;
/* evaluate x again, as it may depend on the evaluated y value */
if ((ret = av_expr_parse_and_eval(&res, (expr = ctx->x_expr),
var_names, var_values,
NULL, NULL, NULL, NULL, NULL, 0, ctx)) < 0)
return ret;
ctx->x = var_values[VAR_X] = res;
if (ctx->x < 0 || ctx->x + inlink->w > ctx->w)
ctx->x = var_values[VAR_X] = (ctx->w - inlink->w) / 2;
if (ctx->y < 0 || ctx->y + inlink->h > ctx->h)
ctx->y = var_values[VAR_Y] = (ctx->h - inlink->h) / 2;
/* sanity check params */
if (ctx->w < inlink->w || ctx->h < inlink->h) {
av_log(ctx, AV_LOG_ERROR, "Padded dimensions cannot be smaller than input dimensions.\n");
return AVERROR(EINVAL);
}
if (ctx->w > avctx->inputs[0]->w) {
ctx->ocf.output_width = ctx->w;
} else {
ctx->ocf.output_width = avctx->inputs[0]->w;
}
if (ctx->h > avctx->inputs[0]->h) {
ctx->ocf.output_height = ctx->h;
} else {
ctx->ocf.output_height = avctx->inputs[0]->h;
}
if (ctx->x + avctx->inputs[0]->w > ctx->ocf.output_width ||
ctx->y + avctx->inputs[0]->h > ctx->ocf.output_height) {
return AVERROR(EINVAL);
}
err = ff_opencl_filter_config_output(outlink);
if (err < 0)
return err;
return 0;
}
static const AVFilterPad pad_opencl_inputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.filter_frame = filter_frame,
.config_props = &ff_opencl_filter_config_input,
},
{ NULL }
};
static const AVFilterPad pad_opencl_outputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.config_props = &pad_opencl_config_output,
},
{ NULL }
};
#define OFFSET(x) offsetof(PadOpenCLContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
static const AVOption pad_opencl_options[] = {
{ "width", "set the pad area width", OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"}, 0, 0, FLAGS },
{ "w", "set the pad area width", OFFSET(w_expr), AV_OPT_TYPE_STRING, {.str = "iw"}, 0, 0, FLAGS },
{ "height", "set the pad area height", OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"}, 0, 0, FLAGS },
{ "h", "set the pad area height", OFFSET(h_expr), AV_OPT_TYPE_STRING, {.str = "ih"}, 0, 0, FLAGS },
{ "x", "set the x offset for the input image position", OFFSET(x_expr), AV_OPT_TYPE_STRING, {.str = "0"}, 0, INT16_MAX, FLAGS },
{ "y", "set the y offset for the input image position", OFFSET(y_expr), AV_OPT_TYPE_STRING, {.str = "0"}, 0, INT16_MAX, FLAGS },
{ "color", "set the color of the padded area border", OFFSET(pad_rgba), AV_OPT_TYPE_COLOR, { .str = "black" }, 0, 0, FLAGS },
{ "aspect", "pad to fit an aspect instead of a resolution", OFFSET(aspect), AV_OPT_TYPE_RATIONAL, {.dbl = 0}, 0, INT16_MAX, FLAGS },
{ NULL }
};
AVFILTER_DEFINE_CLASS(pad_opencl);
AVFilter ff_vf_pad_opencl = {
.name = "pad_opencl",
.description = NULL_IF_CONFIG_SMALL("Pad the input video."),
.priv_size = sizeof(PadOpenCLContext),
.priv_class = &pad_opencl_class,
.init = &ff_opencl_filter_init,
.uninit = &pad_opencl_uninit,
.query_formats = &ff_opencl_filter_query_formats,
.inputs = pad_opencl_inputs,
.outputs = pad_opencl_outputs,
.flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE
};