libavfilter: Remove Async Flag from DNN Filter Side

Remove async flag from filter's perspective after the unification
of async and sync modes in the DNN backend.

Signed-off-by: Shubhanshu Saxena <shubhanshu.e01@gmail.com>
Author: Shubhanshu Saxena, 2021-08-26 02:40:47 +05:30 (committed by Guo Yejun)
commit 1544d6fa0a (parent 70b4dca054)
4 changed files with 12 additions and 18 deletions

doc/filters.texi

@@ -10291,11 +10291,8 @@ and the second line is the name of label id 1, etc.
 The label id is considered as name if the label file is not provided.
 
 @item backend_configs
-Set the configs to be passed into backend
+Set the configs to be passed into backend. To use async execution, set async (default: set).
+Roll back to sync execution if the backend does not support async.
 
-@item async
-use DNN async execution if set (default: set),
-roll back to sync execution if the backend does not support async.
-
 @end table
 
@@ -10347,15 +10344,12 @@ Set the input name of the dnn network.
 Set the output name of the dnn network.
 
 @item backend_configs
-Set the configs to be passed into backend
+Set the configs to be passed into backend. To use async execution, set async (default: set).
+Roll back to sync execution if the backend does not support async.
 
 For tensorflow backend, you can set its configs with @option{sess_config} options,
 please use tools/python/tf_sess_config.py to get the configs of TensorFlow backend for your system.
 
-@item async
-use DNN async execution if set (default: set),
-roll back to sync execution if the backend does not support async.
-
 @end table
 
 @subsection Examples
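With this change, async execution is selected through the backend_configs option rather than a separate filter option. For reference, a dnn_processing invocation under the new scheme could look like the line below; the model file and tensor names (model.pb, x, y) are placeholders, not part of this commit:

    dnn_processing=dnn_backend=tensorflow:model=model.pb:input=x:output=y:backend_configs='async=1'

Since async defaults to set, omitting it keeps asynchronous execution; passing backend_configs='async=0' forces synchronous execution, and the backend rolls back to sync on its own if it cannot support async.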

libavfilter/dnn/dnn_backend_tf.c

@@ -884,6 +884,13 @@ DNNModel *ff_dnn_load_model_tf(const char *model_filename, DNNFunctionType func_
         ctx->options.nireq = av_cpu_count() / 2 + 1;
     }
 
+#if !HAVE_PTHREAD_CANCEL
+    if (ctx->options.async) {
+        ctx->options.async = 0;
+        av_log(filter_ctx, AV_LOG_WARNING, "pthread is not supported, roll back to sync.\n");
+    }
+#endif
+
     tf_model->request_queue = ff_safe_queue_create();
     if (!tf_model->request_queue) {
         goto err;
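The fallback above acts on the backend's own options.async field, which is populated from the backend_configs string rather than from a filter-side flag. Below is a minimal, self-contained sketch of that mechanism using libavutil's AVOption API with a hypothetical DemoBackendOptions struct; it is not the FFmpeg backend code itself, only an illustration of how an async switch carried in backend_configs can reach the backend after this change.

/* Hypothetical sketch (not FFmpeg's actual structs): parse an "async"
 * switch out of a backend_configs-style string with the AVOption API,
 * so the backend itself owns the async/sync decision. */
#include <stddef.h>
#include <stdio.h>
#include <libavutil/opt.h>

typedef struct DemoBackendOptions {
    const AVClass *av_class;   /* must be the first member for the AVOption API */
    int async;                 /* stands in for the backend's options.async */
} DemoBackendOptions;

static const AVOption demo_options[] = {
    { "async", "use async execution", offsetof(DemoBackendOptions, async),
      AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, 0 },
    { NULL }
};

static const AVClass demo_class = {
    .class_name = "demo_dnn_backend",
    .item_name  = av_default_item_name,
    .option     = demo_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

int main(void)
{
    DemoBackendOptions opts = { .av_class = &demo_class };
    av_opt_set_defaults(&opts);          /* async defaults to 1 (set) */

    /* String as it would arrive via the filter's backend_configs option;
     * key/value pairs use '=' and are separated by '&'. */
    if (av_opt_set_from_string(&opts, "async=0", NULL, "=", "&") < 0) {
        fprintf(stderr, "failed to parse backend configs\n");
        return 1;
    }

    printf("async = %d\n", opts.async);  /* prints: async = 0 */
    av_opt_free(&opts);
    return 0;
}

Under this scheme the filter only forwards the backend_configs string; whether requests are actually executed asynchronously is decided, and if necessary rolled back as in the hunk above, inside the backend.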

libavfilter/dnn_filter_common.c

@@ -84,13 +84,6 @@ int ff_dnn_init(DnnContext *ctx, DNNFunctionType func_type, AVFilterContext *fil
         return AVERROR(EINVAL);
     }
 
-#if !HAVE_PTHREAD_CANCEL
-    if (ctx->async) {
-        ctx->async = 0;
-        av_log(filter_ctx, AV_LOG_WARNING, "pthread is not supported, roll back to sync.\n");
-    }
-#endif
-
     return 0;
 }
 

libavfilter/dnn_filter_common.h

@@ -46,7 +46,7 @@ typedef struct DnnContext {
     { "output", "output name of the model", OFFSET(model_outputnames_string), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },\
     { "backend_configs", "backend configs", OFFSET(backend_options), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS },\
     { "options", "backend configs (deprecated, use backend_configs)", OFFSET(backend_options), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS | AV_OPT_FLAG_DEPRECATED},\
-    { "async", "use DNN async inference", OFFSET(async), AV_OPT_TYPE_BOOL, { .i64 = 1}, 0, 1, FLAGS},
+    { "async", "use DNN async inference (ignored, use backend_configs='async=1')", OFFSET(async), AV_OPT_TYPE_BOOL, { .i64 = 1}, 0, 1, FLAGS},
 
 
 int ff_dnn_init(DnnContext *ctx, DNNFunctionType func_type, AVFilterContext *filter_ctx);