author     Guo, Yejun <yejun.guo@intel.com>    2021-03-16 13:17:34 +0800
committer  Guo, Yejun <yejun.guo@intel.com>    2021-05-06 10:50:44 +0800
commit     7eb9accc376dca4f766d87d68c72aa167e4e9c7e (patch)
tree       7ebea41a180d154101110c6da32d8fcc14e962eb /libavfilter/dnn
parent     e37cc723870cfed9ad1dca03f73ba022f6b8e3e4 (diff)
lavfi/dnn_backend_openvino.c: move the logic for batch mode earlier
Diffstat (limited to 'libavfilter/dnn')
-rw-r--r--  libavfilter/dnn/dnn_backend_openvino.c | 12 +++++-------
1 file changed, 5 insertions(+), 7 deletions(-)
diff --git a/libavfilter/dnn/dnn_backend_openvino.c b/libavfilter/dnn/dnn_backend_openvino.c
index a8a02d7589..9f3c696e0a 100644
--- a/libavfilter/dnn/dnn_backend_openvino.c
+++ b/libavfilter/dnn/dnn_backend_openvino.c
@@ -432,13 +432,6 @@ static DNNReturnType execute_model_ov(RequestItem *request, Queue *inferenceq)
     ctx = &task->ov_model->ctx;
 
     if (task->async) {
-        if (ff_queue_size(inferenceq) < ctx->options.batch_size) {
-            if (ff_safe_queue_push_front(task->ov_model->request_queue, request) < 0) {
-                av_log(ctx, AV_LOG_ERROR, "Failed to push back request_queue.\n");
-                return DNN_ERROR;
-            }
-            return DNN_SUCCESS;
-        }
         ret = fill_model_input_ov(task->ov_model, request);
         if (ret != DNN_SUCCESS) {
             return ret;
@@ -793,6 +786,11 @@ DNNReturnType ff_dnn_execute_model_async_ov(const DNNModel *model, const char *i
         return DNN_ERROR;
     }
 
+    if (ff_queue_size(ov_model->inference_queue) < ctx->options.batch_size) {
+        // not enough inference items queued for a batch
+        return DNN_SUCCESS;
+    }
+
     request = ff_safe_queue_pop_front(ov_model->request_queue);
     if (!request) {
         av_log(ctx, AV_LOG_ERROR, "unable to get infer request.\n");
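
As a reading aid for the change above, here is a minimal standalone C sketch of the gating pattern this commit moves into the async submit path: the batch-size check now runs before an inference request is popped from the request queue, so the old rollback in execute_model_ov() (pushing the request back when the batch was not yet full) is no longer needed. All names below (pending_count, batch_size, acquire_request, submit_async) are illustrative stand-ins, not the FFmpeg DNN API.

/* Standalone sketch of "gate on batch size before dequeuing a request".
 * Illustrative only; names do not match libavfilter/dnn. */
#include <stdio.h>

typedef struct InferRequest { int id; } InferRequest;

static int pending_count = 0;   /* inference items queued so far            */
static int batch_size    = 4;   /* configured batch size (cf. ctx->options) */
static int free_requests = 2;   /* inference requests currently available   */

/* Hypothetical stand-in for popping a request from the request queue. */
static InferRequest *acquire_request(void)
{
    static InferRequest req = { 1 };
    if (free_requests <= 0)
        return NULL;
    free_requests--;
    return &req;
}

/* Async submit path: the batch check happens *before* a request is taken,
 * so an under-filled batch simply returns early and nothing has to be
 * pushed back onto the request queue. */
static int submit_async(void)
{
    InferRequest *request;

    pending_count++;                    /* this task has been queued        */
    if (pending_count < batch_size)     /* not enough items for a batch yet */
        return 0;                       /* success: wait for more items     */

    request = acquire_request();
    if (!request) {
        fprintf(stderr, "unable to get infer request\n");
        return -1;                      /* error */
    }
    pending_count = 0;                  /* whole batch handed to the request */
    printf("batch of %d submitted on request %d\n", batch_size, request->id);
    return 0;
}

int main(void)
{
    for (int i = 0; i < 9; i++)
        submit_async();
    return 0;
}

With batch_size set to 4 and nine calls, the sketch submits two full batches and leaves one item pending, which mirrors why the real backend still needs a separate flush step to submit a final partial batch.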