From f73943d514802ecc521b92ebf2d1595526968fd5 Mon Sep 17 00:00:00 2001
From: Shubhanshu Saxena
Date: Fri, 18 Jun 2021 21:53:08 +0530
Subject: lavfi/dnn_backend_openvino.c: Fix Memory Leak in execute_model_ov

In cases where the execution inside the function execute_model_ov fails,
push the RequestItem back to the request_queue before returning the error.
In case pushing back fails, release the allocated memory.

Signed-off-by: Shubhanshu Saxena
---
 libavfilter/dnn/dnn_backend_openvino.c | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

(limited to 'libavfilter/dnn')

diff --git a/libavfilter/dnn/dnn_backend_openvino.c b/libavfilter/dnn/dnn_backend_openvino.c
index d002feb167..3295fc79d3 100644
--- a/libavfilter/dnn/dnn_backend_openvino.c
+++ b/libavfilter/dnn/dnn_backend_openvino.c
@@ -448,12 +448,12 @@ static DNNReturnType execute_model_ov(RequestItem *request, Queue *inferenceq)
         status = ie_infer_set_completion_callback(request->infer_request, &request->callback);
         if (status != OK) {
             av_log(ctx, AV_LOG_ERROR, "Failed to set completion callback for inference\n");
-            return DNN_ERROR;
+            goto err;
         }
         status = ie_infer_request_infer_async(request->infer_request);
         if (status != OK) {
             av_log(ctx, AV_LOG_ERROR, "Failed to start async inference\n");
-            return DNN_ERROR;
+            goto err;
         }
         return DNN_SUCCESS;
     } else {
@@ -464,11 +464,17 @@ static DNNReturnType execute_model_ov(RequestItem *request, Queue *inferenceq)
         status = ie_infer_request_infer(request->infer_request);
         if (status != OK) {
             av_log(ctx, AV_LOG_ERROR, "Failed to start synchronous model inference\n");
-            return DNN_ERROR;
+            goto err;
         }
         infer_completion_callback(request);
         return (task->inference_done == task->inference_todo) ? DNN_SUCCESS : DNN_ERROR;
     }
+err:
+    if (ff_safe_queue_push_back(ov_model->request_queue, request) < 0) {
+        ie_infer_request_free(&request->infer_request);
+        av_freep(&request);
+    }
+    return DNN_ERROR;
 }
 
 static DNNReturnType get_input_ov(void *model, DNNData *input, const char *input_name)
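
The pattern the patch applies is small but worth isolating: every failure path in
execute_model_ov now funnels through a single err: label that first tries to return
the RequestItem to the request queue and only frees it when the queue refuses it.
Below is a minimal, self-contained sketch of that pattern. The types and helpers in
it (request_t, queue_t, queue_push) are hypothetical stand-ins for the backend's
RequestItem / ff_safe_queue_push_back machinery, not the real FFmpeg API; it is an
illustration of the ownership rule, not the backend's actual code.

/*
 * Minimal sketch of the cleanup pattern introduced by this patch.
 * request_t, queue_t and queue_push are stand-ins, not FFmpeg APIs.
 */
#include <stdio.h>
#include <stdlib.h>

typedef struct request_t {
    int id;              /* placeholder for the real inference handle */
} request_t;

typedef struct queue_t {
    request_t *slot;     /* single-slot "queue"; enough for the sketch */
} queue_t;

/* Returns 0 on success, -1 when the queue is full (a failed push). */
static int queue_push(queue_t *q, request_t *r)
{
    if (q->slot)
        return -1;
    q->slot = r;
    return 0;
}

/*
 * Mirrors execute_model_ov's new error path: on any failure, hand the
 * request back to the pool for reuse; if even that fails, free it so it
 * cannot leak.
 */
static int execute(queue_t *pool, request_t *request, int fail_step)
{
    if (fail_step == 1) {
        fprintf(stderr, "step 1 failed\n");
        goto err;
    }
    if (fail_step == 2) {
        fprintf(stderr, "step 2 failed\n");
        goto err;
    }
    return 0;

err:
    if (queue_push(pool, request) < 0)
        free(request);   /* the pool rejected it: free instead of leaking */
    return -1;
}

int main(void)
{
    queue_t pool = { NULL };
    request_t *req = calloc(1, sizeof(*req));

    if (!req)
        return 1;
    if (execute(&pool, req, 1) < 0 && pool.slot == req)
        printf("failed request was returned to the pool, not leaked\n");
    free(pool.slot);
    return 0;
}

The key point, as in the patch itself, is that the function never returns an error
while still owning the request: ownership either goes back to the queue or the
request is released on the spot.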