author    Shubhanshu Saxena <shubhanshu.e01@gmail.com>  2021-08-08 16:25:33 +0530
committer Guo Yejun <yejun.guo@intel.com>  2021-08-10 22:27:27 +0800
commit    e6ae8fc18e579e1ca750ed392da2df3474b88976
tree      e2f1f069b43c92d7d340f88792e48072643b6a98 /libavfilter
parent    c71657858833008d2c17990ee78d2ec792996a1a
lavfi/dnn_backend_tf: TFInferRequest Execution and Documentation
This commit adds a function for execution of TFInferRequest and
documentation for the functions related to TFInferRequest.

Signed-off-by: Shubhanshu Saxena <shubhanshu.e01@gmail.com>
Diffstat (limited to 'libavfilter')
-rw-r--r-- libavfilter/dnn/dnn_backend_tf.c | 45 +++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 45 insertions(+), 0 deletions(-)
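Taken together, the three helpers in this patch form a create → run → free lifecycle for TFInferRequest. A minimal hedged sketch of how they compose; the TFRequestItem preparation and the helper name run_one_request_sketch are illustrative, not part of this patch:

    /* Hypothetical caller inside dnn_backend_tf.c; only the tf_*
     * calls come from this patch. */
    static DNNReturnType run_one_request_sketch(TFRequestItem *request)
    {
        TFInferRequest *infer_request = tf_create_inference_request();
        if (!infer_request)
            return DNN_ERROR;        /* av_malloc inside can fail */
        request->infer_request = infer_request;
        /* ... fill tf_input, input_tensor and tf_outputs from the
         * task before running the session ... */
        return tf_start_inference(request);
    }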
diff --git a/libavfilter/dnn/dnn_backend_tf.c b/libavfilter/dnn/dnn_backend_tf.c
index ac609ab9cb..939afec41d 100644
--- a/libavfilter/dnn/dnn_backend_tf.c
+++ b/libavfilter/dnn/dnn_backend_tf.c
@@ -95,6 +95,13 @@ static void free_buffer(void *data, size_t length)
     av_freep(&data);
 }
+/**
+ * Free the contents of a TensorFlow inference request.
+ * It does not free the TFInferRequest instance itself.
+ *
+ * @param request pointer to the TFInferRequest instance;
+ *                a NULL pointer is allowed.
+ */
 static void tf_free_request(TFInferRequest *request)
 {
     if (!request)
@@ -117,6 +124,12 @@ static void tf_free_request(TFInferRequest *request)
     }
 }
+/**
+ * Create a TensorFlow inference request. All members
+ * are initially unallocated and set to NULL.
+ *
+ * @return pointer to the allocated TFInferRequest instance.
+ */
 static TFInferRequest *tf_create_inference_request(void)
 {
     TFInferRequest *infer_request = av_malloc(sizeof(TFInferRequest));
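The two new doc comments together imply a two-step cleanup: tf_free_request() releases only the members that tf_create_inference_request() initialized to NULL, so the owner of the struct still has to free it. A hedged sketch of that pairing; the destroy-path context is assumed, not shown in this diff:

    /* Assumed destroy path: release members, then the struct. */
    tf_free_request(request->infer_request);
    av_freep(&request->infer_request);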
@@ -127,6 +140,38 @@ static TFInferRequest *tf_create_inference_request(void)
     return infer_request;
 }
+/**
+ * Start synchronous inference for the TensorFlow model.
+ *
+ * @param args pointer to the TFRequestItem for inference
+ * @retval DNN_SUCCESS if execution is successful
+ * @retval DNN_ERROR if execution fails
+ */
+static DNNReturnType tf_start_inference(void *args)
+{
+    TFRequestItem *request = args;
+    TFInferRequest *infer_request;
+    InferenceItem *inference;
+    TaskItem *task;
+    TFModel *tf_model;
+
+    if (!request) {
+        av_log(NULL, AV_LOG_ERROR, "TFRequestItem is NULL\n");
+        return DNN_ERROR;
+    }
+
+    infer_request = request->infer_request;
+    inference = request->inference;
+    task = inference->task;
+    tf_model = task->model;
+
+    TF_SessionRun(tf_model->session, NULL,
+                  infer_request->tf_input, &infer_request->input_tensor, 1,
+                  infer_request->tf_outputs, infer_request->output_tensors,
+                  task->nb_output, NULL, 0, NULL,
+                  tf_model->status);
+    if (TF_GetCode(tf_model->status) != TF_OK) {
+        av_log(&tf_model->ctx, AV_LOG_ERROR, "%s", TF_Message(tf_model->status));
+        return DNN_ERROR;
+    }
+    return DNN_SUCCESS;
+}
+
 static DNNReturnType extract_inference_from_task(TaskItem *task, Queue *inference_queue)
 {
     TFModel *tf_model = task->model;
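Why does tf_start_inference() take void *args rather than a TFRequestItem *? Presumably so it can double as a generic start-of-inference callback. A hedged sketch of such wiring; the DNNAsyncExecModule shape and execute_sync_sketch are assumptions for illustration, not part of this diff:

    /* Assumed executor shape; only tf_start_inference is from this patch. */
    typedef struct DNNAsyncExecModule {
        DNNReturnType (*start_inference)(void *request);
        void (*callback)(void *args);
        void *args;
    } DNNAsyncExecModule;

    static DNNReturnType execute_sync_sketch(TFRequestItem *request)
    {
        DNNAsyncExecModule m = {
            .start_inference = tf_start_inference, /* void * signature fits */
            .callback        = NULL,               /* optional completion hook */
            .args            = request,
        };
        DNNReturnType ret = m.start_inference(m.args);
        if (ret == DNN_SUCCESS && m.callback)
            m.callback(m.args);
        return ret;
    }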