author     Guo, Yejun <yejun.guo@intel.com>    2021-01-22 19:28:29 +0800
committer  Guo, Yejun <yejun.guo@intel.com>    2021-01-22 19:28:29 +0800
commit     07a18ff4772a374aa4d78745a9f553dadef5c4ab (patch)
tree       b31ac9d23db3f228f4411bde932a4e98578b97e0 /tests/dnn
parent     bb96824510aad2ac4cf0bff545c85af7a256ff92 (diff)
tests/dnn: fix build issue after function name changed
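Note: the rename simply adds FFmpeg's ff_ prefix, used for internal non-static symbols, to the native-layer entry points; the parameter lists are unchanged, so the tests only have to track the new names. Below is a minimal sketch of the updated call pattern, modeled on the avgpool hunks in this diff. The helper function is hypothetical, and the header path and the DnnOperand/AvgPoolParams type names are assumptions based on the libavfilter/dnn code of this period; they may differ in other trees.

#include <stdint.h>
#include "libavfilter/dnn/dnn_backend_native_layer_avgpool.h"

/* Hypothetical helper: run the average-pool layer on operand 0 and let it
 * allocate/write operand 1, mirroring what the tests below do. */
static void run_avgpool(DnnOperand *operands, AvgPoolParams *params)
{
    int32_t input_indexes[1] = { 0 };
    /* Old name: dnn_execute_layer_avg_pool(); only the ff_ prefix changed,
     * the arguments (operands, inputs, output index, params, ctx) did not. */
    ff_dnn_execute_layer_avg_pool(operands, input_indexes, 1, params, NULL);
}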
Diffstat (limited to 'tests/dnn')
-rw-r--r--   tests/dnn/dnn-layer-avgpool-test.c       | 4
-rw-r--r--   tests/dnn/dnn-layer-conv2d-test.c        | 4
-rw-r--r--   tests/dnn/dnn-layer-dense-test.c         | 2
-rw-r--r--   tests/dnn/dnn-layer-depth2space-test.c   | 2
-rw-r--r--   tests/dnn/dnn-layer-mathbinary-test.c    | 6
-rw-r--r--   tests/dnn/dnn-layer-mathunary-test.c     | 2
-rw-r--r--   tests/dnn/dnn-layer-maximum-test.c       | 2
-rw-r--r--   tests/dnn/dnn-layer-pad-test.c           | 6
8 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/tests/dnn/dnn-layer-avgpool-test.c b/tests/dnn/dnn-layer-avgpool-test.c
index 0e6be8ba57..4a925ea22a 100644
--- a/tests/dnn/dnn-layer-avgpool-test.c
+++ b/tests/dnn/dnn-layer-avgpool-test.c
@@ -91,7 +91,7 @@ static int test_with_same(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); ++i) {
@@ -171,7 +171,7 @@ static int test_with_valid(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_avg_pool(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); ++i) {
diff --git a/tests/dnn/dnn-layer-conv2d-test.c b/tests/dnn/dnn-layer-conv2d-test.c
index b623ddac0d..5ee60eeaf0 100644
--- a/tests/dnn/dnn-layer-conv2d-test.c
+++ b/tests/dnn/dnn-layer-conv2d-test.c
@@ -118,7 +118,7 @@ static int test_with_same_dilate(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_conv2d(operands, input_indexes, 1, &params, &ctx);
+    ff_dnn_execute_layer_conv2d(operands, input_indexes, 1, &params, &ctx);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -222,7 +222,7 @@ static int test_with_valid(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_conv2d(operands, input_indexes, 1, &params, &ctx);
+    ff_dnn_execute_layer_conv2d(operands, input_indexes, 1, &params, &ctx);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-dense-test.c b/tests/dnn/dnn-layer-dense-test.c
index 2c11ec5218..755d3ebb31 100644
--- a/tests/dnn/dnn-layer-dense-test.c
+++ b/tests/dnn/dnn-layer-dense-test.c
@@ -107,7 +107,7 @@ static int test(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_dense(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_dense(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-depth2space-test.c b/tests/dnn/dnn-layer-depth2space-test.c
index 2c641884c1..958247e675 100644
--- a/tests/dnn/dnn-layer-depth2space-test.c
+++ b/tests/dnn/dnn-layer-depth2space-test.c
@@ -81,7 +81,7 @@ static int test(void)
     input_indexes[0] = 0;
     params.block_size = 2;
-    dnn_execute_layer_depth2space(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_depth2space(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-mathbinary-test.c b/tests/dnn/dnn-layer-mathbinary-test.c
index c4da3f6a86..2e41dc1ae7 100644
--- a/tests/dnn/dnn-layer-mathbinary-test.c
+++ b/tests/dnn/dnn-layer-mathbinary-test.c
@@ -71,7 +71,7 @@ static int test_broadcast_input0(DNNMathBinaryOperation op)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_math_binary(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_math_binary(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
@@ -111,7 +111,7 @@ static int test_broadcast_input1(DNNMathBinaryOperation op)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_math_binary(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_math_binary(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
@@ -159,7 +159,7 @@ static int test_no_broadcast(DNNMathBinaryOperation op)
     input_indexes[0] = 0;
     input_indexes[1] = 1;
-    dnn_execute_layer_math_binary(operands, input_indexes, 2, &params, NULL);
+    ff_dnn_execute_layer_math_binary(operands, input_indexes, 2, &params, NULL);
     output = operands[2].data;
     for (int i = 0; i < sizeof(input0) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-mathunary-test.c b/tests/dnn/dnn-layer-mathunary-test.c
index ce14c41311..a8c5ab0224 100644
--- a/tests/dnn/dnn-layer-mathunary-test.c
+++ b/tests/dnn/dnn-layer-mathunary-test.c
@@ -87,7 +87,7 @@ static int test(DNNMathUnaryOperation op)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_math_unary(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_math_unary(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(input) / sizeof(float); ++i) {
diff --git a/tests/dnn/dnn-layer-maximum-test.c b/tests/dnn/dnn-layer-maximum-test.c
index c982670591..bf22f3719f 100644
--- a/tests/dnn/dnn-layer-maximum-test.c
+++ b/tests/dnn/dnn-layer-maximum-test.c
@@ -45,7 +45,7 @@ static int test(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_maximum(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_maximum(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(input) / sizeof(float); i++) {
diff --git a/tests/dnn/dnn-layer-pad-test.c b/tests/dnn/dnn-layer-pad-test.c
index 6a72adb3ae..a8443ce3be 100644
--- a/tests/dnn/dnn-layer-pad-test.c
+++ b/tests/dnn/dnn-layer-pad-test.c
@@ -79,7 +79,7 @@ static int test_with_mode_symmetric(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -144,7 +144,7 @@ static int test_with_mode_reflect(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {
@@ -210,7 +210,7 @@ static int test_with_mode_constant(void)
     operands[1].data = NULL;
     input_indexes[0] = 0;
-    dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
+    ff_dnn_execute_layer_pad(operands, input_indexes, 1, &params, NULL);
     output = operands[1].data;
     for (int i = 0; i < sizeof(expected_output) / sizeof(float); i++) {