aboutsummaryrefslogtreecommitdiff
path: root/src/backends/cl/test/Fp16SupportTest.cpp
diff options
context:
space:
mode:
authorMike Kelly <mike.kelly@arm.com>2023-03-08 13:47:17 +0000
committerFrancis Murtagh <francis.murtagh@arm.com>2023-03-14 16:40:09 +0000
commit3ec3077b4eaedcc0c20ab5774bdbe365da541445 (patch)
treed601d2000897dec8691bf64cbddc9036f26b8034 /src/backends/cl/test/Fp16SupportTest.cpp
parenta088cd00b3cce672d26cdcb4965fc2a86b48f339 (diff)
downloadarmnn-3ec3077b4eaedcc0c20ab5774bdbe365da541445.tar.gz
IVGCVSW-3808 Add ElementwiseBinaryLayer
!android-nn-driver:9329 * Added ElementwiseBinaryLayer that can represent all ElementwiseBinary operations including Add, Div, Sub, Maximum, Mul and Minimum. * Updated Delegate to use ElementwiseBinaryLayer instead of the Add, Div, Sub, Maximum, Mul and Minimum layers. * Updated Deserializer to use ElementwiseBinaryLayer instead of the Add, Div, Sub, Maximum, Mul and Minimum layers. * Updated OnnxParser to use ElementwiseBinaryLayer instead of the Add layer. * Updated TfLiteParser to use ElementwiseBinaryLayer instead of the Add, Div, Sub, Maximum, Mul and Minimum layers. * Updated CL and Neon tests to use ElementwiseBinaryLayer. * Updated CL and Neon Backend Specific Optimizations to accept ElementwiseBinaryLayers as well as Add, Div, Mul, Sub, Maximum and Minimum layers. Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com> Signed-off-by: Mike Kelly <mike.kelly@arm.com> Change-Id: I7cbb96b60eb01f0e2b57b0541016d48a08b86c75
Diffstat (limited to 'src/backends/cl/test/Fp16SupportTest.cpp')
-rw-r--r-- src/backends/cl/test/Fp16SupportTest.cpp | 4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/backends/cl/test/Fp16SupportTest.cpp b/src/backends/cl/test/Fp16SupportTest.cpp
index da6ea10926..28ae4795ab 100644
--- a/src/backends/cl/test/Fp16SupportTest.cpp
+++ b/src/backends/cl/test/Fp16SupportTest.cpp
@@ -28,7 +28,7 @@ TEST_CASE("Fp16DataTypeSupport")
Layer* const inputLayer1 = graph.AddLayer<InputLayer>(1, "input1");
Layer* const inputLayer2 = graph.AddLayer<InputLayer>(2, "input2");
- Layer* const additionLayer = graph.AddLayer<AdditionLayer>("addition");
+ Layer* const additionLayer = graph.AddLayer<ElementwiseBinaryLayer>(BinaryOperation::Add, "addition");
Layer* const outputLayer = graph.AddLayer<armnn::OutputLayer>(0, "output");
TensorInfo fp16TensorInfo({1, 2, 3, 5}, armnn::DataType::Float16);
@@ -57,7 +57,7 @@ TEST_CASE("Fp16AdditionTest")
IConnectableLayer* inputLayer1 = net->AddInputLayer(0);
IConnectableLayer* inputLayer2 = net->AddInputLayer(1);
- IConnectableLayer* additionLayer = net->AddAdditionLayer();
+ IConnectableLayer* additionLayer = net->AddElementwiseBinaryLayer(BinaryOperation::Add);
IConnectableLayer* outputLayer = net->AddOutputLayer(0);
inputLayer1->GetOutputSlot(0).Connect(additionLayer->GetInputSlot(0));