author     Narumol Prangnawarat <narumol.prangnawarat@arm.com>   2019-05-31 16:42:11 +0100
committer  Áron Virginás-Tar <aron.virginas-tar@arm.com>         2019-06-03 09:38:36 +0000
commit     8c7324dd7b8751f31f39ba55aaae5c3b48882f3c (patch)
tree       a56bb3ccddd8ec5707ec14a9f3f0ec7b702d157d /src/backends/backendsCommon
parent     60f54887780aecb1e7757805bf9d8cd85b52dd05 (diff)
download   armnn-8c7324dd7b8751f31f39ba55aaae5c3b48882f3c.tar.gz
IVGCVSW-3148 Add end to end test for Dequantize layer to Ref, Cl, Neon
Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: Iaf9c290c093b7d84949993439568e55433938b4e
Diffstat (limited to 'src/backends/backendsCommon')
-rw-r--r--  src/backends/backendsCommon/test/CMakeLists.txt                  |   1 +
-rw-r--r--  src/backends/backendsCommon/test/DequantizeEndToEndTestImpl.hpp  | 101 +++++
2 files changed, 102 insertions(+), 0 deletions(-)
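For context on what the new test exercises: a Dequantize layer maps quantized integer values back to floats via the affine relation real = scale * (quantized - offset). A minimal standalone sketch of that arithmetic (plain C++, not part of this patch; the helper name is illustrative only):

    #include <cstdint>
    #include <iostream>

    // Illustrative helper: dequantize an 8-bit value using a scale and zero-point offset.
    float Dequantize(uint8_t quantized, float scale, int32_t offset)
    {
        return scale * (static_cast<int32_t>(quantized) - offset);
    }

    int main()
    {
        // Mirrors the first entries of the test data in the patch below:
        // scale 0.5, offset 0 maps 2 -> 1.0f; scale 0.5, offset 1 maps 3 -> 1.0f.
        std::cout << Dequantize(2, 0.5f, 0) << " " << Dequantize(3, 0.5f, 1) << std::endl;
        return 0;
    }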
diff --git a/src/backends/backendsCommon/test/CMakeLists.txt b/src/backends/backendsCommon/test/CMakeLists.txt
index 8fbd74d67a..7512c89e9c 100644
--- a/src/backends/backendsCommon/test/CMakeLists.txt
+++ b/src/backends/backendsCommon/test/CMakeLists.txt
@@ -16,6 +16,7 @@ list(APPEND armnnBackendsCommonUnitTests_sources
ConvertFp32ToFp16TestImpl.hpp
DebugTestImpl.hpp
DequantizeTestImpl.hpp
+ DequantizeEndToEndTestImpl.hpp
DetectionPostProcessLayerTestImpl.hpp
DetectionPostProcessTestImpl.hpp
EndToEndTestImpl.hpp
diff --git a/src/backends/backendsCommon/test/DequantizeEndToEndTestImpl.hpp b/src/backends/backendsCommon/test/DequantizeEndToEndTestImpl.hpp
new file mode 100644
index 0000000000..e62415904c
--- /dev/null
+++ b/src/backends/backendsCommon/test/DequantizeEndToEndTestImpl.hpp
@@ -0,0 +1,101 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#pragma once
+
+#include "CommonTestUtils.hpp"
+
+#include <armnn/INetwork.hpp>
+#include <ResolveType.hpp>
+
+namespace
+{
+
+template<typename T>
+armnn::INetworkPtr CreateDequantizeNetwork(const armnn::TensorInfo& inputInfo,
+ const armnn::TensorInfo& outputInfo)
+{
+ armnn::INetworkPtr net(armnn::INetwork::Create());
+
+ armnn::IConnectableLayer* inputLayer = net->AddInputLayer(0);
+ armnn::IConnectableLayer* dequantizeLayer = net->AddDequantizeLayer("Dequantize");
+ armnn::IConnectableLayer* outputLayer = net->AddOutputLayer(0, "output");
+ Connect(inputLayer, dequantizeLayer, inputInfo, 0, 0);
+ Connect(dequantizeLayer, outputLayer, outputInfo, 0, 0);
+
+ return net;
+}
+
+template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
+void DequantizeEndToEndLayerTestImpl(const std::vector<BackendId>& backends,
+ const armnn::TensorShape& tensorShape,
+ const std::vector<T>& input,
+ const std::vector<float>& expectedOutput,
+ float scale,
+ int32_t offset)
+{
+ armnn::TensorInfo inputInfo(tensorShape, ArmnnType);
+ armnn::TensorInfo outputInfo(tensorShape, armnn::DataType::Float32);
+
+ inputInfo.SetQuantizationScale(scale);
+ inputInfo.SetQuantizationOffset(offset);
+
+ // Builds up the structure of the network
+ armnn::INetworkPtr net = CreateDequantizeNetwork<T>(inputInfo, outputInfo);
+
+ BOOST_TEST_CHECKPOINT("create a network");
+
+ std::map<int, std::vector<T>> inputTensorData = { { 0, input } };
+ std::map<int, std::vector<float>> expectedOutputData = { { 0, expectedOutput } };
+
+ EndToEndLayerTestImpl<ArmnnType, armnn::DataType::Float32>(
+ move(net), inputTensorData, expectedOutputData, backends);
+}
+
+template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
+void DequantizeEndToEndSimple(const std::vector<BackendId>& backends)
+{
+ const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
+ std::vector<T> inputData = std::vector<T>(
+ {
+ 2, 4, 6, 8,
+ 10, 12, 14, 16,
+ 18, 20, 22, 24,
+ 26, 28, 30, 32
+ });
+
+ std::vector<float> expectedOutputData = std::vector<float>(
+ {
+ 1.0f, 2.0f, 3.0f, 4.0f,
+ 5.0f, 6.0f, 7.0f, 8.0f,
+ 9.0f, 10.0f, 11.0f, 12.0f,
+ 13.0f, 14.0f, 15.0f, 16.0f
+ });
+ DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 0);
+};
+
+template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
+void DequantizeEndToEndOffset(const std::vector<BackendId>& backends)
+{
+ const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
+ std::vector<T> inputData = std::vector<T>(
+ {
+ 3, 5, 7, 9,
+ 11, 13, 15, 17,
+ 19, 21, 23, 25,
+ 27, 29, 31, 33
+ });
+
+ std::vector<float> expectedOutputData = std::vector<float>(
+ {
+ 1.0f, 2.0f, 3.0f, 4.0f,
+ 5.0f, 6.0f, 7.0f, 8.0f,
+ 9.0f, 10.0f, 11.0f, 12.0f,
+ 13.0f, 14.0f, 15.0f, 16.0f
+ });
+ DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 1);
+};
+
+} // anonymous namespace
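
The helpers above are intended to be instantiated from a backend's end-to-end test suite, which is how the commit title's Ref, Cl, and Neon coverage would be wired up. A hedged sketch of how a reference-backend test case might call them (the suite name, test-case names, and defaultBackends variable are assumptions for illustration, not part of this patch):

    #include <backendsCommon/test/DequantizeEndToEndTestImpl.hpp>

    #include <boost/test/unit_test.hpp>

    // Assumed backend list: run the end-to-end tests against the CPU reference backend.
    static std::vector<armnn::BackendId> defaultBackends = { armnn::Compute::CpuRef };

    BOOST_AUTO_TEST_SUITE(RefEndToEnd)

    // Dequantize with offset == 0 on 8-bit asymmetric quantized input.
    BOOST_AUTO_TEST_CASE(RefDequantizeEndToEndSimpleTest)
    {
        DequantizeEndToEndSimple<armnn::DataType::QuantisedAsymm8>(defaultBackends);
    }

    // Dequantize with a non-zero quantization offset.
    BOOST_AUTO_TEST_CASE(RefDequantizeEndToEndOffsetTest)
    {
        DequantizeEndToEndOffset<armnn::DataType::QuantisedAsymm8>(defaultBackends);
    }

    BOOST_AUTO_TEST_SUITE_END()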