ArmNN 21.02
DequantizeEndToEndTestImpl.hpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "CommonTestUtils.hpp"

#include <armnn/INetwork.hpp>
#include <ResolveType.hpp>

namespace
{

// Builds a network containing a single Dequantize layer: Input -> Dequantize -> Output.
template<typename T>
armnn::INetworkPtr CreateDequantizeNetwork(const armnn::TensorInfo& inputInfo,
                                           const armnn::TensorInfo& outputInfo)
{
    armnn::INetworkPtr net = armnn::INetwork::Create();

    armnn::IConnectableLayer* inputLayer = net->AddInputLayer(0);
    armnn::IConnectableLayer* dequantizeLayer = net->AddDequantizeLayer("Dequantize");
    armnn::IConnectableLayer* outputLayer = net->AddOutputLayer(0, "output");
    Connect(inputLayer, dequantizeLayer, inputInfo, 0, 0);
    Connect(dequantizeLayer, outputLayer, outputInfo, 0, 0);

    return net;
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndLayerTestImpl(const std::vector<BackendId>& backends,
                                     const armnn::TensorShape& tensorShape,
                                     const std::vector<T>& input,
                                     const std::vector<float>& expectedOutput,
                                     float scale,
                                     int32_t offset)
{
    armnn::TensorInfo inputInfo(tensorShape, ArmnnType);
    armnn::TensorInfo outputInfo(tensorShape, armnn::DataType::Float32);

    inputInfo.SetQuantizationScale(scale);
    inputInfo.SetQuantizationOffset(offset);

    // Builds up the structure of the network
    armnn::INetworkPtr net = CreateDequantizeNetwork<T>(inputInfo, outputInfo);

    BOOST_TEST_CHECKPOINT("create a network");

    std::map<int, std::vector<T>> inputTensorData = { { 0, input } };
    std::map<int, std::vector<float>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, armnn::DataType::Float32>(
        std::move(net), inputTensorData, expectedOutputData, backends);
}

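// The test cases below rely on the dequantize mapping real = scale * (quantized - offset):
// with scale 0.5 and offset 0 (Simple) or offset 1 (Offset), both input sets dequantize
// to the expected outputs 1.0f .. 16.0f.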
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndSimple(const std::vector<BackendId>& backends)
{
    const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
    std::vector<T> inputData = std::vector<T>(
    {
         2,  4,  6,  8,
        10, 12, 14, 16,
        18, 20, 22, 24,
        26, 28, 30, 32
    });

    std::vector<float> expectedOutputData = std::vector<float>(
    {
         1.0f,  2.0f,  3.0f,  4.0f,
         5.0f,  6.0f,  7.0f,  8.0f,
         9.0f, 10.0f, 11.0f, 12.0f,
        13.0f, 14.0f, 15.0f, 16.0f
    });
    DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 0);
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndOffset(const std::vector<BackendId>& backends)
{
    const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
    std::vector<T> inputData = std::vector<T>(
    {
         3,  5,  7,  9,
        11, 13, 15, 17,
        19, 21, 23, 25,
        27, 29, 31, 33
    });

    std::vector<float> expectedOutputData = std::vector<float>(
    {
         1.0f,  2.0f,  3.0f,  4.0f,
         5.0f,  6.0f,  7.0f,  8.0f,
         9.0f, 10.0f, 11.0f, 12.0f,
        13.0f, 14.0f, 15.0f, 16.0f
    });
    DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 1);
}

} // anonymous namespace
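
As a rough usage sketch (not part of this header), a backend's end-to-end test suite would instantiate these helpers for a concrete quantized data type and a list of backends. The include path, suite and test names, and backend choice below are illustrative:

#include <backendsCommon/test/DequantizeEndToEndTestImpl.hpp>

// Run the dequantize end-to-end tests against the reference backend.
std::vector<armnn::BackendId> defaultBackends = { armnn::Compute::CpuRef };

BOOST_AUTO_TEST_SUITE(RefEndToEnd)

BOOST_AUTO_TEST_CASE(DequantizeEndToEndSimpleU8Test)
{
    DequantizeEndToEndSimple<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(DequantizeEndToEndOffsetU8Test)
{
    DequantizeEndToEndOffset<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_SUITE_END()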