ArmNN 22.08
DequantizeEndToEndTestImpl.hpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <CommonTestUtils.hpp>

#include <armnn/INetwork.hpp>
#include <ResolveType.hpp>

#include <doctest/doctest.h>

namespace
{

template<typename T>
armnn::INetworkPtr CreateDequantizeNetwork(const armnn::TensorInfo& inputInfo,
                                           const armnn::TensorInfo& outputInfo)
{
    armnn::INetworkPtr net = armnn::INetwork::Create();

    armnn::IConnectableLayer* inputLayer = net->AddInputLayer(0);
    armnn::IConnectableLayer* dequantizeLayer = net->AddDequantizeLayer("Dequantize");
    armnn::IConnectableLayer* outputLayer = net->AddOutputLayer(0, "output");
    Connect(inputLayer, dequantizeLayer, inputInfo, 0, 0);
    Connect(dequantizeLayer, outputLayer, outputInfo, 0, 0);

    return net;
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndLayerTestImpl(const std::vector<BackendId>& backends,
                                     const armnn::TensorShape& tensorShape,
                                     const std::vector<T>& input,
                                     const std::vector<float>& expectedOutput,
                                     float scale,
                                     int32_t offset)
{
    armnn::TensorInfo inputInfo(tensorShape, ArmnnType);
    armnn::TensorInfo outputInfo(tensorShape, armnn::DataType::Float32);

    // The quantization parameters on the input tensor drive the Dequantize layer.
    inputInfo.SetQuantizationScale(scale);
    inputInfo.SetQuantizationOffset(offset);
    inputInfo.SetConstant(true);

    // Builds up the structure of the network
    armnn::INetworkPtr net = CreateDequantizeNetwork<T>(inputInfo, outputInfo);

    CHECK(net);

    std::map<int, std::vector<T>> inputTensorData = { { 0, input } };
    std::map<int, std::vector<float>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, armnn::DataType::Float32>(
        std::move(net), inputTensorData, expectedOutputData, backends);
}

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndSimple(const std::vector<BackendId>& backends)
{
    const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
    std::vector<T> inputData = std::vector<T>(
    {
         2,  4,  6,  8,
        10, 12, 14, 16,
        18, 20, 22, 24,
        26, 28, 30, 32
    });

    std::vector<float> expectedOutputData = std::vector<float>(
    {
         1.0f,  2.0f,  3.0f,  4.0f,
         5.0f,  6.0f,  7.0f,  8.0f,
         9.0f, 10.0f, 11.0f, 12.0f,
        13.0f, 14.0f, 15.0f, 16.0f
    });
    DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 0);
}
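
// Dequantization maps quantized values back to float as: real = scale * (quantized - offset).
// With scale 0.5f: 0.5f * (2 - 0) = 1.0f in the case above, and 0.5f * (3 - 1) = 1.0f in the
// non-zero-offset case below.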

template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndOffset(const std::vector<BackendId>& backends)
{
    const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
    std::vector<T> inputData = std::vector<T>(
    {
         3,  5,  7,  9,
        11, 13, 15, 17,
        19, 21, 23, 25,
        27, 29, 31, 33
    });

    std::vector<float> expectedOutputData = std::vector<float>(
    {
         1.0f,  2.0f,  3.0f,  4.0f,
         5.0f,  6.0f,  7.0f,  8.0f,
         9.0f, 10.0f, 11.0f, 12.0f,
        13.0f, 14.0f, 15.0f, 16.0f
    });
    DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 1);
}

} // anonymous namespace
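
A minimal sketch (not part of this header) of how a backend's end-to-end test suite might invoke these helpers from doctest. The suite and test names, the include path, and the CpuRef backend choice are illustrative assumptions rather than code taken from this file:

// Hypothetical test translation unit; the include path depends on the build tree.
#include "DequantizeEndToEndTestImpl.hpp"

#include <doctest/doctest.h>

TEST_SUITE("RefEndToEnd")
{
// Run against the reference backend; other backends can be added to this list.
std::vector<armnn::BackendId> defaultBackends = { armnn::Compute::CpuRef };

TEST_CASE("DequantizeEndToEndSimpleTest")
{
    // QAsymmU8 resolves to uint8_t via armnn::ResolveType; scale 0.5, offset 0.
    DequantizeEndToEndSimple<armnn::DataType::QAsymmU8>(defaultBackends);
}

TEST_CASE("DequantizeEndToEndOffsetTest")
{
    // Same data layout, but the input carries a non-zero quantization offset.
    DequantizeEndToEndOffset<armnn::DataType::QAsymmU8>(defaultBackends);
}
}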