ArmNN 21.08
DequantizeEndToEndTestImpl.hpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "CommonTestUtils.hpp"

#include <armnn/INetwork.hpp>
#include <ResolveType.hpp>

#include <doctest/doctest.h>

namespace
{

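// Builds a minimal Input -> Dequantize -> Output network. The input slot carries the
// quantized tensor info (including scale and offset) and the output is Float32.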
template<typename T>
armnn::INetworkPtr CreateDequantizeNetwork(const armnn::TensorInfo& inputInfo,
                                           const armnn::TensorInfo& outputInfo)
{
    armnn::INetworkPtr net(armnn::INetwork::Create());

    armnn::IConnectableLayer* inputLayer      = net->AddInputLayer(0);
    armnn::IConnectableLayer* dequantizeLayer = net->AddDequantizeLayer("Dequantize");
    armnn::IConnectableLayer* outputLayer     = net->AddOutputLayer(0, "output");
    Connect(inputLayer, dequantizeLayer, inputInfo, 0, 0);
    Connect(dequantizeLayer, outputLayer, outputInfo, 0, 0);

    return net;
}

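// Sets the quantization parameters on the input, builds the network and runs it on the
// given backends, comparing the Float32 result against expectedOutput.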
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndLayerTestImpl(const std::vector<BackendId>& backends,
                                     const armnn::TensorShape& tensorShape,
                                     const std::vector<T>& input,
                                     const std::vector<float>& expectedOutput,
                                     float scale,
                                     int32_t offset)
{
    armnn::TensorInfo inputInfo(tensorShape, ArmnnType);
    armnn::TensorInfo outputInfo(tensorShape, armnn::DataType::Float32);

    inputInfo.SetQuantizationScale(scale);
    inputInfo.SetQuantizationOffset(offset);

    // Builds up the structure of the network
    armnn::INetworkPtr net = CreateDequantizeNetwork<T>(inputInfo, outputInfo);

    CHECK(net);

    std::map<int, std::vector<T>> inputTensorData = { { 0, input } };
    std::map<int, std::vector<float>> expectedOutputData = { { 0, expectedOutput } };

    EndToEndLayerTestImpl<ArmnnType, armnn::DataType::Float32>(
        std::move(net), inputTensorData, expectedOutputData, backends);
}

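// Dequantizes with scale 0.5 and offset 0: real = scale * (quantized - offset),
// e.g. 0.5f * (2 - 0) = 1.0f for the first element.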
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndSimple(const std::vector<BackendId>& backends)
{
    const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
    std::vector<T> inputData = std::vector<T>(
    {
         2,  4,  6,  8,
        10, 12, 14, 16,
        18, 20, 22, 24,
        26, 28, 30, 32
    });

    std::vector<float> expectedOutputData = std::vector<float>(
    {
         1.0f,  2.0f,  3.0f,  4.0f,
         5.0f,  6.0f,  7.0f,  8.0f,
         9.0f, 10.0f, 11.0f, 12.0f,
        13.0f, 14.0f, 15.0f, 16.0f
    });
    DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 0);
}

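// Same as above but with a quantization offset of 1: e.g. 0.5f * (3 - 1) = 1.0f.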
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void DequantizeEndToEndOffset(const std::vector<BackendId>& backends)
{
    const armnn::TensorShape tensorShape({ 1, 2, 2, 4 });
    std::vector<T> inputData = std::vector<T>(
    {
         3,  5,  7,  9,
        11, 13, 15, 17,
        19, 21, 23, 25,
        27, 29, 31, 33
    });

    std::vector<float> expectedOutputData = std::vector<float>(
    {
         1.0f,  2.0f,  3.0f,  4.0f,
         5.0f,  6.0f,  7.0f,  8.0f,
         9.0f, 10.0f, 11.0f, 12.0f,
        13.0f, 14.0f, 15.0f, 16.0f
    });
    DequantizeEndToEndLayerTestImpl<ArmnnType>(backends, tensorShape, inputData, expectedOutputData, 0.5f, 1);
}

} // anonymous namespace
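As a rough usage sketch (not part of this file), a backend's end-to-end test suite would typically call these helpers once per quantized data type. The suite and case names, the QAsymmU8 data type, and the CpuRef backend below are illustrative assumptions, not taken from this header:

// Hypothetical caller in a backend end-to-end test file (names are assumptions).
TEST_SUITE("RefEndToEnd")
{
TEST_CASE("DequantizeEndToEndSimpleU8Test")
{
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    DequantizeEndToEndSimple<armnn::DataType::QAsymmU8>(backends);
}

TEST_CASE("DequantizeEndToEndOffsetU8Test")
{
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    DequantizeEndToEndOffset<armnn::DataType::QAsymmU8>(backends);
}
}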