StaticRangeStrategy.cpp (ArmNN 21.02)
//
// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "StaticRangeStrategy.hpp"

#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/Types.hpp>

#include <limits>

namespace armnn
{

StaticRangeStrategy::StaticRangeStrategy(RangeTracker& rangeTracker)
    : m_RangeTracker(rangeTracker)
{}

void StaticRangeStrategy::SetRange(const IConnectableLayer* layer, unsigned int outputIdx, float min, float max)
{
    m_RangeTracker.SetRange(layer, outputIdx, min, max);
}

void StaticRangeStrategy::ForwardParentParameters(const IConnectableLayer* layer)
{
    const auto parentRange = m_RangeTracker.GetRange(layer->GetInputSlot(0).GetConnection()->GetOwningLayerGuid(), 0);
    SetRange(layer, 0, parentRange.first, parentRange.second);
}


void StaticRangeStrategy::ExecuteStrategy(const armnn::IConnectableLayer* layer,
                                          const BaseDescriptor& descriptor,
                                          const std::vector<armnn::ConstTensor>& constants,
                                          const char* name,
                                          const armnn::LayerBindingId id)
{
    IgnoreUnused(id, name);

    switch (layer->GetType())
    {
        case armnn::LayerType::Activation :
        {
            const ActivationDescriptor& activationDescriptor = static_cast<const ActivationDescriptor&>(descriptor);

            switch (activationDescriptor.m_Function)
            {
                // Range is 0, 15 for Abs, Linear, ReLu and Soft ReLu
                case ActivationFunction::Abs:
                case ActivationFunction::Linear:
                case ActivationFunction::ReLu:
                case ActivationFunction::SoftReLu:
                    SetRange(layer, 0, 0.f, 15.f);
                    break;
                case ActivationFunction::BoundedReLu:
                    SetRange(layer, 0, 0.f, activationDescriptor.m_A);
                    break;
                case ActivationFunction::TanH:
                    SetRange(layer, 0, -1.f, 1.f);
                    break;
                case ActivationFunction::LeakyReLu:
                    SetRange(layer, 0, -5.f, 15.f);
                    break;
                default:
                    SetRange(layer, 0, -15.f, 15.f);
                    break;
            }
            break;
        }
        case armnn::LayerType::Addition :
        {
            SetRange(layer, 0, -20.f, 20.f);
            break;
        }
        case armnn::LayerType::ArgMinMax :
        {
            ForwardParentParameters(layer);
            break;
        }
        case armnn::LayerType::BatchToSpaceNd :
        {
            ForwardParentParameters(layer);
            break;
        }
        case armnn::LayerType::BatchNormalization :
        {
            SetRange(layer, 0, -15.0f, 15.0f);
            break;
        }
        case armnn::LayerType::Concat :
        {
            // The output range of a concatenation is the union of its inputs' ranges.
            float min = std::numeric_limits<float>::max();
            float max = std::numeric_limits<float>::lowest();
            for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
            {
                const IOutputSlot* outputSlot = layer->GetInputSlot(i).GetConnection();
                LayerGuid layerId = outputSlot->GetOwningLayerGuid();
                unsigned int slotIndex = outputSlot->CalculateIndexOnOwner();
                RangeTracker::MinMaxRange range = m_RangeTracker.GetRange(layerId, slotIndex);
                min = std::min(min, range.first);
                max = std::max(max, range.second);
            }
            SetRange(layer, 0, min, max);
            break;
        }
        case armnn::LayerType::Constant :
        {
            if (constants[0].GetDataType() != DataType::Float32)
            {
                throw InvalidArgumentException("Quantization is supported only for FP32 tensors");
            }

            // Work out the range based on the input constants
            unsigned int inputNumElements = constants[0].GetNumElements();
            const float* inputData = reinterpret_cast<const float*>(constants[0].GetMemoryArea());

            float min = std::numeric_limits<float>::max();
            float max = std::numeric_limits<float>::lowest();

            for (unsigned int i = 0; i < inputNumElements; i++)
            {
                const float inputValue = inputData[i];

                min = std::min(min, inputValue);
                max = std::max(max, inputValue);
            }
            SetRange(layer, 0, min, max);
            break;
        }
        case armnn::LayerType::Convolution2d :
        {
            SetRange(layer, 0, -15.0f, 15.0f);
            break;
        }
        case armnn::LayerType::DepthwiseConvolution2d :
        {
            SetRange(layer, 0, -15.0f, 15.0f);
            break;
        }
        case armnn::LayerType::FullyConnected :
        {
            SetRange(layer, 0, -15.0f, 15.0f);
            break;
        }
        case armnn::LayerType::Permute :
        {
            ForwardParentParameters(layer);
            break;
        }
        case armnn::LayerType::Pooling2d :
        {
            ForwardParentParameters(layer);
            break;
        }
        case armnn::LayerType::Reshape :
        {
            ForwardParentParameters(layer);
            break;
        }
        case armnn::LayerType::Resize :
        {
            ForwardParentParameters(layer);
            break;
        }
        case armnn::LayerType::SpaceToBatchNd :
        {
            ForwardParentParameters(layer);
            break;
        }
        case armnn::LayerType::Splitter :
        {
            ForwardParentParameters(layer);
            break;
        }
        case armnn::LayerType::Softmax :
        {
            SetRange(layer, 0, 0.f, 1.f);
            break;
        }
        case armnn::LayerType::StridedSlice :
        {
            ForwardParentParameters(layer);
            break;
        }
        default:
        {
        }
    }
}

} //namespace armnn
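For context, below is a minimal sketch of how a range strategy like this one can be driven over a network and its results read back. It is illustrative only: it assumes access to the internal headers StaticRangeStrategy.hpp and RangeTracker.hpp (they live under src/armnn and are not part of the installed public headers) and that INetwork exposes ExecuteStrategy(IStrategy&) in this release; the tiny Input -> BoundedReLu -> Output network and the expected [0, 6] range follow directly from the Activation case above.

// Sketch only: assumes the internal headers below are reachable on the
// include path, as they are for code living inside the ArmNN tree.
#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

#include "RangeTracker.hpp"          // internal, src/armnn
#include "StaticRangeStrategy.hpp"   // internal, src/armnn

#include <cstdio>

int main()
{
    using namespace armnn;

    // Build a tiny Input -> BoundedReLu(6) -> Output network.
    INetworkPtr network = INetwork::Create();

    IConnectableLayer* input = network->AddInputLayer(0);

    ActivationDescriptor reluDesc;
    reluDesc.m_Function = ActivationFunction::BoundedReLu;
    reluDesc.m_A        = 6.0f;   // upper bound recorded by the Activation case
    IConnectableLayer* relu = network->AddActivationLayer(reluDesc, "relu6");

    IConnectableLayer* output = network->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(relu->GetInputSlot(0));
    relu->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    TensorInfo info({ 1, 8 }, DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(info);
    relu->GetOutputSlot(0).SetTensorInfo(info);

    // Record static ranges for every layer in the network.
    RangeTracker ranges;
    StaticRangeStrategy strategy(ranges);
    network->ExecuteStrategy(strategy);   // assumed available on INetwork in 21.02

    // The bounded ReLU should now be tracked with the range [0, 6].
    RangeTracker::MinMaxRange r = ranges.GetRange(relu->GetGuid(), 0);
    std::printf("relu6 output range: [%f, %f]\n", r.first, r.second);
    return 0;
}

Input and Output layers fall through to the default case, so only the activation layer gets a recorded range in this example.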
Referenced declarations:

armnn::IConnectableLayer: Interface for a layer that is connectable to other layers via InputSlots and OutputSlots. (Definition: INetwork.hpp:62)
IConnectableLayer::GetNumInputSlots(): Returns the number of connectable input slots.
IConnectableLayer::GetType(): Returns the armnn::LayerType of this layer.
IConnectableLayer::GetInputSlot(unsigned int index): Get a const input slot handle by slot index.
IInputSlot::GetConnection(): Returns the IOutputSlot connected to this input slot.
armnn::IOutputSlot: An output connection slot for a layer. (Definition: INetwork.hpp:38)
IOutputSlot::CalculateIndexOnOwner(): Returns the index of this slot on its owning layer.
IOutputSlot::GetOwningLayerGuid(): Returns the LayerGuid of the layer that owns this slot.
StaticRangeStrategy::StaticRangeStrategy(RangeTracker& rangeTracker): Constructor taking the RangeTracker that records the ranges.
StaticRangeStrategy::ExecuteStrategy(const armnn::IConnectableLayer* layer, const BaseDescriptor& descriptor, const std::vector<armnn::ConstTensor>& constants, const char* name, const armnn::LayerBindingId id) override.
SetRange(const IConnectableLayer* layer, unsigned int outputIdx, float min, float max): Set the range for an output slot on a layer.
RangeTracker::GetRange(LayerGuid guid, unsigned int idx): Retrieve the range for a particular output slot on a particular layer; returns a MinMaxRange.
RangeTracker::MinMaxRange: std::pair<float, float>.
armnn::LayerBindingId: int. Type of identifiers for bindable layers (inputs, outputs). (Definition: Types.hpp:210)
armnn::BaseDescriptor: Base class for all descriptors. (Definition: Descriptors.hpp:22)
armnn::ActivationDescriptor: An ActivationDescriptor for the ActivationLayer. (Definition: Descriptors.hpp:25)
ActivationDescriptor::m_Function: The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu). (Definition: Descriptors.hpp:48)
ActivationDescriptor::m_A: Alpha upper bound value used by the activation functions (BoundedReLu, Linear, TanH, Elu). (Definition: Descriptors.hpp:50)
ActivationFunction::BoundedReLu: min(a, max(b, input)); ReLu1 & ReLu6.
armnn::IgnoreUnused(Ts&&...): Suppresses unused-parameter warnings.
namespace armnn: Copyright (c) 2021 ARM Limited and Contributors.