ArmNN
 21.02
StaticRangeStrategy Class Reference

#include <StaticRangeStrategy.hpp>

Inheritance diagram for StaticRangeStrategy:
IStrategy

Public Member Functions

 StaticRangeStrategy (RangeTracker &rangeTracker)
 
 ~StaticRangeStrategy ()=default
 
void ExecuteStrategy (const armnn::IConnectableLayer *layer, const BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id) override
 
- Public Member Functions inherited from IStrategy
virtual void FinishStrategy ()
 

Additional Inherited Members

- Protected Member Functions inherited from IStrategy
 IStrategy ()
 
virtual ~IStrategy ()
 

Detailed Description

Definition at line 18 of file StaticRangeStrategy.hpp.

Constructor & Destructor Documentation

◆ StaticRangeStrategy()

◆ ~StaticRangeStrategy()

~StaticRangeStrategy ( )
default

Member Function Documentation

◆ ExecuteStrategy()

void ExecuteStrategy ( const armnn::IConnectableLayer *  layer,
const BaseDescriptor &  descriptor,
const std::vector< armnn::ConstTensor > &  constants,
const char *  name,
const armnn::LayerBindingId  id 
)
overridevirtual

Implements IStrategy.

Definition at line 33 of file StaticRangeStrategy.cpp.

References armnn::Abs, armnn::Activation, armnn::Addition, armnn::ArgMinMax, armnn::BatchNormalization, armnn::BatchToSpaceNd, armnn::BoundedReLu, IOutputSlot::CalculateIndexOnOwner(), armnn::Concat, armnn::Constant, armnn::Convolution2d, armnn::DepthwiseConvolution2d, armnn::Float32, armnn::FullyConnected, IInputSlot::GetConnection(), IConnectableLayer::GetInputSlot(), IConnectableLayer::GetNumInputSlots(), IOutputSlot::GetOwningLayerGuid(), RangeTracker::GetRange(), IConnectableLayer::GetType(), armnn::IgnoreUnused(), armnn::LeakyReLu, armnn::Linear, ActivationDescriptor::m_A, ActivationDescriptor::m_Function, armnn::Permute, armnn::Pooling2d, armnn::ReLu, armnn::Reshape, armnn::Resize, armnn::Softmax, armnn::SoftReLu, armnn::SpaceToBatchNd, armnn::Splitter, armnn::StridedSlice, and armnn::TanH.

38 {
39 IgnoreUnused(id, name);
40 
41 switch (layer->GetType())
42 {
44  {
45  const ActivationDescriptor& activationDescriptor = static_cast<const ActivationDescriptor&>(descriptor);
46 
47  switch (activationDescriptor.m_Function)
48  {
49  // Range is 0, 15 for Abs, Linear, ReLu and Soft ReLu
54  SetRange(layer, 0, 0.f, 15.f);
55  break;
57  SetRange(layer, 0, 0.f, activationDescriptor.m_A);
58  break;
60  SetRange(layer, 0, -1.f, 1.f);
61  break;
63  SetRange(layer, 0, -5.f, 15.f);
64  break;
65  default:
66  SetRange(layer, 0, -15.f, 15.f);
67  break;
68  }
69  break;
70  }
72  {
73  SetRange(layer, 0, -20.f, 20.f);
74  break;
75  }
77  {
78  ForwardParentParameters(layer);
79  break;
80  }
82  {
83  ForwardParentParameters(layer);
84  break;
85  }
87  {
88  SetRange(layer, 0, -15.0f, 15.0f);
89  break;
90  }
92  {
93  float min = std::numeric_limits<float>::max();
94  float max = std::numeric_limits<float>::lowest();
95  for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
96  {
97  const IOutputSlot* outputSlot = layer->GetInputSlot(i).GetConnection();
98  LayerGuid layerId = outputSlot->GetOwningLayerGuid();
99  unsigned int slotIndex = outputSlot->CalculateIndexOnOwner();
100  RangeTracker::MinMaxRange range = m_RangeTracker.GetRange(layerId, slotIndex);
101  min = std::min(min, range.first);
102  max = std::max(max, range.second);
103  }
104  SetRange(layer, 0, min, max);
105  break;
106  }
108  {
109 
110  if (constants[0].GetDataType() != DataType::Float32)
111  {
112  throw InvalidArgumentException("Quantization is supported only for FP32 tensors");
113  }
114 
115  // Work out the range based on the input constants
116  unsigned int inputNumElements = constants[0].GetNumElements();
117  const float* inputData = reinterpret_cast<const float*>(constants[0].GetMemoryArea());
118 
119  float min = std::numeric_limits<float>::max();
120  float max = std::numeric_limits<float>::lowest();
121 
122  for (unsigned int i = 0; i < inputNumElements; i++)
123  {
124  const float inputValue = inputData[i];
125 
126  min = std::min(min, inputValue);
127  max = std::max(max, inputValue);
128  }
129  SetRange(layer, 0, min, max);
130  break;
131  }
133  {
134  SetRange(layer, 0, -15.0f, 15.0f);
135  break;
136  }
138  {
139  SetRange(layer, 0, -15.0f, 15.0f);
140  break;
141  }
143  {
144  SetRange(layer, 0, -15.0f, 15.0f);
145  break;
146  }
148  {
149  ForwardParentParameters(layer);
150  break;
151  }
153  {
154  ForwardParentParameters(layer);
155  break;
156  }
158  {
159  ForwardParentParameters(layer);
160  break;
161  }
163  {
164  ForwardParentParameters(layer);
165  break;
166  }
168  {
169  ForwardParentParameters(layer);
170  break;
171  }
173  {
174  ForwardParentParameters(layer);
175  break;
176  }
178  {
179  SetRange(layer, 0, 0.f, 1.f);
180  break;
181  }
183  {
184  ForwardParentParameters(layer);
185  break;
186  }
187  default:
188  {
189  }
190 }
191 }
virtual unsigned int GetNumInputSlots() const =0
Returns the number of connectable input slots.
void IgnoreUnused(Ts &&...)
std::pair< float, float > MinMaxRange
MinMaxRange GetRange(LayerGuid guid, unsigned int idx) const
Retrieve the Range for a particular output slot on a particular layer.
min(a, max(b, input)) ReLu1 & ReLu6.
virtual LayerType GetType() const =0
Returns the armnn::LayerType of this layer.
profiling::ProfilingGuid LayerGuid
Define LayerGuid type.
Definition: Types.hpp:275
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
virtual const IOutputSlot * GetConnection() const =0

The documentation for this class was generated from the following files: