ArmNN 21.08
BatchNormalizationLayer.cpp
//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "BatchNormalizationLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/TypesUtils.hpp>

#include <backendsCommon/TensorHandle.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

namespace armnn
{

BatchNormalizationLayer::BatchNormalizationLayer(const BatchNormalizationDescriptor& param, const char* name)
    : LayerWithParameters(1, 1, LayerType::BatchNormalization, param, name)
{
}

std::unique_ptr<IWorkload> BatchNormalizationLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    // At this level the constant data should not have been released yet.
    ARMNN_ASSERT_MSG(m_Mean != nullptr, "BatchNormalizationLayer: Mean data should not be null.");
    ARMNN_ASSERT_MSG(m_Variance != nullptr, "BatchNormalizationLayer: Variance data should not be null.");
    ARMNN_ASSERT_MSG(m_Beta != nullptr, "BatchNormalizationLayer: Beta data should not be null.");
    ARMNN_ASSERT_MSG(m_Gamma != nullptr, "BatchNormalizationLayer: Gamma data should not be null.");

    BatchNormalizationQueueDescriptor descriptor;
    SetAdditionalInfo(descriptor);

    descriptor.m_Mean = m_Mean.get();
    descriptor.m_Variance = m_Variance.get();
    descriptor.m_Beta = m_Beta.get();
    descriptor.m_Gamma = m_Gamma.get();

    return factory.CreateBatchNormalization(descriptor, PrepInfoAndDesc(descriptor));
}

BatchNormalizationLayer* BatchNormalizationLayer::Clone(Graph& graph) const
{
    auto layer = CloneBase<BatchNormalizationLayer>(graph, m_Param, GetName());

    // The constant tensor handles are shared with the original layer rather than deep-copied.
    layer->m_Mean = m_Mean ? m_Mean : nullptr;
    layer->m_Variance = m_Variance ? m_Variance : nullptr;
    layer->m_Beta = m_Beta ? m_Beta : nullptr;
    layer->m_Gamma = m_Gamma ? m_Gamma : nullptr;

    return std::move(layer);
}

void BatchNormalizationLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(1, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape() });

    ARMNN_ASSERT(inferredShapes.size() == 1);

    ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "BatchNormalizationLayer");
}

Layer::ConstantTensors BatchNormalizationLayer::GetConstantTensorsByRef()
{
    return {m_Mean, m_Variance, m_Beta, m_Gamma};
}

void BatchNormalizationLayer::Accept(ILayerVisitor& visitor) const
{
    // Map the constant handles for the duration of the visit.
    ManagedConstTensorHandle managedMean(m_Mean);
    ManagedConstTensorHandle managedVariance(m_Variance);
    ManagedConstTensorHandle managedBeta(m_Beta);
    ManagedConstTensorHandle managedGamma(m_Gamma);

    ConstTensor meanTensor(managedMean.GetTensorInfo(), managedMean.Map());
    ConstTensor varianceTensor(managedVariance.GetTensorInfo(), managedVariance.Map());
    ConstTensor betaTensor(managedBeta.GetTensorInfo(), managedBeta.Map());
    ConstTensor gammaTensor(managedGamma.GetTensorInfo(), managedGamma.Map());

    visitor.VisitBatchNormalizationLayer(
        this, GetParameters(), meanTensor, varianceTensor, betaTensor, gammaTensor, GetName());
}

void BatchNormalizationLayer::ExecuteStrategy(IStrategy& strategy) const
{
    ManagedConstTensorHandle managedMean(m_Mean);
    ManagedConstTensorHandle managedVariance(m_Variance);
    ManagedConstTensorHandle managedBeta(m_Beta);
    ManagedConstTensorHandle managedGamma(m_Gamma);

    std::vector<armnn::ConstTensor> constTensors { { managedMean.GetTensorInfo(), managedMean.Map() },
                                                   { managedVariance.GetTensorInfo(), managedVariance.Map() },
                                                   { managedBeta.GetTensorInfo(), managedBeta.Map() },
                                                   { managedGamma.GetTensorInfo(), managedGamma.Map() } };

    strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
}

} // namespace armnn
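
Usage note: this layer is normally reached through the public armnn::INetwork API rather than constructed directly. The code below is a minimal sketch, assuming the INetwork interface from the same release; the tensor shapes, parameter values and layer names are illustrative only. Each of the four ConstTensor arguments becomes one of the m_Mean/m_Variance/m_Beta/m_Gamma handles held by BatchNormalizationLayer, and the backend workload returned by CreateWorkload applies the usual per-channel normalization out = gamma * (in - mean) / sqrt(variance + m_Eps) + beta.

// Minimal usage sketch (assumed public API usage; values are illustrative).
#include <armnn/ArmNN.hpp>

#include <vector>

int main()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // One parameter value per channel (3 channels assumed here).
    TensorInfo paramInfo(TensorShape({ 3 }), DataType::Float32);
    std::vector<float> meanData     = { 0.0f, 0.0f, 0.0f };
    std::vector<float> varianceData = { 1.0f, 1.0f, 1.0f };
    std::vector<float> betaData     = { 0.0f, 0.0f, 0.0f };
    std::vector<float> gammaData    = { 1.0f, 1.0f, 1.0f };

    ConstTensor mean(paramInfo, meanData);
    ConstTensor variance(paramInfo, varianceData);
    ConstTensor beta(paramInfo, betaData);
    ConstTensor gamma(paramInfo, gammaData);

    BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = 0.0001f;
    descriptor.m_DataLayout = DataLayout::NHWC;

    IConnectableLayer* input = network->AddInputLayer(0);
    IConnectableLayer* batchNorm =
        network->AddBatchNormalizationLayer(descriptor, mean, variance, beta, gamma, "batchNorm");
    IConnectableLayer* output = network->AddOutputLayer(0);

    // 1x2x2x3 NHWC input feeding the batch normalization layer.
    TensorInfo inputInfo(TensorShape({ 1, 2, 2, 3 }), DataType::Float32);
    input->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0));
    input->GetOutputSlot(0).SetTensorInfo(inputInfo);

    batchNorm->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    batchNorm->GetOutputSlot(0).SetTensorInfo(inputInfo);

    return 0;
}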