ArmNN 24.05
BatchNormalizationLayer.cpp
1 //
2 // Copyright © 2017-2024 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 #include "BatchNormalizationLayer.hpp"
6 
7 #include "LayerCloneBase.hpp"
8 
9 #include <armnn/TypesUtils.hpp>
10 #include <armnn/backends/TensorHandle.hpp>
11 #include <armnn/backends/WorkloadFactory.hpp>
12 
13 namespace armnn
14 {
15 
16 BatchNormalizationLayer::BatchNormalizationLayer(const BatchNormalizationDescriptor& param, const char* name)
17  : LayerWithParameters(1, 1, LayerType::BatchNormalization, param, name)
18 {
19 }
20 
21 std::unique_ptr<IWorkload> BatchNormalizationLayer::CreateWorkload(const IWorkloadFactory& factory) const
22 {
23  // On this level, constant data should not be released.
24  if (!m_Mean)
25  {
26  throw armnn::NullPointerException("BatchNormalizationLayer: Mean data should not be null.");
27  }
28 
29  if (!m_Variance)
30  {
31  throw armnn::NullPointerException("BatchNormalizationLayer: Variance data should not be null.");
32  }
33 
34  if (!m_Beta)
35  {
36  throw armnn::NullPointerException("BatchNormalizationLayer: Beta data should not be null.");
37  }
38 
39  if (!m_Gamma)
40  {
41  throw armnn::NullPointerException("BatchNormalizationLayer: Gamma data should not be null.");
42  }
43 
44  BatchNormalizationQueueDescriptor descriptor;
45  SetAdditionalInfo(descriptor);
46 
47  descriptor.m_Mean = m_Mean.get();
48  descriptor.m_Variance = m_Variance.get();
49  descriptor.m_Beta = m_Beta.get();
50  descriptor.m_Gamma = m_Gamma.get();
51 
52  return factory.CreateWorkload(LayerType::BatchNormalization, descriptor, PrepInfoAndDesc(descriptor));
53 }
54 
55 BatchNormalizationLayer* BatchNormalizationLayer::Clone(Graph& graph) const
56 {
57  auto layer = CloneBase<BatchNormalizationLayer>(graph, m_Param, GetName());
58 
59  layer->m_Mean = m_Mean ? m_Mean : nullptr;
60  layer->m_Variance = m_Variance ? m_Variance : nullptr;
61  layer->m_Beta = m_Beta ? m_Beta : nullptr;
62  layer->m_Gamma = m_Gamma ? m_Gamma : nullptr;
63 
64  return std::move(layer);
65 }
66 
67 void BatchNormalizationLayer::ValidateTensorShapesFromInputs()
68 {
69  VerifyLayerConnections(1, CHECK_LOCATION());
70 
71  const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();
72 
73  VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);
74 
75  auto inferredShapes = InferOutputShapes({ GetInputSlot(0).GetTensorInfo().GetShape() });
76 
77  if (inferredShapes.size() != 1)
78  {
79  throw armnn::LayerValidationException("inferredShapes has "
80  + std::to_string(inferredShapes.size()) +
81  " elements - should only have 1.");
82  }
83 
84  ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "BatchNormalizationLayer");
85 
86 }
87 
88 Layer::ImmutableConstantTensors BatchNormalizationLayer::GetConstantTensorsByRef() const
89 {
90  // For API stability DO NOT ALTER the order; add new members to the end of the vector
91  return {m_Mean, m_Variance, m_Beta, m_Gamma};
92 }
93 
94 void BatchNormalizationLayer::ExecuteStrategy(IStrategy& strategy) const
95 {
96  ManagedConstTensorHandle managedMean(m_Mean);
97  ManagedConstTensorHandle managedVariance(m_Variance);
98  ManagedConstTensorHandle managedBeta(m_Beta);
99  ManagedConstTensorHandle managedGamma(m_Gamma);
100 
101  std::vector<armnn::ConstTensor> constTensors { { managedMean.GetTensorInfo(), managedMean.Map() },
102  { managedVariance.GetTensorInfo(), managedVariance.Map() },
103  { managedBeta.GetTensorInfo(), managedBeta.Map() },
104  { managedGamma.GetTensorInfo(), managedGamma.Map() } };
105 
106  strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
107 }
108 
109 } // namespace armnn
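
The checks in CreateWorkload() above rely on m_Mean, m_Variance, m_Beta and m_Gamma having been attached to the layer before any workload is created. A minimal usage sketch of how that normally happens through the public INetwork API follows; it is not part of this file, and the function name, tensor shapes, values and the "batchNorm" layer name are illustrative only.

// Usage sketch (assumed example, not part of BatchNormalizationLayer.cpp).
#include <armnn/ArmNN.hpp>

#include <vector>

armnn::INetworkPtr BuildBatchNormExample()
{
    using namespace armnn;

    INetworkPtr network = INetwork::Create();

    // Batch normalization expects one value per channel; this example assumes 4 channels.
    TensorInfo constInfo({ 4 }, DataType::Float32, 0.0f, 0, true);
    std::vector<float> mean(4, 0.0f);
    std::vector<float> variance(4, 1.0f);
    std::vector<float> beta(4, 0.0f);
    std::vector<float> gamma(4, 1.0f);

    ConstTensor meanTensor(constInfo, mean);
    ConstTensor varianceTensor(constInfo, variance);
    ConstTensor betaTensor(constInfo, beta);
    ConstTensor gammaTensor(constInfo, gamma);

    BatchNormalizationDescriptor desc;
    desc.m_Eps        = 0.0001f;
    desc.m_DataLayout = DataLayout::NCHW;

    IConnectableLayer* input     = network->AddInputLayer(0);
    IConnectableLayer* batchNorm = network->AddBatchNormalizationLayer(desc, meanTensor, varianceTensor,
                                                                       betaTensor, gammaTensor, "batchNorm");
    IConnectableLayer* output    = network->AddOutputLayer(0);

    TensorInfo activationInfo({ 1, 4, 8, 8 }, DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(activationInfo);
    batchNorm->GetOutputSlot(0).SetTensorInfo(activationInfo);

    input->GetOutputSlot(0).Connect(batchNorm->GetInputSlot(0));
    batchNorm->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    return network;
}

When such a network is later optimised and loaded, the internal layer created by AddBatchNormalizationLayer is the BatchNormalizationLayer whose ValidateTensorShapesFromInputs() and CreateWorkload() are shown above.
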
armnn::BatchNormalizationDescriptor
A BatchNormalizationDescriptor for the BatchNormalizationLayer.
Definition: Descriptors.hpp:828
armnn::BatchNormalizationQueueDescriptor
Definition: WorkloadData.hpp:311
armnn::LayerType::BatchNormalization
@ BatchNormalization
armnn::BatchNormalizationQueueDescriptor::m_Gamma
const ConstTensorHandle * m_Gamma
Definition: WorkloadData.hpp:324
armnn::OutputSlot::GetTensorInfo
const TensorInfo & GetTensorInfo() const override
Definition: Layer.cpp:100
armnn::BatchNormalizationLayer::m_Mean
std::shared_ptr< ConstTensorHandle > m_Mean
A shared pointer to store the Mean values.
Definition: BatchNormalizationLayer.hpp:19
TypesUtils.hpp
armnn::BatchNormalizationQueueDescriptor::m_Variance
const ConstTensorHandle * m_Variance
Definition: WorkloadData.hpp:322
CHECK_LOCATION
#define CHECK_LOCATION()
Definition: Exceptions.hpp:203
armnn::Layer::ValidateAndCopyShape
void ValidateAndCopyShape(const TensorShape &outputShape, const TensorShape &inferredShape, const ShapeInferenceMethod shapeInferenceMethod, const std::string &layerName, const unsigned int outputSlotIndex=0)
Definition: Layer.cpp:457
armnn::Layer::GetOutputSlot
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:339
armnn::BatchNormalizationQueueDescriptor::m_Mean
const ConstTensorHandle * m_Mean
Definition: WorkloadData.hpp:321
armnn::IStrategy
Definition: IStrategy.hpp:16
armnn::BatchNormalizationLayer
This layer represents a batch normalization operation.
Definition: BatchNormalizationLayer.hpp:15
armnn::ManagedConstTensorHandle
Definition: TensorHandle.hpp:187
armnn::Layer::GetInputSlot
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:337
armnn::LayerWithParameters< BatchNormalizationDescriptor >::GetParameters
const BatchNormalizationDescriptor & GetParameters() const override
Definition: LayerWithParameters.hpp:19
armnn::BatchNormalizationLayer::ExecuteStrategy
void ExecuteStrategy(IStrategy &strategy) const override
Apply a visitor to this layer.
Definition: BatchNormalizationLayer.cpp:94
WorkloadFactory.hpp
armnn::LayerWithParameters
Definition: LayerWithParameters.hpp:14
armnn::Layer::GetName
const char * GetName() const override
Returns the name of the layer.
Definition: Layer.hpp:332
armnn::IConnectableLayer::ImmutableConstantTensors
std::vector< std::reference_wrapper< const std::shared_ptr< ConstTensorHandle > >> ImmutableConstantTensors
Definition: INetwork.hpp:141
armnn::InputSlot::GetTensorInfo
const TensorInfo & GetTensorInfo() const override
Gets the TensorInfo for this InputSlot.
Definition: Layer.cpp:614
armnn::BatchNormalizationLayer::m_Gamma
std::shared_ptr< ConstTensorHandle > m_Gamma
A shared pointer to store the Gamma values.
Definition: BatchNormalizationLayer.hpp:25
armnn::TensorShape
Definition: Tensor.hpp:20
armnn::LayerWithParameters< BatchNormalizationDescriptor >::m_Param
BatchNormalizationDescriptor m_Param
The parameters for the layer (not including tensor-valued weights etc.).
Definition: LayerWithParameters.hpp:52
armnn::BatchNormalizationLayer::m_Variance
std::shared_ptr< ConstTensorHandle > m_Variance
A shared pointer to store the Variance values.
Definition: BatchNormalizationLayer.hpp:21
BatchNormalizationLayer.hpp
armnn::LayerWithParameters< BatchNormalizationDescriptor >::PrepInfoAndDesc
WorkloadInfo PrepInfoAndDesc(QueueDescriptor &descriptor) const
Helper function to reduce duplication in *Layer::CreateWorkload.
Definition: LayerWithParameters.hpp:44
armnn::BatchNormalizationLayer::GetConstantTensorsByRef
ImmutableConstantTensors GetConstantTensorsByRef() const override
Retrieve the handles to the constant values stored by the layer.
Definition: BatchNormalizationLayer.cpp:88
armnn::LayerValidationException
Definition: Exceptions.hpp:105
armnn::IWorkloadFactory
Definition: WorkloadFactory.hpp:22
armnn::BatchNormalizationQueueDescriptor::m_Beta
const ConstTensorHandle * m_Beta
Definition: WorkloadData.hpp:323
armnn::Layer::VerifyShapeInferenceType
void VerifyShapeInferenceType(const TensorShape &outputShape, ShapeInferenceMethod shapeInferenceMethod)
Definition: Layer.cpp:526
armnn::BatchNormalizationLayer::CreateWorkload
virtual std::unique_ptr< IWorkload > CreateWorkload(const IWorkloadFactory &factory) const override
Makes a workload for the BatchNormalization type.
Definition: BatchNormalizationLayer.cpp:21
armnn::ManagedConstTensorHandle::Map
const void * Map(bool blocking=true)
RAII Managed resource Unmaps MemoryArea once out of scope.
Definition: TensorHandle.hpp:196
armnn::Layer::SetAdditionalInfo
void SetAdditionalInfo(QueueDescriptor &descriptor) const
Definition: Layer.cpp:303
armnn::BatchNormalizationLayer::Clone
BatchNormalizationLayer * Clone(Graph &graph) const override
Creates a dynamically-allocated copy of this layer.
Definition: BatchNormalizationLayer.cpp:55
armnn::BatchNormalizationLayer::m_Beta
std::shared_ptr< ConstTensorHandle > m_Beta
A shared pointer to store the Beta values.
Definition: BatchNormalizationLayer.hpp:23
TensorHandle.hpp
armnn::BatchNormalizationLayer::BatchNormalizationLayer
BatchNormalizationLayer(const BatchNormalizationDescriptor &param, const char *name)
Constructor to create a BatchNormalizationLayer.
Definition: BatchNormalizationLayer.cpp:16
armnn::TensorInfo::GetShape
const TensorShape & GetShape() const
Definition: Tensor.hpp:193
armnn
Copyright (c) 2021 ARM Limited and Contributors.
Definition: 01_00_quick_start.dox:6
armnn::Layer::InferOutputShapes
std::vector< TensorShape > InferOutputShapes(const std::vector< TensorShape > &inputShapes) const override
Infer the shape of the output(s) based on the provided input shape(s)
Definition: Layer.cpp:432
armnn::Layer::VerifyLayerConnections
void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation &location) const
Definition: Layer.cpp:410
armnn::BatchNormalizationLayer::ValidateTensorShapesFromInputs
void ValidateTensorShapesFromInputs() override
Check if the input tensor shape(s) will lead to a valid configuration of BatchNormalizationLayer.
Definition: BatchNormalizationLayer.cpp:67
armnn::NullPointerException
Definition: Exceptions.hpp:146
armnn::Layer::m_ShapeInferenceMethod
ShapeInferenceMethod m_ShapeInferenceMethod
Definition: Layer.hpp:441
armnn::LayerType
LayerType
When adding a new layer, adapt also the LastLayer enum value in the enum class LayerType below.
Definition: Types.hpp:491
armnn::Graph
Definition: Graph.hpp:30
armnn::IWorkloadFactory::CreateWorkload
virtual std::unique_ptr< IWorkload > CreateWorkload(LayerType type, const QueueDescriptor &descriptor, const WorkloadInfo &info) const =0
Backends should implement their own CreateWorkload function with a switch statement.
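A rough sketch of that dispatch is shown below. It is an assumed example: the free function CreateBackendWorkload stands in for a backend's IWorkloadFactory::CreateWorkload override, and the commented-out MyBatchNormWorkload type does not exist in Arm NN.

#include <armnn/backends/IWorkload.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadFactory.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <memory>

std::unique_ptr<armnn::IWorkload> CreateBackendWorkload(armnn::LayerType type,
                                                        const armnn::QueueDescriptor& descriptor,
                                                        const armnn::WorkloadInfo& info)
{
    switch (type)
    {
        case armnn::LayerType::BatchNormalization:
        {
            // Recover the concrete descriptor populated by BatchNormalizationLayer::CreateWorkload,
            // which carries m_Mean, m_Variance, m_Beta and m_Gamma.
            const auto* bnDescriptor =
                armnn::PolymorphicDowncast<const armnn::BatchNormalizationQueueDescriptor*>(&descriptor);
            // A real backend would construct its own workload type here, e.g.
            // return std::make_unique<MyBatchNormWorkload>(*bnDescriptor, info);
            (void)bnDescriptor;
            (void)info;
            return nullptr;
        }
        default:
        {
            return nullptr;
        }
    }
}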
armnn::IStrategy::ExecuteStrategy
virtual void ExecuteStrategy(const IConnectableLayer *layer, const armnn::BaseDescriptor &descriptor, const std::vector< armnn::ConstTensor > &constants, const char *name, const armnn::LayerBindingId id=0)=0
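For context, a sketch of a visitor on the receiving end of these calls (assumed example: ConstantCountingStrategy is an illustrative name, not an Arm NN class). BatchNormalizationLayer::ExecuteStrategy above passes it the layer's descriptor, its four constant tensors (mean, variance, beta, gamma) and its name.

#include <armnn/ArmNN.hpp>
#include <armnn/IStrategy.hpp>

#include <iostream>
#include <vector>

// Reports how many constant tensors each visited layer hands over.
class ConstantCountingStrategy : public armnn::IStrategy
{
public:
    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                         const armnn::BaseDescriptor& /*descriptor*/,
                         const std::vector<armnn::ConstTensor>& constants,
                         const char* name,
                         const armnn::LayerBindingId /*id*/) override
    {
        std::cout << (name ? name : layer->GetName()) << ": "
                  << constants.size() << " constant tensor(s)" << std::endl;
    }
};

// Usage, assuming an INetworkPtr called 'network' that was built elsewhere:
//   ConstantCountingStrategy strategy;
//   network->ExecuteStrategy(strategy);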
LayerCloneBase.hpp
armnn::ManagedConstTensorHandle::GetTensorInfo
const TensorInfo & GetTensorInfo() const
Definition: TensorHandle.hpp:239