ArmNN
 23.11
LayerSupportBase.cpp
//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/Deprecated.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/Types.hpp>

#include <backendsCommon/LayerSupportBase.hpp>

#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <sstream> // for std::stringstream used by DefaultLayerSupport

namespace
{

bool DefaultLayerSupport(const char* func,
                         const char* file,
                         unsigned int line,
                         armnn::Optional<std::string&> reasonIfUnsupported)
{
    // NOTE: We only need to return the reason if the optional parameter is not empty
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << func << " is not implemented [" << file << ":" << line << "]";

        reasonIfUnsupported.value() = message.str();
    }

    return false;
}

} // anonymous namespace

namespace armnn
{

bool LayerSupportBase::IsLayerSupported(const LayerType& type,
                                        const std::vector<TensorInfo>& infos,
                                        const BaseDescriptor& descriptor,
                                        const Optional<LstmInputParamsInfo>&, // lstmParamsInfo
                                        const Optional<QuantizedLstmInputParamsInfo>&, // quantizedLstmParamsInfo
                                        Optional<std::string&> reasonIfUnsupported) const
{
    switch(type)
    {
        case LayerType::MemCopy:
            return IsMemCopySupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::MemImport:
            return IsMemImportSupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::StandIn:
        {
            auto desc = *(PolymorphicDowncast<const StandInDescriptor*>(&descriptor));

            if (infos.size() != (desc.m_NumInputs + desc.m_NumOutputs))
            {
                throw InvalidArgumentException("Number of StandIn layer TensorInfos does not equal "
                                               "the combined number of input and output slots assigned "
                                               "to the StandIn descriptor");
            }

            std::vector<const TensorInfo*> inputInfos;
            for (uint32_t i = 0; i < desc.m_NumInputs; i++)
            {
                inputInfos.push_back(&infos[i]);
            }
            std::vector<const TensorInfo*> outputInfos;
            for (uint32_t i = desc.m_NumInputs; i < infos.size(); i++)
            {
                outputInfos.push_back(&infos[i]);
            }

            return IsStandInSupported(inputInfos,
                                      outputInfos,
                                      desc,
                                      reasonIfUnsupported);
        }
        default:
            return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
    }
}

bool LayerSupportBase::IsDetectionPostProcessSupported(const TensorInfo&, // boxEncodings
                                                       const TensorInfo&, // scores
                                                       const TensorInfo&, // anchors
                                                       const TensorInfo&, // detectionBoxes
                                                       const TensorInfo&, // detectionClasses
                                                       const TensorInfo&, // detectionScores
                                                       const TensorInfo&, // numDetections
                                                       const DetectionPostProcessDescriptor&, // descriptor
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMemCopySupported(const armnn::TensorInfo&, // input
                                          const armnn::TensorInfo&, // output
                                          armnn::Optional<std::string&>) const // reasonIfUnsupported
{
    return true;
}

bool LayerSupportBase::IsMemImportSupported(const armnn::TensorInfo&, // input
                                            const armnn::TensorInfo&, // output
                                            armnn::Optional<std::string&>) const // reasonIfUnsupported
{
    return true;
}

bool LayerSupportBase::IsMergeSupported(const TensorInfo&, // input0
                                        const TensorInfo&, // input1
                                        const TensorInfo&, // output
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQuantizedLstmSupported(const TensorInfo&, // input
                                                const TensorInfo&, // previousCellStateIn
                                                const TensorInfo&, // previousOutputIn
                                                const TensorInfo&, // cellStateOut
                                                const TensorInfo&, // output
                                                const QuantizedLstmInputParamsInfo&, // paramsInfo
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsShapeSupported(const TensorInfo&, // input
                                        const TensorInfo&, // output
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsStandInSupported(const std::vector<const TensorInfo*>&, // inputs
                                          const std::vector<const TensorInfo*>&, // outputs
                                          const StandInDescriptor&, // descriptor
                                          Optional<std::string&> reasonIfUnsupported) const
{
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << "StandIn layer is not executable via backends";

        reasonIfUnsupported.value() = message.str();
    }

    return false;
}

} // namespace armnn
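
LayerSupportBase reports every layer it does not explicitly handle as unsupported, writing the reason through reasonIfUnsupported, so a concrete backend only needs to override the checks for layers it can actually execute and can defer to the base class for everything else. The sketch below illustrates that pattern under stated assumptions: the sample namespace, the SampleLayerSupport class name, and the Float32-only Activation policy are hypothetical and not part of ArmNN.

// Hypothetical sketch (not part of ArmNN): a backend-specific LayerSupport class
// built on LayerSupportBase. Header path assumes the ArmNN source-tree layout.
#include <backendsCommon/LayerSupportBase.hpp>

#include <string>
#include <vector>

namespace sample
{

class SampleLayerSupport : public armnn::LayerSupportBase
{
public:
    bool IsLayerSupported(const armnn::LayerType& type,
                          const std::vector<armnn::TensorInfo>& infos,
                          const armnn::BaseDescriptor& descriptor,
                          const armnn::Optional<armnn::LstmInputParamsInfo>& lstmParamsInfo,
                          const armnn::Optional<armnn::QuantizedLstmInputParamsInfo>& quantizedLstmParamsInfo,
                          armnn::Optional<std::string&> reasonIfUnsupported) const override
    {
        switch (type)
        {
            // Assumption for illustration: this backend only runs Float32 activations.
            case armnn::LayerType::Activation:
            {
                if (!infos.empty() && infos[0].GetDataType() == armnn::DataType::Float32)
                {
                    return true;
                }
                if (reasonIfUnsupported)
                {
                    reasonIfUnsupported.value() = "Activation is only supported for Float32 tensors";
                }
                return false;
            }
            // Everything else defers to LayerSupportBase, which accepts MemCopy and
            // MemImport, rejects StandIn, and reports all remaining layers as unsupported.
            default:
                return armnn::LayerSupportBase::IsLayerSupported(type,
                                                                 infos,
                                                                 descriptor,
                                                                 lstmParamsInfo,
                                                                 quantizedLstmParamsInfo,
                                                                 reasonIfUnsupported);
        }
    }
};

} // namespace sample

Falling through to the base class in the default branch keeps the MemCopy, MemImport and StandIn handling shown above, and means a caller that passes a reasonIfUnsupported reference always receives a populated message whenever the answer is false.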