ArmNN 20.02: LayerSupportBase.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/Deprecated.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/Types.hpp>

#include <backendsCommon/LayerSupportBase.hpp>

namespace
{

bool DefaultLayerSupport(const char* func,
                         const char* file,
                         unsigned int line,
                         armnn::Optional<std::string&> reasonIfUnsupported)
{
    // NOTE: We only need to return the reason if the optional parameter is not empty
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << func << " is not implemented [" << file << ":" << line << "]";

        reasonIfUnsupported.value() = message.str();
    }

    return false;
}

} // anonymous namespace

namespace armnn
{

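// Every IsXxxSupported query below delegates to DefaultLayerSupport: it
// reports the layer as unsupported and, when the caller asks for a reason,
// records the function, file and line that produced the answer. Backend
// LayerSupport classes derive from LayerSupportBase and override only the
// queries their backend actually implements (an illustrative sketch follows
// the end of the namespace). The few queries that do not simply return false
// are commented where they occur.
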
bool LayerSupportBase::IsAbsSupported(const TensorInfo& /*input*/,
                                      const TensorInfo& /*output*/,
                                      Optional<std::string &> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsActivationSupported(const TensorInfo& /*input*/,
                                             const TensorInfo& /*output*/,
                                             const ActivationDescriptor& /*descriptor*/,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsAdditionSupported(const TensorInfo& /*input0*/,
                                           const TensorInfo& /*input1*/,
                                           const TensorInfo& /*output*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsArgMinMaxSupported(const armnn::TensorInfo &/*input*/,
                                            const armnn::TensorInfo &/*output*/,
                                            const armnn::ArgMinMaxDescriptor& /*descriptor*/,
                                            armnn::Optional<std::string &> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsBatchNormalizationSupported(const TensorInfo& /*input*/,
                                                     const TensorInfo& /*output*/,
                                                     const TensorInfo& /*mean*/,
                                                     const TensorInfo& /*var*/,
                                                     const TensorInfo& /*beta*/,
                                                     const TensorInfo& /*gamma*/,
                                                     const BatchNormalizationDescriptor& /*descriptor*/,
                                                     Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsBatchToSpaceNdSupported(const TensorInfo& /*input*/,
                                                 const TensorInfo& /*output*/,
                                                 const BatchToSpaceNdDescriptor& /*descriptor*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsComparisonSupported(const TensorInfo& /*input0*/,
                                             const TensorInfo& /*input1*/,
                                             const TensorInfo& /*output*/,
                                             const ComparisonDescriptor& /*descriptor*/,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConcatSupported(const std::vector<const TensorInfo*> /*inputs*/,
                                         const TensorInfo& /*output*/,
                                         const OriginsDescriptor& /*descriptor*/,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConstantSupported(const TensorInfo& /*output*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvertFp16ToFp32Supported(const TensorInfo& /*input*/,
                                                    const TensorInfo& /*output*/,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvertFp32ToFp16Supported(const TensorInfo& /*input*/,
                                                    const TensorInfo& /*output*/,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvolution2dSupported(const TensorInfo& /*input*/,
                                                const TensorInfo& /*output*/,
                                                const Convolution2dDescriptor& /*descriptor*/,
                                                const TensorInfo& /*weights*/,
                                                const Optional<TensorInfo>& /*biases*/,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDebugSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDepthToSpaceSupported(const TensorInfo& /*input*/,
                                               const TensorInfo& /*output*/,
                                               const DepthToSpaceDescriptor& /*descriptor*/,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDepthwiseConvolutionSupported(const TensorInfo& /*input*/,
                                                       const TensorInfo& /*output*/,
                                                       const DepthwiseConvolution2dDescriptor& /*descriptor*/,
                                                       const TensorInfo& /*weights*/,
                                                       const Optional<TensorInfo>& /*biases*/,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDequantizeSupported(const TensorInfo& /*input*/,
                                             const TensorInfo& /*output*/,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDetectionPostProcessSupported(const TensorInfo& /*boxEncodings*/,
                                                       const TensorInfo& /*scores*/,
                                                       const TensorInfo& /*anchors*/,
                                                       const TensorInfo& /*detectionBoxes*/,
                                                       const TensorInfo& /*detectionClasses*/,
                                                       const TensorInfo& /*detectionScores*/,
                                                       const TensorInfo& /*numDetections*/,
                                                       const DetectionPostProcessDescriptor& /*descriptor*/,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& /*input*/,
                                                              const TensorInfo& /*output*/,
                                                              const DepthwiseConvolution2dDescriptor& /*descriptor*/,
                                                              const TensorInfo& /*weights*/,
                                                              const Optional<TensorInfo>& /*biases*/,
                                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDivisionSupported(const TensorInfo& /*input0*/,
                                           const TensorInfo& /*input1*/,
                                           const TensorInfo& /*output*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

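// The stand-alone Abs and Rsqrt layers are deprecated in favour of
// ElementwiseUnary. For backwards compatibility this query forwards Abs and
// Rsqrt requests to the deprecated per-operation queries (suppressing the
// deprecation warnings while doing so); any other unary operation is reported
// as unsupported here.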
bool LayerSupportBase::IsElementwiseUnarySupported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   const ElementwiseUnaryDescriptor& descriptor,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    if (descriptor.m_Operation == UnaryOperation::Abs)
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        return IsAbsSupported(input, output, reasonIfUnsupported);
        ARMNN_NO_DEPRECATE_WARN_END
    }
    else if (descriptor.m_Operation == UnaryOperation::Rsqrt)
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        return IsRsqrtSupported(input, output, reasonIfUnsupported);
        ARMNN_NO_DEPRECATE_WARN_END
    }
    return false;
}

bool LayerSupportBase::IsEqualSupported(const armnn::TensorInfo& /*input0*/,
                                        const armnn::TensorInfo& /*input1*/,
                                        const armnn::TensorInfo& /*output*/,
                                        armnn::Optional<std::string &> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFakeQuantizationSupported(const TensorInfo& /*input*/,
                                                   const FakeQuantizationDescriptor& /*descriptor*/,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFloorSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFullyConnectedSupported(const TensorInfo& /*input*/,
                                                 const TensorInfo& /*output*/,
                                                 const TensorInfo& /*weights*/,
                                                 const TensorInfo& /*biases*/,
                                                 const FullyConnectedDescriptor& /*descriptor*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsGatherSupported(const armnn::TensorInfo& /*input0*/,
                                         const armnn::TensorInfo& /*input1*/,
                                         const armnn::TensorInfo& /*output*/,
                                         armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsGreaterSupported(const TensorInfo& /*input0*/,
                                          const TensorInfo& /*input1*/,
                                          const TensorInfo& /*output*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsInputSupported(const TensorInfo& /*input*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsInstanceNormalizationSupported(const TensorInfo& /*input*/,
                                                        const TensorInfo& /*output*/,
                                                        const InstanceNormalizationDescriptor& /*descriptor*/,
                                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsL2NormalizationSupported(const TensorInfo& /*input*/,
                                                  const TensorInfo& /*output*/,
                                                  const L2NormalizationDescriptor& /*descriptor*/,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsLogSoftmaxSupported(const TensorInfo& /*input*/,
                                             const TensorInfo& /*output*/,
                                             const LogSoftmaxDescriptor& /*descriptor*/,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsLstmSupported(const TensorInfo& /*input*/,
                                       const TensorInfo& /*outputStateIn*/,
                                       const TensorInfo& /*cellStateIn*/,
                                       const TensorInfo& /*scratchBuffer*/,
                                       const TensorInfo& /*outputStateOut*/,
                                       const TensorInfo& /*cellStateOut*/,
                                       const TensorInfo& /*output*/,
                                       const LstmDescriptor& /*descriptor*/,
                                       const LstmInputParamsInfo& /*paramsInfo*/,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMaximumSupported(const TensorInfo& /*input0*/,
                                          const TensorInfo& /*input1*/,
                                          const TensorInfo& /*output*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMeanSupported(const TensorInfo& /*input*/,
                                       const TensorInfo& /*output*/,
                                       const MeanDescriptor& /*descriptor*/,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

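// Memory copy and memory import are treated as supported by default, so the
// next two queries return true unconditionally instead of delegating to
// DefaultLayerSupport.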
bool LayerSupportBase::IsMemCopySupported(const armnn::TensorInfo& /*input*/,
                                          const armnn::TensorInfo& /*output*/,
                                          armnn::Optional<std::string &> /*reasonIfUnsupported*/) const
{
    return true;
}

bool LayerSupportBase::IsMemImportSupported(const armnn::TensorInfo& /*input*/,
                                            const armnn::TensorInfo& /*output*/,
                                            armnn::Optional<std::string &> /*reasonIfUnsupported*/) const
{
    return true;
}

bool LayerSupportBase::IsMergeSupported(const TensorInfo& /*input0*/,
                                        const TensorInfo& /*input1*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

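// Merger is the deprecated name for Concat, so this query simply forwards to
// IsConcatSupported.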
bool LayerSupportBase::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
                                         const TensorInfo& output,
                                         const OriginsDescriptor& descriptor,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
}

bool LayerSupportBase::IsMinimumSupported(const TensorInfo& /*input0*/,
                                          const TensorInfo& /*input1*/,
                                          const TensorInfo& /*output*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMultiplicationSupported(const TensorInfo& /*input0*/,
                                                 const TensorInfo& /*input1*/,
                                                 const TensorInfo& /*output*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsNormalizationSupported(const TensorInfo& /*input*/,
                                                const TensorInfo& /*output*/,
                                                const NormalizationDescriptor& /*descriptor*/,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsOutputSupported(const TensorInfo& /*output*/,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPadSupported(const TensorInfo& /*input*/,
                                      const TensorInfo& /*output*/,
                                      const PadDescriptor& /*descriptor*/,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPermuteSupported(const TensorInfo& /*input*/,
                                          const TensorInfo& /*output*/,
                                          const PermuteDescriptor& /*descriptor*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPooling2dSupported(const TensorInfo& /*input*/,
                                            const TensorInfo& /*output*/,
                                            const Pooling2dDescriptor& /*descriptor*/,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPreCompiledSupported(const TensorInfo& /*input*/,
                                              const PreCompiledDescriptor& /*descriptor*/,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPreluSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*alpha*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string &> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQuantizeSupported(const armnn::TensorInfo& /*input*/,
                                           const armnn::TensorInfo& /*output*/,
                                           armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQuantizedLstmSupported(const TensorInfo& /*input*/,
                                                const TensorInfo& /*previousCellStateIn*/,
                                                const TensorInfo& /*previousOutputIn*/,
                                                const TensorInfo& /*cellStateOut*/,
                                                const TensorInfo& /*output*/,
                                                const QuantizedLstmInputParamsInfo& /*paramsInfo*/,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsReshapeSupported(const TensorInfo& /*input*/,
                                          const TensorInfo& /*output*/,
                                          const ReshapeDescriptor& /*descriptor*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsResizeBilinearSupported(const TensorInfo& /*input*/,
                                                 const TensorInfo& /*output*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsResizeSupported(const TensorInfo& /*input*/,
                                         const TensorInfo& /*output*/,
                                         const ResizeDescriptor& /*descriptor*/,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsRsqrtSupported(const TensorInfo& /*input*/,
                                        const TensorInfo &/*output*/,
                                        Optional<std::string &> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSliceSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*output*/,
                                        const SliceDescriptor& /*descriptor*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSoftmaxSupported(const TensorInfo& /*input*/,
                                          const TensorInfo& /*output*/,
                                          const SoftmaxDescriptor& /*descriptor*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}
/**/
bool LayerSupportBase::IsSpaceToBatchNdSupported(const TensorInfo& /*input*/,
                                                 const TensorInfo& /*output*/,
                                                 const SpaceToBatchNdDescriptor& /*descriptor*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSpaceToDepthSupported(const TensorInfo& /*input*/,
                                               const TensorInfo& /*output*/,
                                               const SpaceToDepthDescriptor& /*descriptor*/,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSplitterSupported(const TensorInfo& /*input*/,
                                           const ViewsDescriptor& /*descriptor*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSplitterSupported(const TensorInfo& /*input*/,
                                           const std::vector<std::reference_wrapper<TensorInfo>>& /*outputs*/,
                                           const ViewsDescriptor& /*descriptor*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsStackSupported(const std::vector<const TensorInfo*>& /*inputs*/,
                                        const TensorInfo& /*output*/,
                                        const StackDescriptor& /*descriptor*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

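// StandIn layers are placeholders for operations that no backend can execute,
// so this query always fails, but with an explicit reason rather than the
// generic "not implemented" message.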
bool LayerSupportBase::IsStandInSupported(const std::vector<const TensorInfo*>& /*inputs*/,
                                          const std::vector<const TensorInfo*>& /*outputs*/,
                                          const StandInDescriptor& /*descriptor*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << "StandIn layer is not executable via backends";

        reasonIfUnsupported.value() = message.str();
    }

    return false;
}

bool LayerSupportBase::IsStridedSliceSupported(const TensorInfo& /*input*/,
                                               const TensorInfo& /*output*/,
                                               const StridedSliceDescriptor& /*descriptor*/,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSubtractionSupported(const TensorInfo& /*input0*/,
                                              const TensorInfo& /*input1*/,
                                              const TensorInfo& /*output*/,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSwitchSupported(const TensorInfo& /*input0*/,
                                         const TensorInfo& /*input1*/,
                                         const TensorInfo& /*output0*/,
                                         const TensorInfo& /*output1*/,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsTransposeConvolution2dSupported(const TensorInfo& /*input*/,
                                                         const TensorInfo& /*output*/,
                                                         const TransposeConvolution2dDescriptor& /*descriptor*/,
                                                         const TensorInfo& /*weights*/,
                                                         const Optional<TensorInfo>& /*biases*/,
                                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsTransposeSupported(const TensorInfo& /*input*/,
                                            const TensorInfo& /*output*/,
                                            const TransposeDescriptor& /*descriptor*/,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

} // namespace armnn
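The listing above only supplies pessimistic defaults. To make the pattern concrete, here is a minimal sketch, not taken from ArmNN, of how a backend-specific LayerSupport class typically builds on LayerSupportBase: it inherits the "everything is unsupported" answers and overrides just the queries its backend implements. The class name SampleLayerSupport and its Float32-only rule are illustrative assumptions, not part of the 20.02 code base.

#include <armnn/Descriptors.hpp>
#include <armnn/Optional.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

#include <backendsCommon/LayerSupportBase.hpp>

#include <string>

namespace sample
{

class SampleLayerSupport : public armnn::LayerSupportBase
{
public:
    bool IsActivationSupported(const armnn::TensorInfo& input,
                               const armnn::TensorInfo& output,
                               const armnn::ActivationDescriptor& /*descriptor*/,
                               armnn::Optional<std::string&> reasonIfUnsupported
                                   = armnn::EmptyOptional()) const override
    {
        // Hypothetical backend rule: only Float32 activations are handled.
        if (input.GetDataType()  != armnn::DataType::Float32 ||
            output.GetDataType() != armnn::DataType::Float32)
        {
            if (reasonIfUnsupported)
            {
                reasonIfUnsupported.value() = "SampleLayerSupport: only Float32 activations are supported";
            }
            return false;
        }
        return true;
    }

    // Every query that is not overridden (Convolution2d, Lstm, ...) falls
    // through to LayerSupportBase and is reported as "not implemented".
};

} // namespace sample

A backend would normally hand an instance of such a class to the optimizer through its IBackendInternal::GetLayerSupport() implementation, so that unsupported layers can be rejected or reassigned with the reason strings produced above.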