ArmNN 20.05
LayerSupportBase.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/Deprecated.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/Types.hpp>

#include <backendsCommon/LayerSupportBase.hpp>

namespace
{

bool DefaultLayerSupport(const char* func,
                         const char* file,
                         unsigned int line,
                         armnn::Optional<std::string&> reasonIfUnsupported)
{
    // NOTE: We only need to return the reason if the optional parameter is not empty
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << func << " is not implemented [" << file << ":" << line << "]";

        reasonIfUnsupported.value() = message.str();
    }

    return false;
}

} // anonymous namespace

namespace armnn
{

bool LayerSupportBase::IsAbsSupported(const TensorInfo& /*input*/,
                                      const TensorInfo& /*output*/,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsActivationSupported(const TensorInfo& /*input*/,
                                             const TensorInfo& /*output*/,
                                             const ActivationDescriptor& /*descriptor*/,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsAdditionSupported(const TensorInfo& /*input0*/,
                                           const TensorInfo& /*input1*/,
                                           const TensorInfo& /*output*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsArgMinMaxSupported(const armnn::TensorInfo& /*input*/,
                                            const armnn::TensorInfo& /*output*/,
                                            const armnn::ArgMinMaxDescriptor& /*descriptor*/,
                                            armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsBatchNormalizationSupported(const TensorInfo& /*input*/,
                                                     const TensorInfo& /*output*/,
                                                     const TensorInfo& /*mean*/,
                                                     const TensorInfo& /*var*/,
                                                     const TensorInfo& /*beta*/,
                                                     const TensorInfo& /*gamma*/,
                                                     const BatchNormalizationDescriptor& /*descriptor*/,
                                                     Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsBatchToSpaceNdSupported(const TensorInfo& /*input*/,
                                                 const TensorInfo& /*output*/,
                                                 const BatchToSpaceNdDescriptor& /*descriptor*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsComparisonSupported(const TensorInfo& /*input0*/,
                                             const TensorInfo& /*input1*/,
                                             const TensorInfo& /*output*/,
                                             const ComparisonDescriptor& /*descriptor*/,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConcatSupported(const std::vector<const TensorInfo*> /*inputs*/,
                                         const TensorInfo& /*output*/,
                                         const OriginsDescriptor& /*descriptor*/,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConstantSupported(const TensorInfo& /*output*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvertBf16ToFp32Supported(const TensorInfo& /*input*/,
                                                    const TensorInfo& /*output*/,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvertFp16ToFp32Supported(const TensorInfo& /*input*/,
                                                    const TensorInfo& /*output*/,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvertFp32ToBf16Supported(const TensorInfo& /*input*/,
                                                    const TensorInfo& /*output*/,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvertFp32ToFp16Supported(const TensorInfo& /*input*/,
                                                    const TensorInfo& /*output*/,
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvolution2dSupported(const TensorInfo& /*input*/,
                                                const TensorInfo& /*output*/,
                                                const Convolution2dDescriptor& /*descriptor*/,
                                                const TensorInfo& /*weights*/,
                                                const Optional<TensorInfo>& /*biases*/,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDebugSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDepthToSpaceSupported(const TensorInfo& /*input*/,
                                               const TensorInfo& /*output*/,
                                               const DepthToSpaceDescriptor& /*descriptor*/,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDepthwiseConvolutionSupported(const TensorInfo& /*input*/,
                                                       const TensorInfo& /*output*/,
                                                       const DepthwiseConvolution2dDescriptor& /*descriptor*/,
                                                       const TensorInfo& /*weights*/,
                                                       const Optional<TensorInfo>& /*biases*/,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDequantizeSupported(const TensorInfo& /*input*/,
                                             const TensorInfo& /*output*/,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDetectionPostProcessSupported(const TensorInfo& /*boxEncodings*/,
                                                       const TensorInfo& /*scores*/,
                                                       const TensorInfo& /*anchors*/,
                                                       const TensorInfo& /*detectionBoxes*/,
                                                       const TensorInfo& /*detectionClasses*/,
                                                       const TensorInfo& /*detectionScores*/,
                                                       const TensorInfo& /*numDetections*/,
                                                       const DetectionPostProcessDescriptor& /*descriptor*/,
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDilatedDepthwiseConvolutionSupported(const TensorInfo& /*input*/,
                                                              const TensorInfo& /*output*/,
                                                              const DepthwiseConvolution2dDescriptor& /*descriptor*/,
                                                              const TensorInfo& /*weights*/,
                                                              const Optional<TensorInfo>& /*biases*/,
                                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDivisionSupported(const TensorInfo& /*input0*/,
                                           const TensorInfo& /*input1*/,
                                           const TensorInfo& /*output*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsElementwiseUnarySupported(const TensorInfo& input,
                                                   const TensorInfo& output,
                                                   const ElementwiseUnaryDescriptor& descriptor,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    if (descriptor.m_Operation == UnaryOperation::Abs)
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        return IsAbsSupported(input, output, reasonIfUnsupported);
        ARMNN_NO_DEPRECATE_WARN_END
    }
    else if (descriptor.m_Operation == UnaryOperation::Rsqrt)
    {
        ARMNN_NO_DEPRECATE_WARN_BEGIN
        return IsRsqrtSupported(input, output, reasonIfUnsupported);
        ARMNN_NO_DEPRECATE_WARN_END
    }
    return false;
}

bool LayerSupportBase::IsEqualSupported(const armnn::TensorInfo& /*input0*/,
                                        const armnn::TensorInfo& /*input1*/,
                                        const armnn::TensorInfo& /*output*/,
                                        armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFakeQuantizationSupported(const TensorInfo& /*input*/,
                                                   const FakeQuantizationDescriptor& /*descriptor*/,
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFloorSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFullyConnectedSupported(const TensorInfo& /*input*/,
                                                 const TensorInfo& /*output*/,
                                                 const TensorInfo& /*weights*/,
                                                 const TensorInfo& /*biases*/,
                                                 const FullyConnectedDescriptor& /*descriptor*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsGatherSupported(const armnn::TensorInfo& /*input0*/,
                                         const armnn::TensorInfo& /*input1*/,
                                         const armnn::TensorInfo& /*output*/,
                                         armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsGreaterSupported(const TensorInfo& /*input0*/,
                                          const TensorInfo& /*input1*/,
                                          const TensorInfo& /*output*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsInputSupported(const TensorInfo& /*input*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsInstanceNormalizationSupported(const TensorInfo& /*input*/,
                                                        const TensorInfo& /*output*/,
                                                        const InstanceNormalizationDescriptor& /*descriptor*/,
                                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsL2NormalizationSupported(const TensorInfo& /*input*/,
                                                  const TensorInfo& /*output*/,
                                                  const L2NormalizationDescriptor& /*descriptor*/,
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsLogSoftmaxSupported(const TensorInfo& /*input*/,
                                             const TensorInfo& /*output*/,
                                             const LogSoftmaxDescriptor& /*descriptor*/,
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsLstmSupported(const TensorInfo& /*input*/,
                                       const TensorInfo& /*outputStateIn*/,
                                       const TensorInfo& /*cellStateIn*/,
                                       const TensorInfo& /*scratchBuffer*/,
                                       const TensorInfo& /*outputStateOut*/,
                                       const TensorInfo& /*cellStateOut*/,
                                       const TensorInfo& /*output*/,
                                       const LstmDescriptor& /*descriptor*/,
                                       const LstmInputParamsInfo& /*paramsInfo*/,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMaximumSupported(const TensorInfo& /*input0*/,
                                          const TensorInfo& /*input1*/,
                                          const TensorInfo& /*output*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMeanSupported(const TensorInfo& /*input*/,
                                       const TensorInfo& /*output*/,
                                       const MeanDescriptor& /*descriptor*/,
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMemCopySupported(const armnn::TensorInfo& /*input*/,
                                          const armnn::TensorInfo& /*output*/,
                                          armnn::Optional<std::string&> /*reasonIfUnsupported*/) const
{
    return true;
}

bool LayerSupportBase::IsMemImportSupported(const armnn::TensorInfo& /*input*/,
                                            const armnn::TensorInfo& /*output*/,
                                            armnn::Optional<std::string&> /*reasonIfUnsupported*/) const
{
    return true;
}

bool LayerSupportBase::IsMergeSupported(const TensorInfo& /*input0*/,
                                        const TensorInfo& /*input1*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
                                         const TensorInfo& output,
                                         const OriginsDescriptor& descriptor,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
}

bool LayerSupportBase::IsMinimumSupported(const TensorInfo& /*input0*/,
                                          const TensorInfo& /*input1*/,
                                          const TensorInfo& /*output*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMultiplicationSupported(const TensorInfo& /*input0*/,
                                                 const TensorInfo& /*input1*/,
                                                 const TensorInfo& /*output*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsNormalizationSupported(const TensorInfo& /*input*/,
                                                const TensorInfo& /*output*/,
                                                const NormalizationDescriptor& /*descriptor*/,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsOutputSupported(const TensorInfo& /*output*/,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPadSupported(const TensorInfo& /*input*/,
                                      const TensorInfo& /*output*/,
                                      const PadDescriptor& /*descriptor*/,
                                      Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPermuteSupported(const TensorInfo& /*input*/,
                                          const TensorInfo& /*output*/,
                                          const PermuteDescriptor& /*descriptor*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPooling2dSupported(const TensorInfo& /*input*/,
                                            const TensorInfo& /*output*/,
                                            const Pooling2dDescriptor& /*descriptor*/,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPreCompiledSupported(const TensorInfo& /*input*/,
                                              const PreCompiledDescriptor& /*descriptor*/,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPreluSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*alpha*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQuantizeSupported(const armnn::TensorInfo& /*input*/,
                                           const armnn::TensorInfo& /*output*/,
                                           armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQLstmSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*previousOutputIn*/,
                                        const TensorInfo& /*previousCellStateIn*/,
                                        const TensorInfo& /*outputStateOut*/,
                                        const TensorInfo& /*cellStateOut*/,
                                        const TensorInfo& /*output*/,
                                        const QLstmDescriptor& /*descriptor*/,
                                        const LstmInputParamsInfo& /*paramsInfo*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQuantizedLstmSupported(const TensorInfo& /*input*/,
                                                const TensorInfo& /*previousCellStateIn*/,
                                                const TensorInfo& /*previousOutputIn*/,
                                                const TensorInfo& /*cellStateOut*/,
                                                const TensorInfo& /*output*/,
                                                const QuantizedLstmInputParamsInfo& /*paramsInfo*/,
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsReshapeSupported(const TensorInfo& /*input*/,
                                          const TensorInfo& /*output*/,
                                          const ReshapeDescriptor& /*descriptor*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsResizeBilinearSupported(const TensorInfo& /*input*/,
                                                 const TensorInfo& /*output*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsResizeSupported(const TensorInfo& /*input*/,
                                         const TensorInfo& /*output*/,
                                         const ResizeDescriptor& /*descriptor*/,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsRsqrtSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*output*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSliceSupported(const TensorInfo& /*input*/,
                                        const TensorInfo& /*output*/,
                                        const SliceDescriptor& /*descriptor*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSoftmaxSupported(const TensorInfo& /*input*/,
                                          const TensorInfo& /*output*/,
                                          const SoftmaxDescriptor& /*descriptor*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSpaceToBatchNdSupported(const TensorInfo& /*input*/,
                                                 const TensorInfo& /*output*/,
                                                 const SpaceToBatchNdDescriptor& /*descriptor*/,
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSpaceToDepthSupported(const TensorInfo& /*input*/,
                                               const TensorInfo& /*output*/,
                                               const SpaceToDepthDescriptor& /*descriptor*/,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSplitterSupported(const TensorInfo& /*input*/,
                                           const ViewsDescriptor& /*descriptor*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSplitterSupported(const TensorInfo& /*input*/,
                                           const std::vector<std::reference_wrapper<TensorInfo>>& /*outputs*/,
                                           const ViewsDescriptor& /*descriptor*/,
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsStackSupported(const std::vector<const TensorInfo*>& /*inputs*/,
                                        const TensorInfo& /*output*/,
                                        const StackDescriptor& /*descriptor*/,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsStandInSupported(const std::vector<const TensorInfo*>& /*inputs*/,
                                          const std::vector<const TensorInfo*>& /*outputs*/,
                                          const StandInDescriptor& /*descriptor*/,
                                          Optional<std::string&> reasonIfUnsupported) const
{
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << "StandIn layer is not executable via backends";

        reasonIfUnsupported.value() = message.str();
    }

    return false;
}

bool LayerSupportBase::IsStridedSliceSupported(const TensorInfo& /*input*/,
                                               const TensorInfo& /*output*/,
                                               const StridedSliceDescriptor& /*descriptor*/,
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSubtractionSupported(const TensorInfo& /*input0*/,
                                              const TensorInfo& /*input1*/,
                                              const TensorInfo& /*output*/,
                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSwitchSupported(const TensorInfo& /*input0*/,
                                         const TensorInfo& /*input1*/,
                                         const TensorInfo& /*output0*/,
                                         const TensorInfo& /*output1*/,
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsTransposeConvolution2dSupported(const TensorInfo& /*input*/,
                                                         const TensorInfo& /*output*/,
                                                         const TransposeConvolution2dDescriptor& /*descriptor*/,
                                                         const TensorInfo& /*weights*/,
                                                         const Optional<TensorInfo>& /*biases*/,
                                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsTransposeSupported(const TensorInfo& /*input*/,
                                            const TensorInfo& /*output*/,
                                            const TransposeDescriptor& /*descriptor*/,
                                            Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

} // namespace armnn
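
Usage note: every query above defers to DefaultLayerSupport, which returns false and writes "<function> is not implemented [<file>:<line>]" into the optional reason string; IsMemCopySupported and IsMemImportSupported are the only queries that unconditionally return true. The sketch below shows how a backend might derive from LayerSupportBase, override a single query, and how a caller can read the reason string. It is a minimal illustration and not part of the file above: the class name HypotheticalLayerSupport, the Float32-only rule, and the internal include path <backendsCommon/LayerSupportBase.hpp> are assumptions made for the example.

// Sketch only: assumes the ArmNN 20.05 headers and internal include paths are available.
#include <backendsCommon/LayerSupportBase.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/Optional.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>

#include <iostream>
#include <string>

// Illustrative backend: overrides one query and inherits the
// "not implemented" default behaviour for everything else.
class HypotheticalLayerSupport : public armnn::LayerSupportBase
{
public:
    bool IsAdditionSupported(const armnn::TensorInfo& input0,
                             const armnn::TensorInfo& input1,
                             const armnn::TensorInfo& output,
                             armnn::Optional<std::string&> reasonIfUnsupported
                                 = armnn::EmptyOptional()) const override
    {
        // Assumed rule for the example: claim support only for Float32 tensors.
        bool supported = input0.GetDataType() == armnn::DataType::Float32 &&
                         input1.GetDataType() == armnn::DataType::Float32 &&
                         output.GetDataType() == armnn::DataType::Float32;
        if (!supported && reasonIfUnsupported)
        {
            reasonIfUnsupported.value() = "Addition is only supported for Float32 tensors";
        }
        return supported;
    }
};

int main()
{
    HypotheticalLayerSupport layerSupport;
    armnn::TensorInfo info({ 1, 4 }, armnn::DataType::Float32);
    std::string reason;

    // Overridden query: returns true for Float32 tensors.
    bool addOk = layerSupport.IsAdditionSupported(info, info, info,
                                                  armnn::Optional<std::string&>(reason));

    // Inherited query: falls through to DefaultLayerSupport, which returns false
    // and fills 'reason' with "<function> is not implemented [<file>:<line>]".
    bool padOk = layerSupport.IsPadSupported(info, info, armnn::PadDescriptor(),
                                             armnn::Optional<std::string&>(reason));

    std::cout << "addition supported: " << addOk << "\n"
              << "pad supported: " << padOk << " (" << reason << ")\n";
    return 0;
}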