author      Derek Lamberti <derek.lamberti@arm.com>        2019-10-21 10:46:16 +0100
committer   Matteo Martincigh <matteo.martincigh@arm.com>  2019-10-21 12:59:23 +0000
commit      013c390c2d9829fede2d8b1d59c3f2a497730462 (patch)
tree        7918e1e4e3445a741695772864d200b3979dd1f8 /src/backends
parent      e80ebd101b516751a798aa1b1d669e9117a32266 (diff)
download    armnn-013c390c2d9829fede2d8b1d59c3f2a497730462.tar.gz
IVGCVSW-4009 StandInLayer frontend API
Change-Id: I058c57b554769799c6775813215070ef47790e3d
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
Diffstat (limited to 'src/backends')
-rw-r--r--   src/backends/backendsCommon/LayerSupportBase.cpp                16
-rw-r--r--   src/backends/backendsCommon/LayerSupportBase.hpp                  5
-rw-r--r--   src/backends/backendsCommon/WorkloadFactory.cpp                  41
-rw-r--r--   src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp    21
4 files changed, 83 insertions(+), 0 deletions(-)
diff --git a/src/backends/backendsCommon/LayerSupportBase.cpp b/src/backends/backendsCommon/LayerSupportBase.cpp
index 358106e5e9..9ffad7b8e2 100644
--- a/src/backends/backendsCommon/LayerSupportBase.cpp
+++ b/src/backends/backendsCommon/LayerSupportBase.cpp
@@ -502,6 +502,22 @@ bool LayerSupportBase::IsStackSupported(const std::vector<const TensorInfo*>& in
return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}
+bool LayerSupportBase::IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
+ const std::vector<const TensorInfo*>& outputs,
+ const StandInDescriptor& descriptor,
+ Optional<std::string&> reasonIfUnsupported) const
+{
+ if (reasonIfUnsupported)
+ {
+ std::stringstream message;
+ message << "StandIn layer is not executable via backends";
+
+ reasonIfUnsupported.value() = message.str();
+ }
+
+ return false;
+}
+
bool LayerSupportBase::IsStridedSliceSupported(const TensorInfo& input,
const TensorInfo& output,
const StridedSliceDescriptor& descriptor,
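As a quick illustration of the default behaviour added above, here is a standalone sketch (not part of the patch; the NullLayerSupport class, the main() driver and the include paths are assumptions) that queries IsStandInSupported through a class inheriting the LayerSupportBase default and prints the populated reason:

#include <armnn/Descriptors.hpp>
#include <armnn/Optional.hpp>
#include <armnn/Tensor.hpp>
#include <backendsCommon/LayerSupportBase.hpp>

#include <iostream>
#include <string>
#include <vector>

// Hypothetical support class that simply inherits every default from LayerSupportBase.
struct NullLayerSupport : public armnn::LayerSupportBase {};

int main()
{
    NullLayerSupport support;
    std::vector<const armnn::TensorInfo*> inputs;   // left empty purely for illustration
    std::vector<const armnn::TensorInfo*> outputs;
    armnn::StandInDescriptor descriptor;            // default construction assumed

    std::string reason;
    bool supported = support.IsStandInSupported(inputs, outputs, descriptor,
                                                armnn::Optional<std::string&>(reason));

    // Expected, given the implementation above:
    // supported == false, reason == "StandIn layer is not executable via backends"
    std::cout << std::boolalpha << supported << ": " << reason << std::endl;
    return 0;
}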
diff --git a/src/backends/backendsCommon/LayerSupportBase.hpp b/src/backends/backendsCommon/LayerSupportBase.hpp
index d4c37c1a91..e99cb67614 100644
--- a/src/backends/backendsCommon/LayerSupportBase.hpp
+++ b/src/backends/backendsCommon/LayerSupportBase.hpp
@@ -312,6 +312,11 @@ public:
const StackDescriptor& descriptor,
Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
+ bool IsStandInSupported(const std::vector<const TensorInfo*>& inputs,
+ const std::vector<const TensorInfo*>& outputs,
+ const StandInDescriptor& descriptor,
+ Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override;
+
bool IsStridedSliceSupported(const TensorInfo& input,
const TensorInfo& output,
const StridedSliceDescriptor& descriptor,
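Because IsStandInSupported is a virtual on LayerSupportBase with a rejecting default, a concrete backend's layer-support class never has to mention StandIn; it overrides only the layers it actually implements. A minimal sketch with a hypothetical backend class (includes as in the previous sketch; the unconditional return true is purely illustrative):

// Hypothetical backend layer-support class. It inherits the default
// IsStandInSupported answer ("unsupported") from LayerSupportBase and
// overrides only what the backend implements, e.g. StridedSlice, using
// the signature declared in the header above.
class MyBackendLayerSupport : public armnn::LayerSupportBase
{
public:
    bool IsStridedSliceSupported(const armnn::TensorInfo& input,
                                 const armnn::TensorInfo& output,
                                 const armnn::StridedSliceDescriptor& descriptor,
                                 armnn::Optional<std::string&> reasonIfUnsupported
                                     = armnn::EmptyOptional()) const override
    {
        (void)input; (void)output; (void)descriptor; (void)reasonIfUnsupported;
        return true; // illustrative only
    }
};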
diff --git a/src/backends/backendsCommon/WorkloadFactory.cpp b/src/backends/backendsCommon/WorkloadFactory.cpp
index 30dfa023f9..34e4cbe579 100644
--- a/src/backends/backendsCommon/WorkloadFactory.cpp
+++ b/src/backends/backendsCommon/WorkloadFactory.cpp
@@ -902,6 +902,47 @@ bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
break;
}
+ case LayerType::StandIn:
+ {
+ auto cLayer = boost::polymorphic_downcast<const StandInLayer*>(&layer);
+
+ // Get vector of all inputs.
+ auto getTensorInfoIn = [&dataType](const InputSlot& slot)
+ {
+ return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
+ };
+ auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
+ {
+ return OverrideDataType(slot.GetTensorInfo(), dataType);
+ };
+ auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
+ auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
+ std::vector<TensorInfo> inputs(beginI, endI);
+
+ auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
+ auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
+ std::vector<TensorInfo> outputs(beginO, endO);
+
+
+ auto getTensorInfoPtr = [](const TensorInfo& info)
+ {
+ return &info;
+ };
+ auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
+ auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
+ std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
+
+ auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
+ auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
+ std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
+
+
+ result = layerSupportObject->IsStandInSupported(inputPtrs,
+ outputPtrs,
+ cLayer->GetParameters(),
+ reason);
+ break;
+ }
case LayerType::StridedSlice:
{
auto cLayer = boost::polymorphic_downcast<const StridedSliceLayer*>(&layer);
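The StandIn case above uses boost::make_transform_iterator twice: first to materialise TensorInfo values from the layer's input and output slots, then to build parallel vectors of const pointers into those values, which is the form IsStandInSupported accepts. A stripped-down sketch of the same two-pass idiom, substituting std::string for TensorInfo so it runs on its own:

#include <boost/iterator/transform_iterator.hpp>

#include <iostream>
#include <string>
#include <vector>

int main()
{
    std::vector<int> sizes = {1, 2, 3};

    // Pass 1: materialise transformed values (stands in for building 'inputs'/'outputs').
    auto toValue = [](int n) { return std::string(static_cast<std::size_t>(n), 'x'); };
    auto beginV  = boost::make_transform_iterator(sizes.begin(), toValue);
    auto endV    = boost::make_transform_iterator(sizes.end(), toValue);
    std::vector<std::string> values(beginV, endV);

    // Pass 2: collect const pointers to the materialised values
    // (stands in for building 'inputPtrs'/'outputPtrs').
    auto toPtr  = [](const std::string& s) { return &s; };
    auto beginP = boost::make_transform_iterator(values.begin(), toPtr);
    auto endP   = boost::make_transform_iterator(values.end(), toPtr);
    std::vector<const std::string*> ptrs(beginP, endP);

    for (const std::string* p : ptrs)
    {
        std::cout << *p << std::endl;
    }
    return 0;
}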
diff --git a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
index 9bddae9759..c52d6a9511 100644
--- a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
+++ b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
@@ -380,6 +380,25 @@ struct LayerTypePolicy<armnn::LayerType::name, DataType> \
// Use this version for layers whose constructor takes 2 parameters(descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)
+
+#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
+template<armnn::DataType DataType> \
+struct LayerTypePolicy<armnn::LayerType::name, DataType> \
+{ \
+ using Type = armnn::name##Layer; \
+ using Desc = descType; \
+ constexpr static const char* NameStr = #name; \
+ \
+ static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
+ unsigned int nIn, unsigned int nOut) \
+ { \
+ return std::unique_ptr<armnn::IWorkload>(); \
+ } \
+};
+
+#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
+#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)
+
// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;
@@ -489,6 +508,8 @@ DECLARE_LAYER_POLICY_2_PARAM(Splitter)
DECLARE_LAYER_POLICY_2_PARAM(Stack)
+DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)
+
DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)
DECLARE_LAYER_POLICY_1_PARAM(Subtraction)
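For reference, the new DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn) invocation expands to roughly the specialisation below (reconstructed by hand from the macro above, with whitespace added). Returning an empty unique_ptr is how the test framework learns that no dummy workload can be built for this layer type:

template<armnn::DataType DataType>
struct LayerTypePolicy<armnn::LayerType::StandIn, DataType>
{
    using Type = armnn::StandInLayer;
    using Desc = armnn::StandInDescriptor;
    constexpr static const char* NameStr = "StandIn";

    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory,
                                                               unsigned int nIn, unsigned int nOut)
    {
        // StandIn is never executed by a backend, so there is no workload to create.
        return std::unique_ptr<armnn::IWorkload>();
    }
};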