author     Sadik Armagan <sadik.armagan@arm.com>    2019-08-19 09:49:58 +0100
committer  Sadik Armagan <sadik.armagan@arm.com>    2019-08-19 09:49:58 +0100
commit     64b19b548bd23f25c81006f378b7631d0288e26e (patch)
tree       79b025e8431384c8802d0f4dd632e57b915352ec
parent     999e209da5f4bc3307f9c07021cdb2655add0aea (diff)
download   android-nn-driver-64b19b548bd23f25c81006f378b7631d0288e26e.tar.gz
IVGCVSW-3618 Fix VTS/Neural Network Test Failures

* Added IsReshapeSupported check on available backend(s) after adding Reshape layer.

Signed-off-by: Sadik Armagan <sadik.armagan@arm.com>
Change-Id: I38a36d5a3db0d67f0e4b4be2ef672dedb75bff22
-rw-r--r--  1.2/HalPolicy.cpp    | 18
-rw-r--r--  ConversionUtils.hpp  | 54
2 files changed, 61 insertions(+), 11 deletions(-)
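Before the diff itself, the pattern this commit applies can be sketched in isolation: query the available backends for support of the implicitly added Reshape layer, and propagate failure up through BroadcastTensor to the Convert* caller so the operation can fall back to another driver. The sketch below is a minimal, self-contained model of that control flow; Backend, anyBackendSupportsReshape, broadcastTensor and convertMaximum are hypothetical stand-ins, not the driver's ConversionData / FORWARD_LAYER_SUPPORT_FUNC API.

// Minimal sketch of the check-then-fail pattern this commit applies.
// All names here are hypothetical stand-ins for the android-nn-driver API.
#include <cstdio>
#include <vector>

struct Backend
{
    bool supportsReshape; // stand-in for a backend's IsReshapeSupported answer
};

// Mirrors the spirit of FORWARD_LAYER_SUPPORT_FUNC: the reshape is usable
// if at least one of the available backends supports it.
bool anyBackendSupportsReshape(const std::vector<Backend>& backends)
{
    for (const Backend& b : backends)
    {
        if (b.supportsReshape)
        {
            return true;
        }
    }
    return false;
}

// Mirrors the new bool-returning BroadcastTensor: bail out early if the
// implicit Reshape cannot run anywhere, instead of adding it unconditionally.
bool broadcastTensor(const std::vector<Backend>& backends)
{
    if (!anyBackendSupportsReshape(backends))
    {
        return false;
    }
    // ... add the Reshape layer and connect the input slots here ...
    return true;
}

// Mirrors the updated callers (ConvertMaximum, ConvertAdd, ConvertSub, ...):
// propagate the failure so the conversion as a whole reports unsupported.
bool convertMaximum(const std::vector<Backend>& backends)
{
    bool isReshapeSupported = broadcastTensor(backends);
    if (!isReshapeSupported)
    {
        return false;
    }
    // ... set up and track the layer's output slot here ...
    return true;
}

int main()
{
    std::vector<Backend> none{ { false } };
    std::vector<Backend> some{ { false }, { true } };
    std::printf("no capable backend -> %d\n", convertMaximum(none) ? 1 : 0);
    std::printf("capable backend    -> %d\n", convertMaximum(some) ? 1 : 0);
    return 0;
}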
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index 7fe5f88e..3cd7ce48 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -545,7 +545,11 @@ bool HalPolicy::ConvertMaximum(const Operation& operation, const Model& model, C
armnn::IConnectableLayer* layer = data.m_Network->AddMaximumLayer();
assert(layer != nullptr);
- BroadcastTensor(input0, input1, layer, *data.m_Network);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+ if (!isReshapeSupported)
+ {
+ return false;
+ }
return SetupAndTrackLayerOutputSlot<hal_1_2::HalPolicy>(operation, 0, *layer, model, data);
}
@@ -596,7 +600,11 @@ bool HalPolicy::ConvertMinimum(const Operation& operation, const Model& model, C
armnn::IConnectableLayer* const layer = data.m_Network->AddMinimumLayer();
assert(layer != nullptr);
- BroadcastTensor(input0, input1, layer, *data.m_Network);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+ if (!isReshapeSupported)
+ {
+ return false;
+ }
return SetupAndTrackLayerOutputSlot<hal_1_2::HalPolicy>(operation, 0, *layer, model, data);
}
@@ -755,7 +763,11 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
return Fail("%s: AddPreluLayer failed", __func__);
}
- BroadcastTensor(input, alpha, layer, *data.m_Network);
+ bool isReshapeSupported = BroadcastTensor(input, alpha, layer, data);
+ if (!isReshapeSupported)
+ {
+ return false;
+ }
return SetupAndTrackLayerOutputSlot<hal_1_2::HalPolicy>(operation, 0, *layer, model, data);
}
diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index 41b3c1cc..cc06ccee 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -232,8 +232,8 @@ armnn::IConnectableLayer& AddReshapeLayer(armnn::INetwork& network, LayerHandleT
return *reshapeLayer;
}
-void BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1,
- armnn::IConnectableLayer* startLayer, armnn::INetwork& network)
+bool BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1,
+ armnn::IConnectableLayer* startLayer, ConversionData& data)
{
BOOST_ASSERT(startLayer != nullptr);
@@ -249,7 +249,7 @@ void BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1,
input0.Connect(startLayer->GetInputSlot(0));
input1.Connect(startLayer->GetInputSlot(1));
- return;
+ return true;
}
// Since the numbers of dimensions do not match, we need to add degenerate dimensions
@@ -273,7 +273,24 @@ void BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1,
armnn::TensorInfo reshapedInfo = smallInfo;
reshapedInfo.SetShape(armnn::TensorShape{ boost::numeric_cast<unsigned int>(reshapedDimensions.size()),
reshapedDimensions.data() });
- armnn::IConnectableLayer& reshapeLayer = AddReshapeLayer(network, smallInputHandle, reshapedInfo);
+
+ // ReshapeDescriptor that is ignored in the IsReshapeSupported function
+ armnn::ReshapeDescriptor reshapeDescriptor;
+
+ bool isSupported = false;
+ FORWARD_LAYER_SUPPORT_FUNC(__func__,
+ IsReshapeSupported,
+ data.m_Backends,
+ isSupported,
+ reshapedInfo,
+ reshapeDescriptor);
+ if (!isSupported)
+ {
+ return false;
+ }
+
+ BOOST_ASSERT(data.m_Network != nullptr);
+ armnn::IConnectableLayer& reshapeLayer = AddReshapeLayer(*data.m_Network, smallInputHandle, reshapedInfo);
if (input0IsSmaller)
{
@@ -301,6 +318,8 @@ void BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1,
input0.Connect(startLayer->GetInputSlot(0));
reshapeLayer.GetOutputSlot(0).Connect(startLayer->GetInputSlot(1));
}
+
+ return true;
}
void CalcPadding(uint32_t input, uint32_t kernel, uint32_t stride, uint32_t& outPadHead, uint32_t& outPadTail,
@@ -1489,7 +1508,12 @@ bool ConvertAdd(const Operation& operation, const Model& model, ConversionData&
if (endLayer != nullptr)
{
- BroadcastTensor(input0, input1, startLayer, *data.m_Network);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+ if (!isReshapeSupported)
+ {
+ return false;
+ }
+
return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data);
}
else
@@ -2095,7 +2119,12 @@ bool ConvertDiv(const Operation& operation, const Model& model, ConversionData&
if (endLayer)
{
- BroadcastTensor(input0, input1, startLayer, *data.m_Network);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+ if (!isReshapeSupported)
+ {
+ return false;
+ }
+
return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data);
}
return Fail("%s: ProcessActivation failed", __func__);
@@ -2536,7 +2565,12 @@ bool ConvertMul(const Operation& operation, const Model& model, ConversionData&
if (endLayer != nullptr)
{
- BroadcastTensor(input0, input1, startLayer, *data.m_Network);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+ if (!isReshapeSupported)
+ {
+ return false;
+ }
+
return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data);
}
else
@@ -2739,7 +2773,11 @@ bool ConvertSub(const Operation& operation, const Model& model, ConversionData&
if (endLayer)
{
- BroadcastTensor(input0, input1, startLayer, *data.m_Network);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+ if (!isReshapeSupported)
+ {
+ return false;
+ }
return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data);
}
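As a footnote to the ConversionUtils.hpp hunk: the reshapedInfo that the new IsReshapeSupported check validates is the smaller input's shape padded with leading degenerate (size-1) dimensions up to the larger input's rank, which is what makes the subsequent elementwise broadcast legal. A standalone sketch of that shape computation, with plain std::vector standing in for armnn::TensorShape (padToRank is a hypothetical helper, not the driver's code):

// Sketch of the degenerate-dimension padding that BroadcastTensor builds
// and then validates via IsReshapeSupported. Assumes broadcasting aligns
// trailing dimensions, as in the NNAPI elementwise ops.
#include <cassert>
#include <cstddef>
#include <vector>

std::vector<unsigned int> padToRank(const std::vector<unsigned int>& small,
                                    std::size_t targetRank)
{
    assert(small.size() <= targetRank);
    // Lead with 1s so the ranks match, keeping the original
    // dimensions at the back (trailing alignment).
    std::vector<unsigned int> reshaped(targetRank - small.size(), 1u);
    reshaped.insert(reshaped.end(), small.begin(), small.end());
    return reshaped;
}

int main()
{
    // A rank-1 [4] tensor broadcast against a rank-3 [2,3,4] tensor
    // is reshaped to [1,1,4] before the elementwise layer.
    std::vector<unsigned int> reshaped = padToRank({ 4 }, 3);
    assert((reshaped == std::vector<unsigned int>{ 1, 1, 4 }));
    return 0;
}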