about summary refs log tree commit diff
diff options
context:
space:
mode:
author     Kevin May <kevin.may@arm.com>  2019-12-12 16:33:31 +0000
committer  David Monahan <david.monahan@arm.com>  2019-12-18 08:14:26 +0000
commit     dbbcc3948190d874d80f1f095fcbda65d693519d (patch)
tree       670b55e661763e733208f98a22de7aca1647ff65
parent     0518d71d5a99ad1f45091d4695126c5e44ae615d (diff)
download   android-nn-driver-dbbcc3948190d874d80f1f095fcbda65d693519d.tar.gz
IVGCVSW-4262 Add Calls to IsReshapeSupported and IsPermuteSupported
!armnn:2486

* Add calls before addition of these layers in ConvertConcatenation
* Add outputInfo parameter wherever needed for IsReshapeSupported

Signed-off-by: Kevin May <kevin.may@arm.com>
Change-Id: Ic5d142ea046161960ff2fc137bd261ebb4e6ac0c
-rw-r--r--  1.2/HalPolicy.cpp    7
-rw-r--r--  ConversionUtils.hpp  100
2 files changed, 98 insertions, 9 deletions
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index e6f8acbb..2cb84972 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -702,6 +702,7 @@ bool HalPolicy::ConvertExpandDims(const Operation& operation, const Model& model
data.m_Backends,
isSupported,
input.GetTensorInfo(),
+ outputInfo,
reshapeDescriptor);
if (!isSupported)
@@ -1299,7 +1300,7 @@ bool HalPolicy::ConvertMaximum(const Operation& operation, const Model& model, C
IConnectableLayer* layer = data.m_Network->AddMaximumLayer();
assert(layer != nullptr);
- bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, outInfo, layer, data);
if (!isReshapeSupported)
{
return false;
@@ -1354,7 +1355,7 @@ bool HalPolicy::ConvertMinimum(const Operation& operation, const Model& model, C
IConnectableLayer* const layer = data.m_Network->AddMinimumLayer();
assert(layer != nullptr);
- bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, layer, data);
if (!isReshapeSupported)
{
return false;
@@ -1517,7 +1518,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
return Fail("%s: AddPreluLayer failed", __func__);
}
- bool isReshapeSupported = BroadcastTensor(input, alpha, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input, alpha, outputInfo, layer, data);
if (!isReshapeSupported)
{
return false;
diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index b695aa65..4e4b4d6b 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -236,7 +236,7 @@ armnn::IConnectableLayer& AddReshapeLayer(armnn::INetwork& network, LayerHandleT
return *reshapeLayer;
}
-bool BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1,
+bool BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1, const armnn::TensorInfo& outputInfo,
armnn::IConnectableLayer* startLayer, ConversionData& data)
{
BOOST_ASSERT(startLayer != nullptr);
@@ -287,6 +287,7 @@ bool BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1,
data.m_Backends,
isSupported,
reshapedInfo,
+ outputInfo,
reshapeDescriptor);
if (!isSupported)
{
@@ -551,6 +552,41 @@ void SwizzleInputs(armnn::INetwork& network,
}
}
+bool CheckReshapeSupported(ConversionData& data,
+ std::vector<LayerInputHandle>& inputs,
+ std::vector<armnn::TensorShape>& inputShapes,
+ const armnn::PermutationVector& mapping,
+ const armnn::TensorInfo& outputInfo)
+{
+ if (!mapping.IsEqual(IdentityPermutation4D))
+ {
+ size_t nInputs = inputs.size();
+ for (size_t i=0; i<nInputs; ++i)
+ {
+ // check permute layer
+ armnn::PermuteDescriptor permuteDesc;
+ permuteDesc.m_DimMappings = mapping;
+
+ bool isSupported = false;
+ FORWARD_LAYER_SUPPORT_FUNC(__func__,
+ IsPermuteSupported,
+ data.m_Backends,
+ isSupported,
+ inputs[i].GetTensorInfo(),
+ outputInfo,
+ permuteDesc);
+ if (!isSupported)
+ {
+ return false;
+ }
+
+ }
+ SwizzleInputs(*data.m_Network, inputs, inputShapes, mapping);
+ }
+ return true;
+}
+
+
bool CreateConcatPermutationParameters(const unsigned int numberOfDimensions,
int32_t & concatDimension,
std::pair<armnn::PermutationVector, armnn::PermutationVector> & permutationPair)
@@ -1548,7 +1584,7 @@ bool ConvertAdd(const Operation& operation, const Model& model, ConversionData&
if (endLayer != nullptr)
{
- bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
if (!isReshapeSupported)
{
return false;
@@ -1733,6 +1769,22 @@ bool ConvertConcatenation(const Operation& operation, const Model& model, Conver
tensorDimensionsAdded = 2;
}
+ armnn::ReshapeDescriptor reshapeDescriptor;
+ reshapeDescriptor.m_TargetShape = reshapeInfo.GetShape();
+
+ bool isSupported = false;
+ FORWARD_LAYER_SUPPORT_FUNC(__func__,
+ IsReshapeSupported,
+ data.m_Backends,
+ isSupported,
+ operandInputHandle.GetTensorInfo(),
+ reshapeInfo,
+ reshapeDescriptor);
+ if (!isSupported)
+ {
+ return false;
+ }
+
armnn::IConnectableLayer& newReshape = AddReshapeLayer(
*data.m_Network,
operandInputHandle,
@@ -1788,7 +1840,10 @@ bool ConvertConcatenation(const Operation& operation, const Model& model, Conver
// this is no-op for identity swizzles, otherwise it replaces both
// the handles and shapes with the swizzled layer output handles and shapes
- SwizzleInputs(*data.m_Network, inputHandles, inputShapes, permutationPair.first);
+ if (!CheckReshapeSupported(data, inputHandles, inputShapes, permutationPair.first, outputInfo))
+ {
+ return false;
+ }
// Create an armnn concat layer descriptor - this will also perform validation on the input shapes
armnn::OriginsDescriptor concatDescriptor;
@@ -1844,6 +1899,21 @@ bool ConvertConcatenation(const Operation& operation, const Model& model, Conver
if (needPermute)
{
+ armnn::PermuteDescriptor permuteDesc;
+ permuteDesc.m_DimMappings = permutationPair.second;
+
+ bool isSupported = false;
+ FORWARD_LAYER_SUPPORT_FUNC(__func__,
+ IsPermuteSupported,
+ data.m_Backends,
+ isSupported,
+ layer->GetOutputSlot(0).GetTensorInfo(),
+ outputInfo,
+ permuteDesc);
+ if (!isSupported)
+ {
+ return false;
+ }
// Add permutation layer and connect the output to it, the permutation becomes the output layer
armnn::IConnectableLayer& deswizzleLayer = AddPermuteLayer(*data.m_Network,
layer->GetOutputSlot(0),
@@ -1866,6 +1936,22 @@ bool ConvertConcatenation(const Operation& operation, const Model& model, Conver
afterConcatInfo.SetShape(armnn::TensorShape({ afterConcatInfo.GetShape()[2] }));
}
+ armnn::ReshapeDescriptor reshapeDescriptor;
+ reshapeDescriptor.m_TargetShape = afterConcatInfo.GetShape();
+
+ bool isSupported = false;
+ FORWARD_LAYER_SUPPORT_FUNC(__func__,
+ IsReshapeSupported,
+ data.m_Backends,
+ isSupported,
+ layer->GetOutputSlot(0).GetTensorInfo(),
+ afterConcatInfo,
+ reshapeDescriptor);
+ if (!isSupported)
+ {
+ return false;
+ }
+
layer = &AddReshapeLayer(
*data.m_Network,
layer->GetOutputSlot(0),
@@ -2312,7 +2398,7 @@ bool ConvertDiv(const Operation& operation, const Model& model, ConversionData&
if (endLayer)
{
- bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
if (!isReshapeSupported)
{
return false;
@@ -2888,7 +2974,7 @@ bool ConvertMul(const Operation& operation, const Model& model, ConversionData&
if (endLayer != nullptr)
{
- bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
if (!isReshapeSupported)
{
return false;
@@ -3027,6 +3113,7 @@ bool ConvertReshape(const Operation& operation, const Model& model, ConversionDa
data.m_Backends,
isSupported,
input.GetTensorInfo(),
+ GetTensorInfoForOperand(*outputOperand),
reshapeDescriptor);
if (!isSupported)
{
@@ -3096,7 +3183,7 @@ bool ConvertSub(const Operation& operation, const Model& model, ConversionData&
if (endLayer)
{
- bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
if (!isReshapeSupported)
{
return false;
@@ -3180,6 +3267,7 @@ bool ConvertSqueeze(const Operation& operation, const Model& model, ConversionDa
data.m_Backends,
isSupported,
inputInfo,
+ outputInfo,
reshapeDesc);
if (!isSupported)
{