diff options
author | Kevin May <kevin.may@arm.com> | 2019-12-12 16:33:31 +0000 |
---|---|---|
committer | David Monahan <david.monahan@arm.com> | 2019-12-18 08:14:26 +0000 |
commit | dbbcc3948190d874d80f1f095fcbda65d693519d (patch) | |
tree | 670b55e661763e733208f98a22de7aca1647ff65 /1.2 | |
parent | 0518d71d5a99ad1f45091d4695126c5e44ae615d (diff) | |
download | android-nn-driver-dbbcc3948190d874d80f1f095fcbda65d693519d.tar.gz |
IVGCVSW-4262 Add Calls to IsReshapeSupported and IsPermuteSupported
!armnn:2486
* Add calls before addition of these layers in ConvertConcatenation
* Add outputInfo parameter wherever needed for IsReshapeSupported
Signed-off-by: Kevin May <kevin.may@arm.com>
Change-Id: Ic5d142ea046161960ff2fc137bd261ebb4e6ac0c
Diffstat (limited to '1.2')
-rw-r--r-- | 1.2/HalPolicy.cpp | 7 |
1 file changed, 4 insertions, 3 deletions
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp index e6f8acbb..2cb84972 100644 --- a/1.2/HalPolicy.cpp +++ b/1.2/HalPolicy.cpp @@ -702,6 +702,7 @@ bool HalPolicy::ConvertExpandDims(const Operation& operation, const Model& model data.m_Backends, isSupported, input.GetTensorInfo(), + outputInfo, reshapeDescriptor); if (!isSupported) @@ -1299,7 +1300,7 @@ bool HalPolicy::ConvertMaximum(const Operation& operation, const Model& model, C IConnectableLayer* layer = data.m_Network->AddMaximumLayer(); assert(layer != nullptr); - bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data); + bool isReshapeSupported = BroadcastTensor(input0, input1, outInfo, layer, data); if (!isReshapeSupported) { return false; @@ -1354,7 +1355,7 @@ bool HalPolicy::ConvertMinimum(const Operation& operation, const Model& model, C IConnectableLayer* const layer = data.m_Network->AddMinimumLayer(); assert(layer != nullptr); - bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data); + bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, layer, data); if (!isReshapeSupported) { return false; @@ -1517,7 +1518,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con return Fail("%s: AddPreluLayer failed", __func__); } - bool isReshapeSupported = BroadcastTensor(input, alpha, layer, data); + bool isReshapeSupported = BroadcastTensor(input, alpha, outputInfo, layer, data); if (!isReshapeSupported) { return false; |