aboutsummaryrefslogtreecommitdiff
path: root/1.2
diff options
context:
space:
mode:
authorKevin May <kevin.may@arm.com>2019-12-12 16:33:31 +0000
committerTeresaARM <teresa.charlinreyes@arm.com>2020-01-24 16:47:12 +0000
commitaed08ac9b985766a6ce442695ff1742defb0a189 (patch)
tree7614d931fd0b4561cf7fb1843402ee00cca30870 /1.2
parent829ad30a4f13bc0b9cb32bab5a4efeeeab541f76 (diff)
downloadandroid-nn-driver-aed08ac9b985766a6ce442695ff1742defb0a189.tar.gz
IVGCVSW-4262 Add Calls to IsReshapeSupported and IsPermuteSupported
!armnn:2643

* Add calls before addition of these layers in ConvertConcatenation
* Add outputInfo parameter wherever needed for IsReshapeSupported

Signed-off-by: Kevin May <kevin.may@arm.com>
Change-Id: Ic5d142ea046161960ff2fc137bd261ebb4e6ac0c
Diffstat (limited to '1.2')
-rw-r--r--1.2/HalPolicy.cpp7
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index 61daeef5..bf4525aa 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -749,6 +749,7 @@ bool HalPolicy::ConvertExpandDims(const Operation& operation, const Model& model
data.m_Backends,
isSupported,
input.GetTensorInfo(),
+ outputInfo,
reshapeDescriptor);
if (!isSupported)
@@ -1346,7 +1347,7 @@ bool HalPolicy::ConvertMaximum(const Operation& operation, const Model& model, C
IConnectableLayer* layer = data.m_Network->AddMaximumLayer();
assert(layer != nullptr);
- bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, outInfo, layer, data);
if (!isReshapeSupported)
{
return false;
@@ -1401,7 +1402,7 @@ bool HalPolicy::ConvertMinimum(const Operation& operation, const Model& model, C
IConnectableLayer* const layer = data.m_Network->AddMinimumLayer();
assert(layer != nullptr);
- bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, layer, data);
if (!isReshapeSupported)
{
return false;
@@ -1564,7 +1565,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
return Fail("%s: AddPreluLayer failed", __func__);
}
- bool isReshapeSupported = BroadcastTensor(input, alpha, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input, alpha, outputInfo, layer, data);
if (!isReshapeSupported)
{
return false;