aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDerek Lamberti <derek.lamberti@arm.com>2019-12-19 15:45:35 +0000
committerTeresaARM <teresa.charlinreyes@arm.com>2020-01-27 15:05:09 +0000
commit6fd4cebb1737555a34a70393ea88a785937534ec (patch)
tree455f750fbd2321df3c3a2f7c4a53815c7f696852
parentaed08ac9b985766a6ce442695ff1742defb0a189 (diff)
downloadandroid-nn-driver-6fd4cebb1737555a34a70393ea88a785937534ec.tar.gz
IVGCVSW-4301 Correctly validate reshape for broadcastable inputs
!android-nn-driver:2642

Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
Change-Id: I4db6ea4ed0a192c85f124c4a9ced60b1666a3870
Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
-rw-r--r--1.2/HalPolicy.cpp6
-rw-r--r--ConversionUtils.hpp11
2 files changed, 8 insertions, 9 deletions
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index bf4525a..c9b4dad 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -1347,7 +1347,7 @@ bool HalPolicy::ConvertMaximum(const Operation& operation, const Model& model, C
IConnectableLayer* layer = data.m_Network->AddMaximumLayer();
assert(layer != nullptr);
- bool isReshapeSupported = BroadcastTensor(input0, input1, outInfo, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
if (!isReshapeSupported)
{
return false;
@@ -1402,7 +1402,7 @@ bool HalPolicy::ConvertMinimum(const Operation& operation, const Model& model, C
IConnectableLayer* const layer = data.m_Network->AddMinimumLayer();
assert(layer != nullptr);
- bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
if (!isReshapeSupported)
{
return false;
@@ -1565,7 +1565,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
return Fail("%s: AddPreluLayer failed", __func__);
}
- bool isReshapeSupported = BroadcastTensor(input, alpha, outputInfo, layer, data);
+ bool isReshapeSupported = BroadcastTensor(input, alpha, layer, data);
if (!isReshapeSupported)
{
return false;
diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index b3f1ac7..4881ec9 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -241,7 +241,6 @@ armnn::IConnectableLayer& AddReshapeLayer(armnn::INetwork& network,
bool BroadcastTensor(LayerInputHandle& input0,
LayerInputHandle& input1,
- const armnn::TensorInfo& outputInfo,
armnn::IConnectableLayer* startLayer,
ConversionData& data)
{
@@ -292,8 +291,8 @@ bool BroadcastTensor(LayerInputHandle& input0,
IsReshapeSupported,
data.m_Backends,
isSupported,
+ smallInfo,
reshapedInfo,
- outputInfo,
reshapeDescriptor);
if (!isSupported)
{
@@ -1594,7 +1593,7 @@ bool ConvertAdd(const HalOperation& operation, const HalModel& model, Conversion
if (endLayer != nullptr)
{
- bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
if (!isReshapeSupported)
{
return false;
@@ -2408,7 +2407,7 @@ bool ConvertDiv(const HalOperation& operation, const HalModel& model, Conversion
if (endLayer)
{
- bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
if (!isReshapeSupported)
{
return false;
@@ -3009,7 +3008,7 @@ bool ConvertMul(const HalOperation& operation, const HalModel& model, Conversion
if (endLayer != nullptr)
{
- bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
if (!isReshapeSupported)
{
return false;
@@ -3218,7 +3217,7 @@ bool ConvertSub(const HalOperation& operation, const HalModel& model, Conversion
if (endLayer)
{
- bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
+ bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
if (!isReshapeSupported)
{
return false;