From 6fd4cebb1737555a34a70393ea88a785937534ec Mon Sep 17 00:00:00 2001
From: Derek Lamberti
Date: Thu, 19 Dec 2019 15:45:35 +0000
Subject: IVGCVSW-4301 Correctly validate reshape for broadcastable inputs

!android-nn-driver:2642

Signed-off-by: Derek Lamberti
Change-Id: I4db6ea4ed0a192c85f124c4a9ced60b1666a3870
Signed-off-by: Teresa Charlin
---
 1.2/HalPolicy.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

(limited to '1.2/HalPolicy.cpp')

diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index bf4525aa..c9b4dadb 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -1347,7 +1347,7 @@ bool HalPolicy::ConvertMaximum(const Operation& operation, const Model& model, C
     IConnectableLayer* layer = data.m_Network->AddMaximumLayer();
     assert(layer != nullptr);

-    bool isReshapeSupported = BroadcastTensor(input0, input1, outInfo, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
     if (!isReshapeSupported)
     {
         return false;
@@ -1402,7 +1402,7 @@ bool HalPolicy::ConvertMinimum(const Operation& operation, const Model& model, C
     IConnectableLayer* const layer = data.m_Network->AddMinimumLayer();
     assert(layer != nullptr);

-    bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
     if (!isReshapeSupported)
     {
         return false;
@@ -1565,7 +1565,7 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
         return Fail("%s: AddPreluLayer failed", __func__);
     }

-    bool isReshapeSupported = BroadcastTensor(input, alpha, outputInfo, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input, alpha, layer, data);
     if (!isReshapeSupported)
     {
         return false;
--
cgit v1.2.1
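
Context for the hunks above: each converter now relies on BroadcastTensor's boolean return value instead of passing the output TensorInfo, so an unsupported broadcast reshape makes the conversion fail cleanly rather than adding an invalid layer. The standalone C++ sketch below only illustrates that calling pattern under stated assumptions; the Shape struct, the IsReshapeSupported stand-in, and the simplified BroadcastTensor are hypothetical placeholders, not the android-nn-driver or Arm NN API.

// Hypothetical stand-in for a tensor shape; not the Arm NN TensorShape type.
#include <cstddef>
#include <iostream>
#include <vector>

struct Shape
{
    std::vector<std::size_t> dims;
};

// Hypothetical validation hook: in the real driver this role is played by a
// backend "is reshape supported" query; only the concept is reproduced here.
bool IsReshapeSupported(const Shape& from, std::size_t targetRank)
{
    return from.dims.size() <= targetRank;
}

// Sketch of the post-patch calling pattern: the helper decides whether a
// reshape is needed to broadcast the lower-rank input, validates it, and
// reports success or failure through its return value, so callers no longer
// pass the output tensor info.
bool BroadcastTensor(const Shape& input0, const Shape& input1)
{
    const std::size_t rank0 = input0.dims.size();
    const std::size_t rank1 = input1.dims.size();
    if (rank0 == rank1)
    {
        return true; // No reshape required.
    }
    const Shape& smaller    = (rank0 < rank1) ? input0 : input1;
    const std::size_t rank  = (rank0 < rank1) ? rank1 : rank0;
    // Only proceed if the (stand-in) validation accepts the reshape.
    return IsReshapeSupported(smaller, rank);
}

int main()
{
    Shape a{{1, 8}};
    Shape b{{4, 1, 8}};
    // Mirrors the converters above: bail out when the broadcast reshape is rejected.
    bool isReshapeSupported = BroadcastTensor(a, b);
    std::cout << (isReshapeSupported ? "reshape ok" : "reshape rejected") << '\n';
    return 0;
}

Returning a bool keeps the decision at the call site cheap: each converter can simply return false when broadcasting cannot be arranged, which is exactly the shape of the `if (!isReshapeSupported) { return false; }` blocks in the hunks above.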