From 366e0a66f4566cf71dff3f850556350709ee66a8 Mon Sep 17 00:00:00 2001
From: Aron Virginas-Tar
Date: Wed, 10 Jul 2019 13:01:41 +0100
Subject: IVGCVSW-3482 Report operations with dynamic output size as unsupported

Signed-off-by: Aron Virginas-Tar
Change-Id: Ifafe2a6fbfd6019b3395d51ed9967db794d2b034
---
 1.1/HalPolicy.cpp | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/1.1/HalPolicy.cpp b/1.1/HalPolicy.cpp
index 78f157dd..dbd380ab 100644
--- a/1.1/HalPolicy.cpp
+++ b/1.1/HalPolicy.cpp
@@ -5,6 +5,8 @@
 
 #include "HalPolicy.hpp"
 
+#include "OutputShapeUtils.hpp"
+
 #include "../1.0/HalPolicy.hpp"
 
 namespace
@@ -176,20 +178,24 @@ bool HalPolicy::ConvertSub(const Operation& operation, const Model& model, Conve
         return false;
     }
 
-    const armnn::TensorInfo& outInfo = GetTensorInfoForOperand(*outputOperand);
+    const armnn::TensorInfo& outputInfo = GetTensorInfoForOperand(*outputOperand);
+    if (IsDynamicOutput(outputInfo))
+    {
+        return Fail("%s: Dynamic output not supported", __func__);
+    }
 
     if (!IsLayerSupportedForAnyBackend(__func__,
                                        armnn::IsSubtractionSupported,
                                        data.m_Backends,
                                        input0.GetTensorInfo(),
                                        input1.GetTensorInfo(),
-                                       outInfo))
+                                       outputInfo))
     {
         return false;
     }
 
     armnn::IConnectableLayer* const startLayer = data.m_Network->AddSubtractionLayer();
-    armnn::IConnectableLayer* const endLayer = ProcessActivation(outInfo, activationFunction, startLayer, data);
+    armnn::IConnectableLayer* const endLayer = ProcessActivation(outputInfo, activationFunction, startLayer, data);
 
     const armnn::TensorInfo& inputTensorInfo0 = input0.GetTensorInfo();
     const armnn::TensorInfo& inputTensorInfo1 = input1.GetTensorInfo();
@@ -292,6 +298,10 @@ bool HalPolicy::ConvertPad(const Operation& operation, const Model& model, Conve
     }
 
     const armnn::TensorInfo& outputInfo = GetTensorInfoForOperand(*output);
+    if (IsDynamicOutput(outputInfo))
+    {
+        return Fail("%s: Dynamic output not supported", __func__);
+    }
 
     if (!IsLayerSupportedForAnyBackend(__func__,
                                        armnn::IsPadSupported,
--
cgit v1.2.1
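
Note: the patch relies on an IsDynamicOutput() helper pulled in via OutputShapeUtils.hpp, whose definition is not part of this diff. A minimal sketch of such a helper, assuming the convention that an operand with an unspecified shape reports zero elements, could look like this (not the verbatim driver source):

    // Sketch only -- assumes a dynamic output is represented by a TensorInfo
    // whose total element count is zero, i.e. its dimensions are not yet known.
    #include <armnn/Tensor.hpp>

    inline bool IsDynamicOutput(const armnn::TensorInfo& outputInfo)
    {
        return outputInfo.GetNumElements() == 0u;
    }

With this guard, ConvertSub and ConvertPad reject an operation right after retrieving its output TensorInfo, so outputs whose size is only known at execution time fail conversion cleanly before any backend support query or layer creation.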