From 0a87936e7261fc074797111d274c40fca17d9509 Mon Sep 17 00:00:00 2001
From: Mike Kelly
Date: Mon, 29 Jul 2019 16:56:31 +0100
Subject: IVGCVSW-3591 Fixed unexpectedly skipped SUB tests

* A model that has Inputs with different quantized scales is not compliant with 1.1

Signed-off-by: Mike Kelly
Change-Id: Ifb8277d78f05b5ef017effa879322a08c0efc851
---
 1.1/HalPolicy.cpp | 56 +------------------------------------------------------
 1 file changed, 1 insertion(+), 55 deletions(-)

diff --git a/1.1/HalPolicy.cpp b/1.1/HalPolicy.cpp
index ab8224a0..93ee70e1 100644
--- a/1.1/HalPolicy.cpp
+++ b/1.1/HalPolicy.cpp
@@ -166,61 +166,7 @@ bool HalPolicy::ConvertDiv(const Operation& operation, const Model& model, Conve
 bool HalPolicy::ConvertSub(const Operation& operation, const Model& model, ConversionData& data)
 {
     ALOGV("hal_1_1::HalPolicy::ConvertSub()");
-
-    LayerInputHandle input0 = ConvertToLayerInputHandle(operation, 0, model, data);
-    LayerInputHandle input1 = ConvertToLayerInputHandle(operation, 1, model, data);
-
-    if (!input0.IsValid() || !input1.IsValid())
-    {
-        return Fail("%s: Operation has invalid inputs", __func__);
-    }
-
-    // The FuseActivation parameter is always the input index 2
-    // and it should be optional
-    ActivationFn activationFunction;
-    if (!GetOptionalInputActivation(operation, 2, activationFunction, model, data))
-    {
-        return Fail("%s: Operation has invalid inputs", __func__);
-    }
-
-    const Operand* output = GetOutputOperand(operation, 0, model);
-    if (!output)
-    {
-        return Fail("%s: Could not read output 0", __func__);
-    }
-
-    const armnn::TensorInfo& outputInfo = GetTensorInfoForOperand(*output);
-    if (IsDynamicTensor(outputInfo))
-    {
-        return Fail("%s: Dynamic output tensors are not supported", __func__);
-    }
-
-    bool isSupported = false;
-    FORWARD_LAYER_SUPPORT_FUNC(__func__,
-                               IsSubtractionSupported,
-                               data.m_Backends,
-                               isSupported,
-                               input0.GetTensorInfo(),
-                               input1.GetTensorInfo(),
-                               outputInfo);
-    if (!isSupported)
-    {
-        return false;
-    }
-
-    armnn::IConnectableLayer* const startLayer = data.m_Network->AddSubtractionLayer();
-    armnn::IConnectableLayer* const endLayer = ProcessActivation(outputInfo, activationFunction, startLayer, data);
-
-    const armnn::TensorInfo& inputTensorInfo0 = input0.GetTensorInfo();
-    const armnn::TensorInfo& inputTensorInfo1 = input1.GetTensorInfo();
-
-    if (endLayer)
-    {
-        BroadcastTensor(input0, input1, startLayer, *data.m_Network);
-        return SetupAndTrackLayerOutputSlot(operation, 0, *endLayer, model, data);
-    }
-
-    return Fail("%s: ProcessActivation failed", __func__);
+    return ::ConvertSub(operation, model, data);
 }
 
 bool HalPolicy::ConvertMean(const Operation& operation, const Model& model, ConversionData& data)
-- 
cgit v1.2.1
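
For context, the one-line replacement forwards to a shared ::ConvertSub helper defined outside this file, so the SUB conversion (including the check that rejects inputs with different quantized scales on HAL 1.1) lives in a single place instead of being duplicated per HAL version. The standalone sketch below only illustrates that delegation pattern under that assumption; every name in it (SharedConvertSub, Policy_1_0, Policy_1_1) is a hypothetical stand-in, not the driver's real API.

// Self-contained sketch (not the driver's real code) of the delegation
// pattern this patch adopts: per-version policies forward to one shared,
// templated conversion routine.
#include <iostream>
#include <string>

struct Operation { std::string name; };

// Shared conversion logic, templated on the HAL policy that invoked it.
template <typename HalPolicy>
bool SharedConvertSub(const Operation& op)
{
    std::cout << HalPolicy::Version() << ": converting " << op.name << '\n';
    // ...validate inputs, query backend support, add the layer, etc...
    return true;
}

// The per-version policies become thin forwarders.
struct Policy_1_0
{
    static const char* Version() { return "hal_1_0"; }
    static bool ConvertSub(const Operation& op) { return SharedConvertSub<Policy_1_0>(op); }
};

struct Policy_1_1
{
    static const char* Version() { return "hal_1_1"; }
    static bool ConvertSub(const Operation& op) { return SharedConvertSub<Policy_1_1>(op); }
};

int main()
{
    Operation sub{"SUB"};
    Policy_1_0::ConvertSub(sub);
    Policy_1_1::ConvertSub(sub);
    return 0;
}

Centralising the conversion this way means a fix to the shared routine (such as the quantized-scale compliance check mentioned in the commit message) automatically applies to every HAL version that forwards to it.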