about summary refs log tree commit diff
path: root/ConversionUtils.hpp
diff options
context:
space:
mode:
authorMike Kelly <mike.kelly@arm.com>2019-07-29 16:56:31 +0100
committermike.kelly <mike.kelly@arm.com>2019-07-30 08:10:43 +0000
commit0a87936e7261fc074797111d274c40fca17d9509 (patch)
tree87d17283cf0930608bc2f4077b98fff8a56f68ab /ConversionUtils.hpp
parenta5e2a458d6279560bbe24deafc12db286be2ca10 (diff)
downloadandroid-nn-driver-0a87936e7261fc074797111d274c40fca17d9509.tar.gz
IVGCVSW-3591 Fixed unexpectedly skipped SUB tests
* A model that has Inputs with different quantized scales is not compliant with 1.1.

Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: Ifb8277d78f05b5ef017effa879322a08c0efc851
Diffstat (limited to 'ConversionUtils.hpp')
-rw-r--r--  ConversionUtils.hpp  63
1 files changed, 63 insertions, 0 deletions
diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index 52bfd5cc..fc6d3653 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -1738,4 +1738,67 @@ bool ConvertPad(HalOperation& operation, const HalModel& model, ConversionData&
return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *layer, model, data);
}
+template<typename HalPolicy,
+ typename Operation = typename HalPolicy::Operation,
+ typename Operand = typename HalPolicy::Operand,
+ typename Model = typename HalPolicy::Model>
+bool ConvertSub(const Operation& operation, const Model& model, ConversionData& data)
+{
+ LayerInputHandle input0 = ConvertToLayerInputHandle<HalPolicy>(operation, 0, model, data);
+ LayerInputHandle input1 = ConvertToLayerInputHandle<HalPolicy>(operation, 1, model, data);
+
+ if (!input0.IsValid() || !input1.IsValid())
+ {
+ return Fail("%s: Operation has invalid inputs", __func__);
+ }
+
+ // The FuseActivation parameter is always the input index 2
+ // and it should be optional
+ ActivationFn activationFunction;
+ if (!GetOptionalInputActivation<HalPolicy>(operation, 2, activationFunction, model, data))
+ {
+ return Fail("%s: Operation has invalid inputs", __func__);
+ }
+
+ const Operand* output = GetOutputOperand<HalPolicy>(operation, 0, model);
+ if (!output)
+ {
+ return Fail("%s: Could not read output 0", __func__);
+ }
+
+ const armnn::TensorInfo& outputInfo = GetTensorInfoForOperand(*output);
+ if (IsDynamicTensor(outputInfo))
+ {
+ return Fail("%s: Dynamic output tensors are not supported", __func__);
+ }
+
+ bool isSupported = false;
+ FORWARD_LAYER_SUPPORT_FUNC(__func__,
+ IsSubtractionSupported,
+ data.m_Backends,
+ isSupported,
+ input0.GetTensorInfo(),
+ input1.GetTensorInfo(),
+ outputInfo);
+ if (!isSupported)
+ {
+ return false;
+ }
+
+ armnn::IConnectableLayer* const startLayer = data.m_Network->AddSubtractionLayer();
+ armnn::IConnectableLayer* const endLayer = ProcessActivation(outputInfo, activationFunction, startLayer, data);
+
+ const armnn::TensorInfo& inputTensorInfo0 = input0.GetTensorInfo();
+ const armnn::TensorInfo& inputTensorInfo1 = input1.GetTensorInfo();
+
+ if (endLayer)
+ {
+ BroadcastTensor(input0, input1, startLayer, *data.m_Network);
+ return SetupAndTrackLayerOutputSlot<HalPolicy>(operation, 0, *endLayer, model, data);
+ }
+
+ return Fail("%s: ProcessActivation failed", __func__);
+}
+
+
} // namespace armnn_driver