about summary refs log tree commit diff
path: root/ConversionUtils.hpp
diff options
context:
space:
mode:
author    Sadik Armagan <sadik.armagan@arm.com>    2019-07-25 09:09:40 +0100
committer Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>    2019-07-25 11:31:48 +0100
commit   6111316eb609bd71589b963cf6fc56b18ba3d241 (patch)
tree     105d8a0aa72d18cfc246b37911d460f747c13220 /ConversionUtils.hpp
parent   c921f6baf18ca05f14a41097b4e075e2d4fc7479 (diff)
download android-nn-driver-6111316eb609bd71589b963cf6fc56b18ba3d241.tar.gz
IVGCVSW-3530 Fix DynamicOutput Tests for Android Q NeuralNetworks 1.0 & 1.1
* Fixed for failing Conv2d, DepthwiseConv2d, and Activation tests on Hal 1.0 and 1.1 in Q

Signed-off-by: Sadik Armagan <sadik.armagan@arm.com>
Signed-off-by: Aron Virginas-Tar <aron.virginas-tar@arm.com>
Change-Id: I435338b90b6c501320083f2fd9372e3a4ac3c32c
Diffstat (limited to 'ConversionUtils.hpp')
-rw-r--r--ConversionUtils.hpp95
1 file changed, 71 insertions(+), 24 deletions(-)
diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index f84dc108..790382d6 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -194,6 +194,11 @@ inline bool IsBool(V1_0::Operand)
return false;
}
+inline bool Is12Operand(V1_0::Operand)
+{
+ return false;
+}
+
#ifdef ARMNN_ANDROID_NN_V1_2
inline bool IsBool(V1_2::Operand operand)
@@ -201,6 +206,12 @@ inline bool IsBool(V1_2::Operand operand)
return operand.type == V1_2::OperandType::BOOL;
}
+/// Checks if an operand is a 1_2 Operand
+inline bool Is12Operand(V1_2::Operand)
+{
+ return true;
+}
+
#endif
template<typename LayerHandleType>
@@ -1161,8 +1172,15 @@ bool ConvertToActivation(const HalOperation& operation,
armnn::TensorInfo outInfo = GetTensorInfoForOperand(*outputOperand);
if (IsDynamicTensor(outInfo))
{
- ALOGD("Output shape not set, will infer from input");
- outInfo.SetShape(input.GetTensorInfo().GetShape());
+ if (Is12Operand(*outputOperand))
+ {
+ ALOGD("Output shape not set, will infer from input");
+ outInfo.SetShape(input.GetTensorInfo().GetShape());
+ }
+ else
+ {
+ return Fail("%s: Dynamic OutputShapes are not supported in this HAL version", __func__);
+ }
}
bool isSupported = false;
@@ -1190,6 +1208,55 @@ bool ConvertToActivation(const HalOperation& operation,
}
template<typename HalPolicy,
+ typename HalOperation = typename HalPolicy::Operation,
+ typename HalModel = typename HalPolicy::Model>
+bool ConvertReLu(const HalOperation& operation, const HalModel& model, ConversionData& data)
+{
+ armnn::ActivationDescriptor desc;
+ desc.m_Function = armnn::ActivationFunction::ReLu;
+
+ return ConvertToActivation<HalPolicy>(operation, __func__, desc, model, data);
+}
+
+template<typename HalPolicy,
+ typename HalOperation = typename HalPolicy::Operation,
+ typename HalModel = typename HalPolicy::Model>
+bool ConvertReLu1(const HalOperation& operation, const HalModel& model, ConversionData& data)
+{
+ armnn::ActivationDescriptor desc;
+ desc.m_Function = armnn::ActivationFunction::BoundedReLu;
+ desc.m_A = 1.0f;
+ desc.m_B = -1.0f;
+
+ return ConvertToActivation<HalPolicy>(operation, __func__, desc, model, data);
+}
+
+template<typename HalPolicy,
+ typename HalOperation = typename HalPolicy::Operation,
+ typename HalModel = typename HalPolicy::Model>
+bool ConvertReLu6(const HalOperation& operation, const HalModel& model, ConversionData& data)
+{
+ armnn::ActivationDescriptor desc;
+ desc.m_Function = armnn::ActivationFunction::BoundedReLu;
+ desc.m_A = 6.0f;
+
+ return ConvertToActivation<HalPolicy>(operation, __func__, desc, model, data);
+}
+
+template<typename HalPolicy,
+ typename HalOperation = typename HalPolicy::Operation,
+ typename HalModel = typename HalPolicy::Model>
+bool ConvertTanH(const HalOperation& operation, const HalModel& model, ConversionData& data)
+{
+ armnn::ActivationDescriptor desc;
+ desc.m_Function = armnn::ActivationFunction::TanH;
+ desc.m_A = 1.0f; // android nn does not support tanH parameters
+ desc.m_B = 1.0f; // set to 1.0f for unity scaling
+
+ return ConvertToActivation<HalPolicy>(operation, __func__, desc, model, data);
+}
+
+template<typename HalPolicy,
typename HalOperation = typename HalPolicy::Operation,
typename HalModel = typename HalPolicy::Model>
bool ConvertPaddings(const HalOperation& operation,
@@ -1420,17 +1487,7 @@ bool ConvertConv2d(const HalOperation& operation, const HalModel& model, Convers
if (IsDynamicTensor(outputInfo))
{
- try
- {
- ALOGD("Output shape not set, will infer from inputs");
- outputInfo.SetShape(InferConvolution2dOutputShape(inputInfo.GetShape(),
- weights.GetInfo().GetShape(),
- desc));
- }
- catch (armnn::Exception& e)
- {
- return Fail("%s: Could not infer dynamic output shape: %s", __func__, e.what());
- }
+ return Fail("%s: Dynamic OutputShapes are not supported", __func__);
}
bool isSupported = false;
@@ -1600,17 +1657,7 @@ bool ConvertDepthwiseConv2d(const HalOperation& operation, const HalModel& model
if (IsDynamicTensor(outputInfo))
{
- try
- {
- ALOGD("Output shape not set, will infer from inputs");
- outputInfo.SetShape(InferDepthwiseConvolution2dOutputShape(inputInfo.GetShape(),
- weights.GetInfo().GetShape(),
- desc));
- }
- catch (armnn::Exception& e)
- {
- return Fail("%s: Could not infer dynamic output shape: %s", __func__, e.what());
- }
+ return Fail("%s: Dynamic OutputShapes are not supported", __func__);
}
bool isSupported = false;