From 6111316eb609bd71589b963cf6fc56b18ba3d241 Mon Sep 17 00:00:00 2001
From: Sadik Armagan
Date: Thu, 25 Jul 2019 09:09:40 +0100
Subject: IVGCVSW-3530 Fix DynamicOutput Tests for Android Q NeuralNetworks 1.0 & 1.1

* Fixed failing Conv2d, DepthwiseConv2d, and Activation tests on
  HAL 1.0 and 1.1 in Q

Signed-off-by: Sadik Armagan
Signed-off-by: Aron Virginas-Tar
Change-Id: I435338b90b6c501320083f2fd9372e3a4ac3c32c
---
 1.2/HalPolicy.cpp | 36 ++++++++++++++++++++++++++++++++----
 1 file changed, 32 insertions(+), 4 deletions(-)

(limited to '1.2/HalPolicy.cpp')

diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index 2d6d7970..5f327c20 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -42,14 +42,10 @@ bool HandledByV1_0(V1_2::OperationType operationType)
         case V1_0::OperationType::LSTM:
         case V1_0::OperationType::MAX_POOL_2D:
         case V1_0::OperationType::MUL:
-        case V1_0::OperationType::RELU:
-        case V1_0::OperationType::RELU1:
-        case V1_0::OperationType::RELU6:
         case V1_0::OperationType::RESHAPE:
         case V1_0::OperationType::RNN:
         case V1_0::OperationType::SPACE_TO_DEPTH:
         case V1_0::OperationType::SVDF:
-        case V1_0::OperationType::TANH:
         case V1_0::OperationType::OEM_OPERATION:
             return true;
         default:
@@ -151,12 +147,20 @@ bool HalPolicy::ConvertOperation(const Operation& operation, const Model& model,
             return ConvertPadV2(operation, model, data);
         case V1_2::OperationType::PRELU:
             return ConvertPrelu(operation, model, data);
+        case V1_2::OperationType::RELU:
+            return ConvertReLu(operation, model, data);
+        case V1_2::OperationType::RELU1:
+            return ConvertReLu1(operation, model, data);
+        case V1_2::OperationType::RELU6:
+            return ConvertReLu6(operation, model, data);
         case V1_2::OperationType::RESIZE_BILINEAR:
             return ConvertResize(operation, model, data, armnn::ResizeMethod::Bilinear);
         case V1_2::OperationType::RESIZE_NEAREST_NEIGHBOR:
             return ConvertResize(operation, model, data, armnn::ResizeMethod::NearestNeighbor);
         case V1_2::OperationType::SOFTMAX:
             return ConvertSoftmax(operation, model, data);
+        case V1_2::OperationType::TANH:
+            return ConvertTanH(operation, model, data);
         default:
             return Fail("%s: Operation type %s not supported in ArmnnDriver",
                         __func__, toString(operation.type).c_str());
@@ -779,6 +783,24 @@ bool HalPolicy::ConvertPrelu(const Operation& operation, const Model& model, Con
                                                             armnn::Optional<armnn::TensorInfo>(outputInfo));
 }
 
+bool HalPolicy::ConvertReLu(const Operation& operation, const Model& model, ConversionData& data)
+{
+    ALOGV("hal_1_2::HalPolicy::ConvertReLu()");
+    return ::ConvertReLu<hal_1_2::HalPolicy>(operation, model, data);
+}
+
+bool HalPolicy::ConvertReLu1(const Operation& operation, const Model& model, ConversionData& data)
+{
+    ALOGV("hal_1_2::HalPolicy::ConvertReLu1()");
+    return ::ConvertReLu1<hal_1_2::HalPolicy>(operation, model, data);
+}
+
+bool HalPolicy::ConvertReLu6(const Operation& operation, const Model& model, ConversionData& data)
+{
+    ALOGV("hal_1_2::HalPolicy::ConvertReLu6()");
+    return ::ConvertReLu6<hal_1_2::HalPolicy>(operation, model, data);
+}
+
 bool HalPolicy::ConvertResize(const Operation& operation,
                               const Model& model,
                               ConversionData& data,
@@ -1030,5 +1052,11 @@ bool HalPolicy::ConvertSoftmax(const Operation& operation, const Model& model, C
                                                             armnn::Optional<armnn::TensorInfo>(outputInfo));
 }
 
+bool HalPolicy::ConvertTanH(const Operation& operation, const Model& model, ConversionData& data)
+{
+    ALOGV("hal_1_2::HalPolicy::ConvertTanH()");
+    return ::ConvertTanH<hal_1_2::HalPolicy>(operation, model, data);
+}
+
 } // namespace hal_1_2
 } // namespace armnn_driver
-- 
cgit v1.2.1
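
For context, the patch narrows the HAL 1.0 fallback: RELU, RELU1, RELU6, and TANH no longer return true from HandledByV1_0(), so hal_1_2::HalPolicy::ConvertOperation() converts them directly and can support dynamic output shapes. The snippet below is a minimal, self-contained sketch of that two-stage dispatch; the OperationType enum and the Convert* stubs are simplified stand-ins for illustration, not the driver's real types or helpers.

    // Sketch only: simplified stand-ins assuming the same dispatch pattern as
    // HandledByV1_0() / ConvertOperation() in 1.2/HalPolicy.cpp.
    #include <cstdio>

    enum class OperationType { LSTM, RELU, TANH };

    // After the patch, RELU and TANH no longer fall through to the 1.0 path.
    bool HandledByV1_0(OperationType type)
    {
        switch (type)
        {
            case OperationType::LSTM:   // still forwarded to the 1.0 converter
                return true;
            default:
                return false;           // RELU and TANH stay on the 1.2 path
        }
    }

    // Stand-ins for the converters involved in the routing change.
    bool ConvertReLu()    { std::puts("hal_1_2 ConvertReLu");   return true; }
    bool ConvertTanH()    { std::puts("hal_1_2 ConvertTanH");   return true; }
    bool ConvertViaV1_0() { std::puts("forwarded to hal_1_0");  return true; }

    bool ConvertOperation(OperationType type)
    {
        if (HandledByV1_0(type))
        {
            return ConvertViaV1_0();
        }
        switch (type)
        {
            case OperationType::RELU: return ConvertReLu();
            case OperationType::TANH: return ConvertTanH();
            default:                  return false; // unsupported in this sketch
        }
    }

    int main()
    {
        ConvertOperation(OperationType::RELU); // handled by the 1.2 policy itself
        ConvertOperation(OperationType::LSTM); // still delegated to 1.0
    }

Built with any C++11 compiler, the sketch prints which policy handles each operation, mirroring the routing change in the diff above.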