author      Colm Donelan <Colm.Donelan@arm.com>    2020-02-26 15:39:23 +0000
committer   Colm Donelan <colm.donelan@arm.com>    2020-03-02 21:31:49 +0000
commit      03fbeaf532f2575381edc2336f834973117f6e0f (patch)
tree        97585ec459e4069853c16468ca82513f0d899200 /src/backends
parent      c9ea45adefdde2890e9aa191a5b31563a3dd35ea (diff)
download    armnn-03fbeaf532f2575381edc2336f834973117f6e0f.tar.gz
IVGCVSW-4440 : Add HARD_SWISH Support to Activation in CpuRef
* Add a new Activation type of HardSwish.
* Add CpuRef support and tests.

Signed-off-by: Colm Donelan <Colm.Donelan@arm.com>
Change-Id: I68c3840aa45b7a27d5e416a5d50fe8f99f003ce8
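For reference, the function being added is hard_swish(x) = x * relu6(x + 3) / 6, where relu6(x) = min(max(x, 0), 6). The standalone C++ sketch below is not part of the patch; HardSwishSketch and the small main() are written here only for illustration of that formula:

// Minimal sketch of the HardSwish formula this patch implements.
// hard_swish(x) = x * relu6(x + 3) / 6, relu6(x) = min(max(x, 0), 6).
#include <algorithm>
#include <cstdio>
#include <initializer_list>

float HardSwishSketch(float x)
{
    float relu6 = std::min(std::max(x + 3.0f, 0.0f), 6.0f);
    return x * relu6 / 6.0f;
}

int main()
{
    // Spot-check values drawn from the range exercised by the new tests.
    for (float x : {-4.0f, -0.1f, 0.3f, 4.0f})
    {
        std::printf("hard_swish(%g) = %g\n", x, HardSwishSketch(x));
    }
    return 0;
}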
Diffstat (limited to 'src/backends')
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp | 63
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp | 16
-rw-r--r--  src/backends/reference/RefLayerSupport.cpp                         |  1
-rw-r--r--  src/backends/reference/test/RefLayerTests.cpp                       |  4
-rw-r--r--  src/backends/reference/workloads/Activation.cpp                     |  8
5 files changed, 92 insertions(+), 0 deletions(-)
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
index 1b6e782060..6993b9e9b1 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
@@ -1076,6 +1076,69 @@ LayerTestResult<int16_t, 4> EluInt16Test(
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
+LayerTestResult<T, 4> HardSwishTestCommon(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ float qScale,
+ int32_t qOffset)
+{
+ std::vector<float> inputData = {
+ -0.1f, -0.2f, -0.3f, -0.4f,
+ 0.1f, 0.2f, 0.3f, 0.4f,
+ -1.0f, -2.0f, -3.0f, -4.0f,
+ 1.0f, 2.0f, 3.0f, 4.0f
+ };
+ // Calculate output values for input.
+ auto f = [](float x)
+ {
+ // Break down the calculation to help with verification.
+ // hard_swish(x) = x * relu6(x+3) / 6
+ // relu6(x) = min(max(x,0),6)
+ float reLu6_step1 = std::max((x + 3),0.0f);
+ float reLu6Complete = std::min(reLu6_step1, 6.0f);
+ float hardSwish_step1 = x * reLu6Complete;
+ float result = hardSwish_step1 / 6;
+ return result;
+ };
+ std::vector<float> outputExpectedData(inputData.size());
+ std::transform(inputData.begin(), inputData.end(), outputExpectedData.begin(), f);
+
+ return SimpleActivationTest<ArmnnType>(workloadFactory,
+ memoryManager,
+ armnn::ActivationFunction::HardSwish,
+ 0.f,
+ 0.f,
+ qScale,
+ qOffset,
+ inputData,
+ qScale,
+ qOffset,
+ outputExpectedData);
+}
+
+LayerTestResult<float, 4> HardSwishTest(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+ return HardSwishTestCommon<armnn::DataType::Float32>(workloadFactory, memoryManager, 0.1f, 0);
+}
+
+LayerTestResult<uint8_t, 4> HardSwishUint8Test(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+ return HardSwishTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.1f, 64);
+}
+
+LayerTestResult<int16_t, 4> HardSwishInt16Test(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+ return HardSwishTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
+}
+
+
+template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T,4> CompareActivationTestImpl(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
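To spot-check the expected-output lambda in the hunk above: for x = -4.0f, relu6(-4 + 3) = min(max(-1, 0), 6) = 0, so hard_swish(-4) = -4 * 0 / 6 = 0; for x = 4.0f, relu6(4 + 3) = 6, so hard_swish(4) = 4 * 6 / 6 = 4; and for x = 0.3f, hard_swish(0.3) = 0.3 * 3.3 / 6 = 0.165.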
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
index 28301188d5..2bd517180f 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
@@ -217,6 +217,22 @@ LayerTestResult<int16_t, 4> SquareInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
//
+// HardSwish
+//
+
+LayerTestResult<float, 4> HardSwishTest(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+LayerTestResult<uint8_t, 4> HardSwishUint8Test(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+LayerTestResult<int16_t, 4> HardSwishInt16Test(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+//
// Other
//
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index 25334c3b52..7d5c3b509e 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -109,6 +109,7 @@ bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
case ActivationFunction::Abs:
case ActivationFunction::BoundedReLu:
case ActivationFunction::Elu:
+ case ActivationFunction::HardSwish:
case ActivationFunction::LeakyReLu:
case ActivationFunction::Linear:
case ActivationFunction::ReLu:
diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp
index ed2b995bd5..40bf600331 100644
--- a/src/backends/reference/test/RefLayerTests.cpp
+++ b/src/backends/reference/test/RefLayerTests.cpp
@@ -466,6 +466,10 @@ ARMNN_AUTO_TEST_CASE(TanhInt16, TanhInt16Test)
ARMNN_AUTO_TEST_CASE(Elu, EluTest)
ARMNN_AUTO_TEST_CASE(EluUint8, EluUint8Test)
ARMNN_AUTO_TEST_CASE(EluInt16, EluInt16Test)
+// HardSwish Activation
+ARMNN_AUTO_TEST_CASE(HardSwish, HardSwishTest)
+ARMNN_AUTO_TEST_CASE(HardSwishUint8, HardSwishUint8Test)
+ARMNN_AUTO_TEST_CASE(HardSwishInt16, HardSwishInt16Test)
// Fully Connected
ARMNN_AUTO_TEST_CASE(SimpleFullyConnected, FullyConnectedFloat32Test, false, false)
diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp
index 82dd919de9..798c6e48d5 100644
--- a/src/backends/reference/workloads/Activation.cpp
+++ b/src/backends/reference/workloads/Activation.cpp
@@ -9,6 +9,7 @@
namespace armnn
{
+
float Activation(float in,
ActivationFunction function,
float a,
@@ -74,6 +75,13 @@ float Activation(float in,
output = (in >= 0) ? in : a * (expf(in) - 1);
break;
}
+ case ActivationFunction::HardSwish:
+ {
+ // hard_swish(x) = x * relu6(x+3) / 6
+ // relu6(x) = min(max(x,0),6)
+ output = in * (std::min(std::max((in + 3),0.0f),6.0f)) / 6;
+ break;
+ }
default:
{
throw InvalidArgumentException("Unsupported activation function");
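The Uint8 and Int16 test variants above pass quantization parameters (qScale 0.1 with qOffset 64 or 0) into SimpleActivationTest, while the Activation() function in Activation.cpp operates on float. Below is a minimal sketch of how a quantized input could relate to that float computation, assuming the usual affine scheme real = scale * (q - offset); Dequantize, Quantize and main() are hypothetical illustration code, not Arm NN API:

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Hypothetical helpers for the affine quantization assumed above.
float Dequantize(uint8_t q, float scale, int32_t offset)
{
    return scale * (static_cast<int32_t>(q) - offset);
}

uint8_t Quantize(float real, float scale, int32_t offset)
{
    int32_t q = static_cast<int32_t>(std::round(real / scale)) + offset;
    return static_cast<uint8_t>(std::min(std::max(q, 0), 255));
}

int main()
{
    const float scale = 0.1f;   // qScale used by HardSwishUint8Test
    const int32_t offset = 64;  // qOffset used by HardSwishUint8Test

    uint8_t qIn = Quantize(0.3f, scale, offset);                    // encode a sample input
    float x = Dequantize(qIn, scale, offset);                       // back to float
    float y = x * std::min(std::max(x + 3.0f, 0.0f), 6.0f) / 6.0f;  // HardSwish, as in Activation.cpp
    uint8_t qOut = Quantize(y, scale, offset);

    std::printf("qIn=%d x=%g y=%g qOut=%d\n",
                static_cast<int>(qIn), x, y, static_cast<int>(qOut));
    return 0;
}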