diff options
Diffstat (limited to 'src/backends/reference')
 src/backends/reference/RefLayerSupport.cpp      | 1 +
 src/backends/reference/test/RefLayerTests.cpp   | 4 ++++
 src/backends/reference/workloads/Activation.cpp | 8 ++++++++
3 files changed, 13 insertions, 0 deletions
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp index 25334c3b52..7d5c3b509e 100644 --- a/src/backends/reference/RefLayerSupport.cpp +++ b/src/backends/reference/RefLayerSupport.cpp @@ -109,6 +109,7 @@ bool RefLayerSupport::IsActivationSupported(const TensorInfo& input, case ActivationFunction::Abs: case ActivationFunction::BoundedReLu: case ActivationFunction::Elu: + case ActivationFunction::HardSwish: case ActivationFunction::LeakyReLu: case ActivationFunction::Linear: case ActivationFunction::ReLu: diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp index ed2b995bd5..40bf600331 100644 --- a/src/backends/reference/test/RefLayerTests.cpp +++ b/src/backends/reference/test/RefLayerTests.cpp @@ -466,6 +466,10 @@ ARMNN_AUTO_TEST_CASE(TanhInt16, TanhInt16Test) ARMNN_AUTO_TEST_CASE(Elu, EluTest) ARMNN_AUTO_TEST_CASE(EluUint8, EluUint8Test) ARMNN_AUTO_TEST_CASE(EluInt16, EluInt16Test) +// HardSwish Activation +ARMNN_AUTO_TEST_CASE(HardSwish, HardSwishTest) +ARMNN_AUTO_TEST_CASE(HardSwishUint8, HardSwishUint8Test) +ARMNN_AUTO_TEST_CASE(HardSwishInt16, HardSwishInt16Test) // Fully Connected ARMNN_AUTO_TEST_CASE(SimpleFullyConnected, FullyConnectedFloat32Test, false, false) diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp index 82dd919de9..798c6e48d5 100644 --- a/src/backends/reference/workloads/Activation.cpp +++ b/src/backends/reference/workloads/Activation.cpp @@ -9,6 +9,7 @@ namespace armnn { + float Activation(float in, ActivationFunction function, float a, @@ -74,6 +75,13 @@ float Activation(float in, output = (in >= 0) ? 
in : a * (expf(in) - 1); break; } + case ActivationFunction::HardSwish: + { + // hard_swish(x) = x * relu6(x+3) / 6 + // relu6(x) = min(max(x,0),6) + output = in * (std::min(std::max((in + 3),0.0f),6.0f)) / 6; + break; + } default: { throw InvalidArgumentException("Unsupported activation function"); |