author     David Monahan <david.monahan@arm.com>  2020-02-25 09:03:29 +0000
committer  David Monahan <david.monahan@arm.com>  2020-02-28 09:47:17 +0000
commit     3b3c381963a1bfe12e083928a3abb5a9852b199b (patch)
tree       7f6b2d7416240490172e01070953d949d32b4407
parent     dd3f71b64072c44cec65a7a883d0c3a29659645c (diff)
download   armnn-3b3c381963a1bfe12e083928a3abb5a9852b199b.tar.gz
IVGCVSW-4439: Adding Elu support to Activation
* Added CpuRef implementation
* Added Unit Tests
* Added Quantizer Test
* Enabled Tests for Neon and CL backends on fp32 only
* Added to Serializer

Signed-off-by: David Monahan <david.monahan@arm.com>
Change-Id: Ic23e1797dbc9352b40678c389d7fe2b836b582ea
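For orientation before the diff: the reference backend below implements Elu in its standard form, f(x) = x for x >= 0 and f(x) = a * (e^x - 1) otherwise, where a is the activation descriptor's m_A (alpha). A minimal standalone restatement of that formula, assuming only the hunk added to src/backends/reference/workloads/Activation.cpp in this change (the helper name EluRef is illustrative, not an Arm NN symbol):

#include <cmath>
#include <cstdio>

// Restates the Elu formula this commit adds to the reference backend.
// 'a' is the layer descriptor's m_A parameter (alpha).
float EluRef(float in, float a)
{
    // Identity for non-negative inputs; exponential decay towards -a
    // for negative inputs.
    return (in >= 0.0f) ? in : a * (std::exp(in) - 1.0f);
}

int main()
{
    // Mirrors the expectation computed by EluTestCommon with a = 0.01f.
    std::printf("EluRef(-1.0f, 0.01f) = %f\n", EluRef(-1.0f, 0.01f));
    std::printf("EluRef( 2.0f, 0.01f) = %f\n", EluRef(2.0f, 0.01f));
}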
-rw-r--r--  include/armnn/Types.hpp                                             |  3
-rw-r--r--  include/armnn/TypesUtils.hpp                                        |  1
-rw-r--r--  src/armnn/test/QuantizerTest.cpp                                    | 56
-rw-r--r--  src/armnnDeserializer/Deserializer.cpp                              |  2
-rw-r--r--  src/armnnSerializer/ArmnnSchema.fbs                                 |  3
-rw-r--r--  src/armnnSerializer/Serializer.cpp                                  |  2
-rw-r--r--  src/backends/aclCommon/ArmComputeUtils.hpp                          |  1
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp  | 58
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp  | 16
-rw-r--r--  src/backends/cl/test/ClLayerTests.cpp                               |  3
-rw-r--r--  src/backends/neon/test/NeonLayerTests.cpp                           |  3
-rw-r--r--  src/backends/reference/RefLayerSupport.cpp                          |  1
-rw-r--r--  src/backends/reference/test/RefLayerTests.cpp                       |  4
-rw-r--r--  src/backends/reference/workloads/Activation.cpp                     |  5
14 files changed, 156 insertions, 2 deletions
diff --git a/include/armnn/Types.hpp b/include/armnn/Types.hpp
index af331a227f..b00a8bf986 100644
--- a/include/armnn/Types.hpp
+++ b/include/armnn/Types.hpp
@@ -62,7 +62,8 @@ enum class ActivationFunction
LeakyReLu = 6,
Abs = 7,
Sqrt = 8,
- Square = 9
+ Square = 9,
+ Elu = 10
};
enum class ArgMinMaxFunction
diff --git a/include/armnn/TypesUtils.hpp b/include/armnn/TypesUtils.hpp
index bf54c15ef8..eca8a5e233 100644
--- a/include/armnn/TypesUtils.hpp
+++ b/include/armnn/TypesUtils.hpp
@@ -38,6 +38,7 @@ constexpr char const* GetActivationFunctionAsCString(ActivationFunction activati
case ActivationFunction::Abs: return "Abs";
case ActivationFunction::Sqrt: return "Sqrt";
case ActivationFunction::Square: return "Square";
+ case ActivationFunction::Elu: return "Elu";
default: return "Unknown";
}
}
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index faadf24892..6d5d212fc9 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -707,6 +707,62 @@ BOOST_AUTO_TEST_CASE(QuantizeLeakyReLuActivation)
VisitLayersTopologically(quantizedNetworkQSymmS16.get(), validatorQSymmS16);
}
+
+BOOST_AUTO_TEST_CASE(QuantizeELuActivation)
+{
+ class TestEluActivationQuantization : public TestQuantization
+ {
+ public:
+ TestEluActivationQuantization(const TensorShape& inputShape, const TensorShape& outputShape)
+ : TestQuantization(inputShape, outputShape) {}
+
+ TestEluActivationQuantization(const QuantizerOptions& options,
+ const TensorShape& inputShape,
+ const TensorShape& outputShape)
+ : TestQuantization(options, inputShape, outputShape) {}
+
+ void VisitActivationLayer(const IConnectableLayer* layer,
+ const ActivationDescriptor& descriptor,
+ const char* name = nullptr) override
+ {
+ boost::ignore_unused(descriptor, name);
+ TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
+
+ // Based off default static range [-15.0f, 15.0f]
+ TestQuantizationParams(
+ info, {30.0f / g_AsymmU8QuantizationBase, 128},
+ {30.0f / g_AsymmS8QuantizationBase, 0},
+ {15.0f / g_SymmS8QuantizationBase, 0},
+ {15.0f / g_SymmS16QuantizationBase, 0});
+ }
+ };
+
+ ActivationDescriptor descriptor;
+ descriptor.m_Function = ActivationFunction::Elu;
+
+ const TensorShape shape{1U};
+ INetworkPtr network = CreateNetworkWithActivationLayer(descriptor, shape);
+
+ INetworkPtr quantizedNetworkQAsymmU8 = INetworkQuantizer::Create(network.get())->ExportNetwork();
+ TestEluActivationQuantization validatorQAsymmU8(shape, shape);
+ VisitLayersTopologically(quantizedNetworkQAsymmU8.get(), validatorQAsymmU8);
+
+ const QuantizerOptions qAsymmS8Options(DataType::QAsymmS8);
+ INetworkPtr quantizedNetworkQAsymmS8 = INetworkQuantizer::Create(network.get(), qAsymmS8Options)->ExportNetwork();
+ TestEluActivationQuantization validatorQAsymmS8(qAsymmS8Options, shape, shape);
+ VisitLayersTopologically(quantizedNetworkQAsymmS8.get(), validatorQAsymmS8);
+
+ const QuantizerOptions qSymmS8Options(DataType::QSymmS8);
+ INetworkPtr quantizedNetworkQSymmS8 = INetworkQuantizer::Create(network.get(), qSymmS8Options)->ExportNetwork();
+ TestEluActivationQuantization validatorQSymmS8(qSymmS8Options, shape, shape);
+ VisitLayersTopologically(quantizedNetworkQSymmS8.get(), validatorQSymmS8);
+
+ const QuantizerOptions qSymmS16options(DataType::QSymmS16);
+ INetworkPtr quantizedNetworkQSymmS16 = INetworkQuantizer::Create(network.get(), qSymmS16options)->ExportNetwork();
+ TestEluActivationQuantization validatorQSymmS16(qSymmS16options, shape, shape);
+ VisitLayersTopologically(quantizedNetworkQSymmS16.get(), validatorQSymmS16);
+}
+
BOOST_AUTO_TEST_CASE(QuantizeBatchNorm)
{
class TestBatchNormalizationQuantization : public TestQuantization
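
The {scale, offset} pairs expected by QuantizeELuActivation above follow from the quantizer's default static activation range [-15.0f, 15.0f]: asymmetric schemes spread the full 30.0 width over the integer range and re-centre zero, while symmetric schemes map the 15.0 half-range with a zero offset. A hedged arithmetic sketch of that derivation, assuming the test fixture's quantization bases (255 for QAsymmU8/QAsymmS8, 127 for QSymmS8, 32767 for QSymmS16):

#include <cmath>
#include <cstdio>

int main()
{
    const float min = -15.0f, max = 15.0f;

    // Asymmetric QAsymmU8: full width over 255 steps, zero point at the
    // type's midpoint so that 0.0f quantizes exactly.
    const float asymmU8Scale  = (max - min) / 255.0f;                      // 30 / 255
    const int   asymmU8Offset =
        static_cast<int>(std::round(-min / asymmU8Scale));                 // 128

    // Symmetric schemes: half-range over the positive span, offset 0.
    const float symmS8Scale  = max / 127.0f;                               // 15 / 127
    const float symmS16Scale = max / 32767.0f;                             // 15 / 32767

    std::printf("u8: {%f, %d}  s8: {%f, 0}  s16: {%f, 0}\n",
                asymmU8Scale, asymmU8Offset, symmS8Scale, symmS16Scale);
}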
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 3bbd71a972..61a38f9cf3 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -421,6 +421,8 @@ armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFuncti
return armnn::ActivationFunction::Sqrt;
case armnnSerializer::ActivationFunction_Square:
return armnn::ActivationFunction::Square;
+ case armnnSerializer::ActivationFunction_Elu:
+ return armnn::ActivationFunction::Elu;
default:
return armnn::ActivationFunction::Sigmoid;
}
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index be6616d1e9..0697517a0f 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -19,7 +19,8 @@ enum ActivationFunction : byte {
LeakyReLu = 6,
Abs = 7,
Sqrt = 8,
- Square = 9
+ Square = 9,
+ Elu = 10
}
enum ArgMinMaxFunction : byte {
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index b43f26c652..3c01842c95 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -46,6 +46,8 @@ serializer::ActivationFunction GetFlatBufferActivationFunction(armnn::Activation
return serializer::ActivationFunction::ActivationFunction_Sqrt;
case armnn::ActivationFunction::Square:
return serializer::ActivationFunction::ActivationFunction_Square;
+ case armnn::ActivationFunction::Elu:
+ return serializer::ActivationFunction::ActivationFunction_Elu;
default:
return serializer::ActivationFunction::ActivationFunction_Sigmoid;
}
diff --git a/src/backends/aclCommon/ArmComputeUtils.hpp b/src/backends/aclCommon/ArmComputeUtils.hpp
index 4d690901c6..01a5445844 100644
--- a/src/backends/aclCommon/ArmComputeUtils.hpp
+++ b/src/backends/aclCommon/ArmComputeUtils.hpp
@@ -65,6 +65,7 @@ ConvertActivationFunctionToAclActivationFunction(ActivationFunction armnnFunctio
case ActivationFunction::Sqrt: return AclActivationFunction::SQRT;
case ActivationFunction::Square: return AclActivationFunction::SQUARE;
case ActivationFunction::TanH: return AclActivationFunction::TANH;
+ case ActivationFunction::Elu: return AclActivationFunction::ELU;
default: throw InvalidArgumentException("Unsupported activation function");
}
}
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
index 3f5e6c39e3..1b6e782060 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
@@ -1016,6 +1016,64 @@ LayerTestResult<int16_t, 4> TanhInt16Test(
}
+template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
+LayerTestResult<T, 4> EluTestCommon(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+ float qScale,
+ int32_t qOffset)
+{
+ std::vector<float> inputData = {
+ -0.1f, -0.2f, -0.3f, -0.4f,
+ 0.1f, 0.2f, 0.3f, 0.4f,
+ -1.0f, -2.0f, -3.0f, -4.0f,
+ 1.0f, 2.0f, 3.0f, 4.0f
+ };
+
+
+ const float a = 0.01f;
+ // Calculate output values for input.
+ auto f = [a](float value)
+ {
+ return (value >= 0) ? value : a * (expf(value) - 1);
+ };
+ std::vector<float> outputExpectedData(inputData.size());
+ std::transform(inputData.begin(), inputData.end(), outputExpectedData.begin(), f);
+
+ return SimpleActivationTest<ArmnnType>(workloadFactory,
+ memoryManager,
+ armnn::ActivationFunction::Elu,
+ a,
+ 0.0f,
+ qScale,
+ qOffset,
+ inputData,
+ qScale,
+ qOffset,
+ outputExpectedData);
+}
+
+LayerTestResult<float, 4> EluTest(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+ return EluTestCommon<armnn::DataType::Float32>(workloadFactory, memoryManager, 0.1f, 0);
+}
+
+LayerTestResult<uint8_t, 4> EluUint8Test(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+ return EluTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.1f, 64);
+}
+
+LayerTestResult<int16_t, 4> EluInt16Test(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+ return EluTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
+}
+
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T,4> CompareActivationTestImpl(
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
index 2251ceb8dd..28301188d5 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
@@ -149,6 +149,22 @@ LayerTestResult<int16_t, 4> LeakyReLuInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
//
+// Elu
+//
+
+LayerTestResult<float, 4> EluTest(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+LayerTestResult<uint8_t, 4> EluUint8Test(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+LayerTestResult<int16_t, 4> EluInt16Test(
+ armnn::IWorkloadFactory& workloadFactory,
+ const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+//
// Abs
//
diff --git a/src/backends/cl/test/ClLayerTests.cpp b/src/backends/cl/test/ClLayerTests.cpp
index 294c77bf7f..cfec81a4c3 100644
--- a/src/backends/cl/test/ClLayerTests.cpp
+++ b/src/backends/cl/test/ClLayerTests.cpp
@@ -39,6 +39,9 @@ ARMNN_AUTO_TEST_CASE(ConstantLinearActivation, ConstantLinearActivationTest)
ARMNN_AUTO_TEST_CASE(ReLu1Uint8, BoundedReLuUint8UpperAndLowerBoundTest)
ARMNN_AUTO_TEST_CASE(ReLu6Uint8, BoundedReLuUint8UpperBoundOnlyTest)
+// Elu Activation
+ARMNN_AUTO_TEST_CASE(Elu, EluTest)
+
ARMNN_AUTO_TEST_CASE(Logistic, SimpleSigmoidTest)
ARMNN_AUTO_TEST_CASE(LogisticUint8, SimpleSigmoidUint8Test)
diff --git a/src/backends/neon/test/NeonLayerTests.cpp b/src/backends/neon/test/NeonLayerTests.cpp
index 1a9d7a9c50..18658a34a1 100644
--- a/src/backends/neon/test/NeonLayerTests.cpp
+++ b/src/backends/neon/test/NeonLayerTests.cpp
@@ -418,6 +418,9 @@ ARMNN_AUTO_TEST_CASE(ConstantLinearActivation, ConstantLinearActivationTest)
ARMNN_AUTO_TEST_CASE(ReLu1Uint8, BoundedReLuUint8UpperAndLowerBoundTest)
ARMNN_AUTO_TEST_CASE(ReLu6Uint8, BoundedReLuUint8UpperBoundOnlyTest)
+// Elu Activation
+ARMNN_AUTO_TEST_CASE(Elu, EluTest)
+
// Sigmoid
ARMNN_AUTO_TEST_CASE(SimpleSigmoid, SimpleSigmoidTest)
ARMNN_AUTO_TEST_CASE(SimpleSigmoidUint8, SimpleSigmoidUint8Test)
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index 5cb36c4299..8f1f170c5c 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -108,6 +108,7 @@ bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
{
case ActivationFunction::Abs:
case ActivationFunction::BoundedReLu:
+ case ActivationFunction::Elu:
case ActivationFunction::LeakyReLu:
case ActivationFunction::Linear:
case ActivationFunction::ReLu:
diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp
index 99468e0006..d5c67ef6c7 100644
--- a/src/backends/reference/test/RefLayerTests.cpp
+++ b/src/backends/reference/test/RefLayerTests.cpp
@@ -462,6 +462,10 @@ ARMNN_AUTO_TEST_CASE(Tanh, TanhTest)
ARMNN_AUTO_TEST_CASE(TanhUint8, TanhUint8Test)
ARMNN_AUTO_TEST_CASE(TanhInt16, TanhInt16Test)
+// Elu Activation
+ARMNN_AUTO_TEST_CASE(Elu, EluTest)
+ARMNN_AUTO_TEST_CASE(EluUint8, EluUint8Test)
+ARMNN_AUTO_TEST_CASE(EluInt16, EluInt16Test)
// Fully Connected
ARMNN_AUTO_TEST_CASE(SimpleFullyConnected, FullyConnectedFloat32Test, false, false)
diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp
index 814a0ddd13..82dd919de9 100644
--- a/src/backends/reference/workloads/Activation.cpp
+++ b/src/backends/reference/workloads/Activation.cpp
@@ -69,6 +69,11 @@ float Activation(float in,
output = a * tanhf(b * in);
break;
}
+ case ActivationFunction::Elu:
+ {
+ output = (in >= 0) ? in : a * (expf(in) - 1);
+ break;
+ }
default:
{
throw InvalidArgumentException("Unsupported activation function");
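
With this change in place, Elu can be wired up like any other activation through the public graph API. A hedged usage sketch, not taken from the commit itself: the shape, alpha value, and layer names are illustrative, and per the commit message the Neon and CL backends currently exercise Elu for fp32 only, while CpuRef also covers the quantized paths.

#include <armnn/ArmNN.hpp>

int main()
{
    using namespace armnn;

    // Descriptor selecting the new activation; m_A is Elu's alpha.
    ActivationDescriptor desc;
    desc.m_Function = ActivationFunction::Elu;
    desc.m_A = 1.0f;

    INetworkPtr net = INetwork::Create();
    IConnectableLayer* input  = net->AddInputLayer(0);
    IConnectableLayer* elu    = net->AddActivationLayer(desc, "elu");
    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(elu->GetInputSlot(0));
    elu->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Illustrative 1x4 float tensor on both slots.
    TensorInfo info(TensorShape({1, 4}), DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(info);
    elu->GetOutputSlot(0).SetTensorInfo(info);
    return 0;
}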