diff options
author | telsoa01 <telmo.soares@arm.com> | 2018-03-09 14:13:49 +0000 |
---|---|---|
committer | telsoa01 <telmo.soares@arm.com> | 2018-03-09 14:13:49 +0000 |
commit | 4fcda0101ec3d110c1d6d7bee5c83416b645528a (patch) | |
tree | c9a70aeb2887006160c1b3d265c27efadb7bdbae /src/armnn/backends/RefWorkloads/Activation.cpp | |
download | armnn-4fcda0101ec3d110c1d6d7bee5c83416b645528a.tar.gz |
Release 18.02
Change-Id: Id3c11dc5ee94ef664374a988fcc6901e9a232fa6
Diffstat (limited to 'src/armnn/backends/RefWorkloads/Activation.cpp')
-rw-r--r-- | src/armnn/backends/RefWorkloads/Activation.cpp | 91 |
1 file changed, 91 insertions, 0 deletions
diff --git a/src/armnn/backends/RefWorkloads/Activation.cpp b/src/armnn/backends/RefWorkloads/Activation.cpp new file mode 100644 index 0000000000..ede283cbf9 --- /dev/null +++ b/src/armnn/backends/RefWorkloads/Activation.cpp @@ -0,0 +1,91 @@ +// +// Copyright © 2017 Arm Ltd. All rights reserved. +// See LICENSE file in the project root for full license information. +// + +#include "Activation.hpp" + +#include <boost/log/trivial.hpp> + +#include <cmath> + +namespace armnn +{ + +void Activation(const float* in, + float* out, + const TensorInfo& tensorInfo, + ActivationFunction function, + float a, + float b) +{ + for (size_t i = 0; i<tensorInfo.GetNumElements(); i++) + { + float input = in[i]; + float output; + + // compute the result of the activation function + switch (function) + { + case ActivationFunction::Linear: + { + output = a * input + b; + break; + } + case ActivationFunction::Sigmoid: + { + output = 1.f / (1.f + expf(-input)); + break; + } + case ActivationFunction::ReLu: + { + output = std::max(0.f, input); + break; + } + case ActivationFunction::BoundedReLu: + { + output = std::min(a, std::max(b, input)); + break; + } + case ActivationFunction::SoftReLu: + { + output = logf(1.0f + expf(input)); + break; + } + case ActivationFunction::LeakyReLu: + { + output = input > 0.0f ? input : (input * a); + break; + } + case ActivationFunction::Abs: + { + output = input < 0 ? -input : input; + break; + } + case ActivationFunction::Sqrt: + { + output = sqrtf(input); + break; + } + case ActivationFunction::Square: + { + output = input * input; + break; + } + case ActivationFunction::TanH: + { + output = a * tanhf(b * input); + break; + } + default: + { + BOOST_LOG_TRIVIAL(error) << "Unsupported activation function"; + return; + } + } + + out[i] = output; + } +} + +} //namespace armnn |