diff options
author | Nattapat Chaimanowong <nattapat.chaimanowong@arm.com> | 2019-04-24 16:19:57 +0100 |
---|---|---|
committer | Matteo Martincigh <matteo.martincigh@arm.com> | 2019-04-29 11:04:20 +0000 |
commit | ae2c5f0350a7033f58578f9c509345445a639865 (patch) | |
tree | 4fd6266fc7d917130a0087938eafcadd1b7f31be /src/backends/reference/workloads/Activation.cpp | |
parent | bb73e576e75fde75acb28362fb7be194dfcbcada (diff) | |
download | armnn-ae2c5f0350a7033f58578f9c509345445a639865.tar.gz |
IVGCVSW-2982 Refactor reference Activation workload
Change-Id: Ia3b9a56787cc68822a3c1635de82e03ecc0aae27
Signed-off-by: Nattapat Chaimanowong <nattapat.chaimanowong@arm.com>
Diffstat (limited to 'src/backends/reference/workloads/Activation.cpp')
-rw-r--r-- | src/backends/reference/workloads/Activation.cpp | 150 |
1 file changed, 86 insertions(+), 64 deletions(-)
diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp index ef4903074b..760c9a0ccd 100644 --- a/src/backends/reference/workloads/Activation.cpp +++ b/src/backends/reference/workloads/Activation.cpp @@ -11,6 +11,91 @@ namespace armnn { +float Activation(float in, + ActivationFunction function, + float a, + float b) +{ + float output; + + // Compute the result of the activation function. + switch (function) + { + case ActivationFunction::Linear: + { + output = a * in + b; + break; + } + case ActivationFunction::Sigmoid: + { + output = 1.f / (1.f + expf(-in)); + break; + } + case ActivationFunction::ReLu: + { + output = std::max(0.f, in); + break; + } + case ActivationFunction::BoundedReLu: + { + output = std::min(a, std::max(b, in)); + break; + } + case ActivationFunction::SoftReLu: + { + output = logf(1.0f + expf(in)); + break; + } + case ActivationFunction::LeakyReLu: + { + output = in > 0.0f ? in : (in * a); + break; + } + case ActivationFunction::Abs: + { + output = in < 0 ? -in : in; + break; + } + case ActivationFunction::Sqrt: + { + output = sqrtf(in); + break; + } + case ActivationFunction::Square: + { + output = in * in; + break; + } + case ActivationFunction::TanH: + { + output = a * tanhf(b * in); + break; + } + default: + { + throw InvalidArgumentException("Unsupported activation function"); + } + } + + return output; +} + + +void Activation(Decoder<float>& in, + Encoder<float>& out, + const TensorInfo& tensorInfo, + ActivationFunction function, + float a, + float b) +{ + for (size_t i = 0; i<tensorInfo.GetNumElements(); i++) + { + out.Set(Activation(in.Get(), function, a, b)); + + ++in; + ++out; + } +} void Activation(const float* in, float* out, @@ -21,70 +106,7 @@ void Activation(const float* in, { for (size_t i = 0; i<tensorInfo.GetNumElements(); i++) { - float input = in[i]; - float output; - - // Compute the result of the activation function. 
- switch (function) - { - case ActivationFunction::Linear: - { - output = a * input + b; - break; - } - case ActivationFunction::Sigmoid: - { - output = 1.f / (1.f + expf(-input)); - break; - } - case ActivationFunction::ReLu: - { - output = std::max(0.f, input); - break; - } - case ActivationFunction::BoundedReLu: - { - output = std::min(a, std::max(b, input)); - break; - } - case ActivationFunction::SoftReLu: - { - output = logf(1.0f + expf(input)); - break; - } - case ActivationFunction::LeakyReLu: - { - output = input > 0.0f ? input : (input * a); - break; - } - case ActivationFunction::Abs: - { - output = input < 0 ? -input : input; - break; - } - case ActivationFunction::Sqrt: - { - output = sqrtf(input); - break; - } - case ActivationFunction::Square: - { - output = input * input; - break; - } - case ActivationFunction::TanH: - { - output = a * tanhf(b * input); - break; - } - default: - { - BOOST_LOG_TRIVIAL(error) << "Unsupported activation function"; - return; - } - } - - out[i] = output; + out[i] = Activation(in[i], function, a, b); } } |