Diffstat (limited to 'src/armnn/backends/RefWorkloads/Activation.cpp')
 src/armnn/backends/RefWorkloads/Activation.cpp | 91 ----------
 1 file changed, 0 insertions(+), 91 deletions(-)
diff --git a/src/armnn/backends/RefWorkloads/Activation.cpp b/src/armnn/backends/RefWorkloads/Activation.cpp
deleted file mode 100644
index ef4903074b..0000000000
--- a/src/armnn/backends/RefWorkloads/Activation.cpp
+++ /dev/null
@@ -1,91 +0,0 @@
-//
-// Copyright © 2017 Arm Ltd. All rights reserved.
-// SPDX-License-Identifier: MIT
-//
-
-#include "Activation.hpp"
-
-#include <boost/log/trivial.hpp>
-
-#include <cmath>
-
-namespace armnn
-{
-
-void Activation(const float* in,
-                float* out,
-                const TensorInfo& tensorInfo,
-                ActivationFunction function,
-                float a,
-                float b)
-{
-    for (size_t i = 0; i<tensorInfo.GetNumElements(); i++)
-    {
-        float input = in[i];
-        float output;
-
-        // Compute the result of the activation function.
-        switch (function)
-        {
-            case ActivationFunction::Linear:
-            {
-                output = a * input + b;
-                break;
-            }
-            case ActivationFunction::Sigmoid:
-            {
-                output = 1.f / (1.f + expf(-input));
-                break;
-            }
-            case ActivationFunction::ReLu:
-            {
-                output = std::max(0.f, input);
-                break;
-            }
-            case ActivationFunction::BoundedReLu:
-            {
-                output = std::min(a, std::max(b, input));
-                break;
-            }
-            case ActivationFunction::SoftReLu:
-            {
-                output = logf(1.0f + expf(input));
-                break;
-            }
-            case ActivationFunction::LeakyReLu:
-            {
-                output = input > 0.0f ? input : (input * a);
-                break;
-            }
-            case ActivationFunction::Abs:
-            {
-                output = input < 0 ? -input : input;
-                break;
-            }
-            case ActivationFunction::Sqrt:
-            {
-                output = sqrtf(input);
-                break;
-            }
-            case ActivationFunction::Square:
-            {
-                output = input * input;
-                break;
-            }
-            case ActivationFunction::TanH:
-            {
-                output = a * tanhf(b * input);
-                break;
-            }
-            default:
-            {
-                BOOST_LOG_TRIVIAL(error) << "Unsupported activation function";
-                return;
-            }
-        }
-
-        out[i] = output;
-    }
-}
-
-} //namespace armnn
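For context: the removed kernel applies one scalar activation per tensor element, with 'a' and 'b' acting as function-specific parameters. Two of the parameter roles are easy to misread in the code above: for BoundedReLu, 'a' is the upper bound and 'b' the lower bound, and for TanH the result is a * tanh(b * x). The standalone C++ sketch below reproduces that per-element math without any armnn dependencies; the helper names mirror the ActivationFunction enum values for readability but are illustrative, not armnn API.

    #include <algorithm>
    #include <cmath>
    #include <cstdio>

    // Standalone sketch of the per-element math from the removed kernel.
    // BoundedReLu clamps x into [b, a] (note: 'a' is the UPPER bound).
    float BoundedReLu(float x, float a, float b)
    {
        return std::min(a, std::max(b, x));
    }

    // TanH scales both the argument and the result: a * tanh(b * x).
    float TanH(float x, float a, float b)
    {
        return a * std::tanh(b * x);
    }

    int main()
    {
        const float inputs[] = { -2.0f, -0.5f, 0.0f, 0.5f, 2.0f };
        for (float x : inputs)
        {
            // ReLU6-style clamp: upper bound a = 6, lower bound b = 0.
            std::printf("BoundedReLu(%+.1f) = %.1f\n", x, BoundedReLu(x, 6.0f, 0.0f));
        }
        return 0;
    }

Compiled with any C++11 compiler, this prints 0.0 for the negative inputs and passes values inside [0, 6] through unchanged, matching the std::min(a, std::max(b, input)) expression in the deleted switch case.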