Diffstat (limited to 'src/armnn/backends/RefWorkloads/Softmax.cpp')
-rw-r--r--  src/armnn/backends/RefWorkloads/Softmax.cpp  49
1 file changed, 0 insertions(+), 49 deletions(-)
diff --git a/src/armnn/backends/RefWorkloads/Softmax.cpp b/src/armnn/backends/RefWorkloads/Softmax.cpp
deleted file mode 100644
index 4f1016e86c..0000000000
--- a/src/armnn/backends/RefWorkloads/Softmax.cpp
+++ /dev/null
@@ -1,49 +0,0 @@
-//
-// Copyright © 2017 Arm Ltd. All rights reserved.
-// SPDX-License-Identifier: MIT
-//
-
-#include "Softmax.hpp"
-
-#include <cmath>
-#include <vector>
-
-namespace armnn
-{
-
-/// Computes the softmax function over the inputs, writing the results to the outputs, with the shape given by tensorInfo.
-void Softmax(const float* in, float* out, const TensorInfo& tensorInfo, float beta)
-{
- unsigned int numChannels = tensorInfo.GetShape()[1];
- for (unsigned int n = 0; n < tensorInfo.GetShape()[0]; n++)
- {
- // Find the maximum value across channels.
- float max = in[n * numChannels];
- for (unsigned int c = 1; c < numChannels; c++)
- {
- float val = in[n * numChannels + c];
- if (val > max)
- {
- max = val;
- }
- }
-
- // Exponentiate all values and sum.
- std::vector<float> exponentials(numChannels);
- float sum = 0.0f;
- for (unsigned int c = 0; c < numChannels; c++)
- {
- float val = in[n * numChannels + c];
- exponentials[c] = expf((val - max) * beta);
- sum += exponentials[c];
- }
-
- // Divide exponentials by sum to give outputs.
- for (unsigned int c = 0; c < numChannels; c++)
- {
- out[n * numChannels + c] = exponentials[c] / sum;
- }
- }
-}
-
-} //namespace armnn
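
For reference, the deleted kernel is the standard numerically stable softmax: it subtracts the per-row maximum before exponentiating, which keeps every argument to expf() non-positive and so avoids overflow, and the result is mathematically unchanged because the common factor expf(-max * beta) cancels between numerator and denominator. Below is a minimal standalone sketch of the same technique, assuming a row-major [batch, channels] float layout; the function name SoftmaxSketch and its explicit size parameters are illustrative stand-ins for the TensorInfo-based signature above, not an Arm NN API.

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Minimal sketch of the numerically stable softmax from the deleted file.
// SoftmaxSketch is a hypothetical standalone name; batch/channels replace
// the TensorInfo shape, assuming row-major [batch, channels] storage.
void SoftmaxSketch(const float* in, float* out,
                   unsigned int batch, unsigned int channels, float beta)
{
    for (unsigned int n = 0; n < batch; n++)
    {
        const float* row = in + n * channels;
        float* outRow = out + n * channels;

        // Row maximum: shifting by it keeps every exponent argument <= 0,
        // so expf() cannot overflow.
        float max = row[0];
        for (unsigned int c = 1; c < channels; c++)
        {
            max = std::max(max, row[c]);
        }

        // Exponentiate the shifted, beta-scaled values and accumulate the sum.
        float sum = 0.0f;
        for (unsigned int c = 0; c < channels; c++)
        {
            outRow[c] = expf((row[c] - max) * beta);
            sum += outRow[c];
        }

        // Normalize so each row sums to 1.
        for (unsigned int c = 0; c < channels; c++)
        {
            outRow[c] /= sum;
        }
    }
}

int main()
{
    // The second row would overflow a naive expf() without the max shift.
    const std::vector<float> in = {1.0f, 2.0f, 3.0f, 1000.0f, 1001.0f, 1002.0f};
    std::vector<float> out(in.size());
    SoftmaxSketch(in.data(), out.data(), 2, 3, 1.0f);
    for (float v : out)
    {
        std::printf("%f\n", v);
    }
    return 0;
}

Both rows print the same distribution (roughly 0.090, 0.245, 0.665), since softmax is invariant to a constant shift of its inputs; that invariance is exactly what makes the max-subtraction safe.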