aboutsummaryrefslogtreecommitdiff
path: root/src/armnn/backends/RefWorkloads/Softmax.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/armnn/backends/RefWorkloads/Softmax.cpp')
-rw-r--r--src/armnn/backends/RefWorkloads/Softmax.cpp49
1 file changed, 49 insertions, 0 deletions
diff --git a/src/armnn/backends/RefWorkloads/Softmax.cpp b/src/armnn/backends/RefWorkloads/Softmax.cpp
new file mode 100644
index 0000000000..58840e3076
--- /dev/null
+++ b/src/armnn/backends/RefWorkloads/Softmax.cpp
@@ -0,0 +1,49 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// See LICENSE file in the project root for full license information.
+//
+
+#include "Softmax.hpp"
+
+#include <cmath>
+#include <vector>
+
+namespace armnn
+{
+
+/// Computes the softmax function on some inputs, into outputs, with a shape given by tensorInfo
+void Softmax(const float* in, float* out, const TensorInfo& tensorInfo, float beta)
+{
+ unsigned int numChannels = tensorInfo.GetShape()[1];
+ for (unsigned int n = 0; n < tensorInfo.GetShape()[0]; n++)
+ {
+ // find maximum channel
+ float max = in[n * numChannels];
+ for (unsigned int c = 1; c < numChannels; c++)
+ {
+ float val = in[n * numChannels + c];
+ if (val > max)
+ {
+ max = val;
+ }
+ }
+
+ // exponentiate all values and sum
+ std::vector<float> exponentials(numChannels);
+ float sum = 0.0f;
+ for (unsigned int c = 0; c < numChannels; c++)
+ {
+ float val = in[n * numChannels + c];
+ exponentials[c] = expf((val - max) * beta);
+ sum += exponentials[c];
+ }
+
+ // divide exponentials by sum to give outputs
+ for (unsigned int c = 0; c < numChannels; c++)
+ {
+ out[n * numChannels + c] = exponentials[c] / sum;
+ }
+ }
+}
+
+} //namespace armnn