aboutsummaryrefslogtreecommitdiff
path: root/src/backends/reference/workloads/Softmax.cpp
blob: 6cb219a6cc88aafdd502f863d37bdc807d2f41b7 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Softmax.hpp"

#include <algorithm>
#include <cmath>
#include <vector>

namespace armnn
{

/// Computes the softmax function on some inputs, into outputs, with a shape given by tensorInfo.
/// Softmax is applied independently per batch entry across the channel dimension:
///     out[n][c] = exp((in[n][c] - max_c(in[n])) * beta) / sum_c(exp((in[n][c] - max_c(in[n])) * beta))
/// Only dimensions 0 (batch) and 1 (channels) of the shape are read, i.e. the data is
/// treated as a 2D [batchSize, numChannels] tensor.
/// @param in              Decoder over the input values; operator[] seeks to a flat element index.
/// @param out             Encoder over the output values; operator[] seeks, Set() writes.
/// @param inputTensorInfo Shape information for the input; supplies batch and channel counts.
/// @param beta            Scaling factor applied to the max-shifted inputs before exponentiation.
void Softmax(Decoder<float>& in, Encoder<float>& out, const TensorInfo& inputTensorInfo, float beta)
{
    // Hoist the shape lookups out of the loops - they are loop-invariant.
    const unsigned int batchSize   = inputTensorInfo.GetShape()[0];
    const unsigned int numChannels = inputTensorInfo.GetShape()[1];

    for (unsigned int n = 0; n < batchSize; n++)
    {
        // Find the maximum channel value. Subtracting it before exponentiating keeps
        // expf's argument <= 0, avoiding overflow without changing the result.
        in[n * numChannels];
        float maxValue = in.Get();
        for (unsigned int c = 1; c < numChannels; c++)
        {
            in[n * numChannels + c];
            maxValue = std::max(maxValue, in.Get());
        }

        // Exponentiate all (max-shifted, beta-scaled) values and accumulate their sum.
        std::vector<float> exponentials(numChannels);
        float              sum = 0.0f;
        for (unsigned int c = 0; c < numChannels; c++)
        {
            in[n * numChannels + c];
            exponentials[c] = expf((in.Get() - maxValue) * beta);
            sum += exponentials[c];
        }

        // Normalise: divide each exponential by the sum so the outputs total 1 per batch entry.
        for (unsigned int c = 0; c < numChannels; c++)
        {
            out[n * numChannels + c];
            out.Set(exponentials[c] / sum);
        }
    }
}

} //namespace armnn