about summary refs log tree commit diff
path: root/src/armnn/backends/RefWorkloads/Softmax.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/armnn/backends/RefWorkloads/Softmax.cpp')
-rw-r--r-- src/armnn/backends/RefWorkloads/Softmax.cpp 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/armnn/backends/RefWorkloads/Softmax.cpp b/src/armnn/backends/RefWorkloads/Softmax.cpp
index 58840e3076..c9f0bc5e59 100644
--- a/src/armnn/backends/RefWorkloads/Softmax.cpp
+++ b/src/armnn/backends/RefWorkloads/Softmax.cpp
@@ -11,13 +11,13 @@
namespace armnn
{
-/// Computes the softmax function on some inputs, into outputs, with a shape given by tensorInfo
+/// Computes the softmax function on some inputs, into outputs, with a shape given by tensorInfo.
void Softmax(const float* in, float* out, const TensorInfo& tensorInfo, float beta)
{
unsigned int numChannels = tensorInfo.GetShape()[1];
for (unsigned int n = 0; n < tensorInfo.GetShape()[0]; n++)
{
- // find maximum channel
+ // Find maximum channel.
float max = in[n * numChannels];
for (unsigned int c = 1; c < numChannels; c++)
{
@@ -28,7 +28,7 @@ void Softmax(const float* in, float* out, const TensorInfo& tensorInfo, float be
}
}
- // exponentiate all values and sum
+ // Exponentiate all values and sum.
std::vector<float> exponentials(numChannels);
float sum = 0.0f;
for (unsigned int c = 0; c < numChannels; c++)
@@ -38,7 +38,7 @@ void Softmax(const float* in, float* out, const TensorInfo& tensorInfo, float be
sum += exponentials[c];
}
- // divide exponentials by sum to give outputs
+ // Divide exponentials by sum to give outputs.
for (unsigned int c = 0; c < numChannels; c++)
{
out[n * numChannels + c] = exponentials[c] / sum;