Diffstat (limited to 'arm_compute/core')
-rw-r--r--  arm_compute/core/Helpers.h                            | 2
-rw-r--r--  arm_compute/core/Helpers.inl                          | 2
-rw-r--r--  arm_compute/core/NEON/kernels/NESoftmaxLayerKernel.h  | 6
3 files changed, 6 insertions, 4 deletions
diff --git a/arm_compute/core/Helpers.h b/arm_compute/core/Helpers.h
index 1be24e1841..fdbb46fc78 100644
--- a/arm_compute/core/Helpers.h
+++ b/arm_compute/core/Helpers.h
@@ -501,7 +501,7 @@ bool auto_init_if_empty(ITensorInfo &info,
*
* @return True if the tensor info has been initialized
*/
-bool auto_init_if_empty(ITensorInfo &info_sink, ITensorInfo &info_source);
+bool auto_init_if_empty(ITensorInfo &info_sink, const ITensorInfo &info_source);
/** Set the shape to the specified value if the current assignment is empty.
*
diff --git a/arm_compute/core/Helpers.inl b/arm_compute/core/Helpers.inl
index 1e565344b7..3672692814 100644
--- a/arm_compute/core/Helpers.inl
+++ b/arm_compute/core/Helpers.inl
@@ -217,7 +217,7 @@ inline bool auto_init_if_empty(ITensorInfo &info,
return false;
}
-inline bool auto_init_if_empty(ITensorInfo &info_sink, ITensorInfo &info_source)
+inline bool auto_init_if_empty(ITensorInfo &info_sink, const ITensorInfo &info_source)
{
if(info_sink.tensor_shape().total_size() == 0)
{
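For reference, a minimal sketch (not part of the patch) of how the const-correct overload above is typically called from a kernel's configure step; the function name init_output_like and the input/output tensors are assumptions for illustration:

#include "arm_compute/core/Helpers.h"
#include "arm_compute/core/ITensor.h"

using namespace arm_compute;

// Hypothetical free function (illustrative only) showing the usual call pattern
// for the const-correct overload of auto_init_if_empty().
void init_output_like(const ITensor *input, ITensor *output)
{
    // Copies shape, data type and related metadata from the source info to the
    // destination info, but only when the destination shape is still empty
    // (total_size() == 0). Returns true when an initialization took place.
    const bool did_init = auto_init_if_empty(*output->info(), *input->info());
    (void)did_init; // callers often ignore the return value
}

Taking info_source by const reference lets callers pass the info of a const input tensor directly, which is what the declaration change above enables.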
diff --git a/arm_compute/core/NEON/kernels/NESoftmaxLayerKernel.h b/arm_compute/core/NEON/kernels/NESoftmaxLayerKernel.h
index cce21569d9..c3e25181b6 100644
--- a/arm_compute/core/NEON/kernels/NESoftmaxLayerKernel.h
+++ b/arm_compute/core/NEON/kernels/NESoftmaxLayerKernel.h
@@ -78,14 +78,15 @@ public:
* @param[in] max Max values tensor. Data types supported: same as @p input.
* @param[out] output Destination tensor. Data types supported: same as @p input.
* @param[out] sum Sum of 1D logits tensor. Data types supported: same as @p input.
+ * @param[in] beta (Optional) A scaling factor for the exponent. QS8/QS16 only support a beta value of 1.
*/
- void configure(const ITensor *input, const ITensor *max, ITensor *output, ITensor *sum);
+ void configure(const ITensor *input, const ITensor *max, ITensor *output, ITensor *sum, float beta = 1.0f);
// Inherited methods overridden:
void run(const Window &window, const ThreadInfo &info) override;
private:
- using Logits1DShiftExpSumFunction = void(const ITensor *in, const ITensor *max, ITensor *out, ITensor *sum, const Window &window);
+ using Logits1DShiftExpSumFunction = void(const ITensor *in, const ITensor *max, ITensor *out, ITensor *sum, const Window &window, float beta);
private:
Logits1DShiftExpSumFunction *_func;
@@ -93,6 +94,7 @@ private:
const ITensor *_max;
ITensor *_output;
ITensor *_sum;
+ float _beta;
};
/** Interface for calculating the final step of the Softmax Layer where each logit value is multiplied by the inverse of the sum of the logits. */
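Below is a hedged usage sketch (not part of the patch) of the extended configure() signature, assuming the class being modified is NELogits1DShiftExpSumKernel (suggested by the Logits1DShiftExpSumFunction typedef); the helper name, tensor names and the beta value of 2.0f are illustrative:

#include "arm_compute/core/NEON/kernels/NESoftmaxLayerKernel.h"

using namespace arm_compute;

// Hypothetical configuration helper; tensor names and beta are illustrative.
void setup_shift_exp_sum(NELogits1DShiftExpSumKernel &kernel,
                         const ITensor *in, const ITensor *max,
                         ITensor *out, ITensor *sum)
{
    // beta scales the exponent, i.e. the kernel accumulates exp(beta * (x - max)).
    // Omitting the argument keeps the previous behaviour (beta = 1.0f); per the
    // doc comment in the diff, the QS8/QS16 fixed-point paths only support beta == 1.
    kernel.configure(in, max, out, sum, 2.0f);
}

Threading beta through configure() and the Logits1DShiftExpSumFunction signature forwards the scaling factor to the per-data-type implementation at run time, so the existing function-pointer dispatch stays unchanged.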