From 388d3ec5289d5aa7415d6599137a74c4e5eaeeaf Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 2 Nov 2017 12:17:56 +0000
Subject: COMPMID-556: Support beta for all softmax data types.

Change-Id: I4c0ca033dc53829fb7ac3dd7c7469d143be74e73
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/94251
Tested-by: Kaizen
Reviewed-by: Anthony Barbier
---
 arm_compute/runtime/CL/functions/CLSoftmaxLayer.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'arm_compute/runtime/CL/functions/CLSoftmaxLayer.h')

diff --git a/arm_compute/runtime/CL/functions/CLSoftmaxLayer.h b/arm_compute/runtime/CL/functions/CLSoftmaxLayer.h
index e87deb6d15..d84297e9a1 100644
--- a/arm_compute/runtime/CL/functions/CLSoftmaxLayer.h
+++ b/arm_compute/runtime/CL/functions/CLSoftmaxLayer.h
@@ -39,7 +39,7 @@ class ICLTensor;
 /** Basic function to compute a SoftmaxLayer.
  *
  * Softmax is calculated by :
- * @f[ out = exp(x - max(x)) / sum(exp(x - max(x))) @f]
+ * @f[ out = exp((x - max(x)) * beta) / sum(exp((x - max(x)) * beta)) @f]
  *
  * This function runs the following kernels:
  * -# @ref CLLogits1DMaxKernel
@@ -54,7 +54,7 @@ public:
     /** Set the input and output tensors.
      *
      * @param[in]  input  Source tensor. Data types supported: QS8/QS16/F16/F32
-     * @param[in]  beta   A scaling factor for the exponent. QS8/QS16/F16 only support a beta value of 1.
+     * @param[in]  beta   A scaling factor for the exponent.
     * @param[out] output Destination tensor. Data types supported: same as @p input
     */
    void configure(const ICLTensor *input, ICLTensor *output, float beta = 1.0f);
--
cgit v1.2.1
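
For context, a minimal usage sketch of the API documented by this patch: it configures CLSoftmaxLayer with an explicit beta, which after this change is honoured for all supported data types rather than only a beta of 1 for QS8/QS16/F16. The tensor shapes, the F32 data type, and the beta value of 2.0f are illustrative assumptions, not part of the commit; the CLScheduler/CLTensor setup follows the library's usual pattern and is not shown in the patch itself.

```cpp
#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/CL/CLScheduler.h"
#include "arm_compute/runtime/CL/CLTensor.h"
#include "arm_compute/runtime/CL/functions/CLSoftmaxLayer.h"

using namespace arm_compute;

int main()
{
    // Initialise the default OpenCL context and queue (assumed setup).
    CLScheduler::get().default_init();

    // Illustrative shape: 128 classes per row, batch of 16, F32 data.
    CLTensor input;
    CLTensor output;
    input.allocator()->init(TensorInfo(TensorShape(128U, 16U), 1, DataType::F32));
    output.allocator()->init(TensorInfo(TensorShape(128U, 16U), 1, DataType::F32));

    // Configure the softmax with an explicit scaling factor for the exponent,
    // i.e. out = exp((x - max(x)) * beta) / sum(exp((x - max(x)) * beta)).
    CLSoftmaxLayer softmax;
    softmax.configure(&input, &output, /* beta */ 2.0f);

    // Allocate the backing CL buffers, fill `input` (omitted here), then run.
    input.allocator()->allocate();
    output.allocator()->allocate();
    softmax.run();

    return 0;
}
```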