From 388d3ec5289d5aa7415d6599137a74c4e5eaeeaf Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Thu, 2 Nov 2017 12:17:56 +0000
Subject: COMPMID-556: Support beta for all softmax data types.

Change-Id: I4c0ca033dc53829fb7ac3dd7c7469d143be74e73
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/94251
Tested-by: Kaizen
Reviewed-by: Anthony Barbier
---
 arm_compute/core/CL/CLKernelLibrary.h              | 35 ++++++++++++++++++++++
 arm_compute/core/CL/kernels/CLSoftmaxLayerKernel.h |  2 +-
 arm_compute/core/Helpers.inl                       |  2 +-
 arm_compute/core/Utils.h                           |  2 +-
 arm_compute/runtime/CL/functions/CLSoftmaxLayer.h  |  4 +--
 5 files changed, 40 insertions(+), 5 deletions(-)

(limited to 'arm_compute')

diff --git a/arm_compute/core/CL/CLKernelLibrary.h b/arm_compute/core/CL/CLKernelLibrary.h
index fc131cdcfe..d433a740ac 100644
--- a/arm_compute/core/CL/CLKernelLibrary.h
+++ b/arm_compute/core/CL/CLKernelLibrary.h
@@ -33,6 +33,41 @@

 namespace arm_compute
 {
+/** Build options */
+class CLBuildOptions
+{
+    using StringSet = std::set<std::string>;
+
+public:
+    /** Default constructor. */
+    CLBuildOptions();
+    /** Adds option to the existing build option list
+     *
+     * @param[in] option Option to add
+     */
+    void add_option(std::string option);
+    /** Adds option if a given condition is true;
+     *
+     * @param[in] cond   Condition to check
+     * @param[in] option Option to add if condition is true
+     */
+    void add_option_if(bool cond, std::string option);
+    /** Adds first option if condition is true else the second one
+     *
+     * @param[in] cond         Condition to check
+     * @param[in] option_true  Option to add if condition is true
+     * @param[in] option_false Option to add if condition is false
+     */
+    void add_option_if_else(bool cond, std::string option_true, std::string option_false);
+    /** Gets the current options list set
+     *
+     * @return Build options set
+     */
+    StringSet options() const;
+
+private:
+    StringSet _build_opts; /**< Build options set */
+};
 /** Program class */
 class Program
 {
diff --git a/arm_compute/core/CL/kernels/CLSoftmaxLayerKernel.h b/arm_compute/core/CL/kernels/CLSoftmaxLayerKernel.h
index 60d555019d..1e079cbb06 100644
--- a/arm_compute/core/CL/kernels/CLSoftmaxLayerKernel.h
+++ b/arm_compute/core/CL/kernels/CLSoftmaxLayerKernel.h
@@ -60,7 +60,7 @@ public:
      *
      * @param[in]  input  Source tensor. Data types supported: QS8/QS16/F16/F32
      * @param[in]  max    Max values tensor. Data types supported: same as @p input
-     * @param[in]  beta   A scaling factor for the exponent. QS8/QS16/F16 only support a beta value of 1.
+     * @param[in]  beta   A scaling factor for the exponent.
      * @param[out] output Destination tensor. Data types supported: same as @p input
      * @param[out] sum    Sum of 1D logits tensor. Data types supported: same as @p input
      */
diff --git a/arm_compute/core/Helpers.inl b/arm_compute/core/Helpers.inl
index 1a27684c9c..acdb9567db 100644
--- a/arm_compute/core/Helpers.inl
+++ b/arm_compute/core/Helpers.inl
@@ -263,7 +263,7 @@ inline bool set_fixed_point_position_if_zero(ITensorInfo &info, int fixed_point_

 inline bool set_quantization_info_if_empty(ITensorInfo &info, QuantizationInfo quantization_info)
 {
-    if(info.quantization_info().empty() && (is_data_type_assymetric(info.data_type())))
+    if(info.quantization_info().empty() && (is_data_type_quantized_assymetric(info.data_type())))
     {
         info.set_quantization_info(quantization_info);
         return true;
diff --git a/arm_compute/core/Utils.h b/arm_compute/core/Utils.h
index 8e15a0a988..a77df030e6 100644
--- a/arm_compute/core/Utils.h
+++ b/arm_compute/core/Utils.h
@@ -755,7 +755,7 @@ inline bool is_data_type_fixed_point(DataType dt)
  *
  * @return True if data type is of symmetric quantized type, else false.
  */
-inline bool is_data_type_assymetric(DataType dt)
+inline bool is_data_type_quantized_assymetric(DataType dt)
 {
     switch(dt)
     {
diff --git a/arm_compute/runtime/CL/functions/CLSoftmaxLayer.h b/arm_compute/runtime/CL/functions/CLSoftmaxLayer.h
index e87deb6d15..d84297e9a1 100644
--- a/arm_compute/runtime/CL/functions/CLSoftmaxLayer.h
+++ b/arm_compute/runtime/CL/functions/CLSoftmaxLayer.h
@@ -39,7 +39,7 @@ class ICLTensor;
 /** Basic function to compute a SoftmaxLayer.
  *
  * Softmax is calculated by :
- * @f[ out = exp(x - max(x)) / sum(exp(x - max(x))) @f]
+ * @f[ out = exp((x - max(x)) * beta) / sum(exp((x - max(x)) * beta)) @f]
  *
  * This function runs the following kernels:
  * -# @ref CLLogits1DMaxKernel
@@ -54,7 +54,7 @@ public:
     /** Set the input and output tensors.
      *
      * @param[in]  input  Source tensor. Data types supported: QS8/QS16/F16/F32
-     * @param[in]  beta   A scaling factor for the exponent. QS8/QS16/F16 only support a beta value of 1.
+     * @param[in]  beta   A scaling factor for the exponent.
      * @param[out] output Destination tensor. Data types supported: same as @p input
      */
     void configure(const ICLTensor *input, ICLTensor *output, float beta = 1.0f);
--
cgit v1.2.1
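
For context, a minimal usage sketch of the two pieces this patch touches: the new CLBuildOptions helper declared in CLKernelLibrary.h and the beta parameter of CLSoftmaxLayer::configure(), which after this change applies to all supported data types rather than F32 only. Only CLBuildOptions (add_option, add_option_if, add_option_if_else, options()) and the configure() signature come from the diff above; the tensor shapes, the -D define names and the CLScheduler/CLTensor boilerplate are illustrative assumptions, not part of this change.

#include "arm_compute/core/CL/CLKernelLibrary.h"
#include "arm_compute/core/TensorInfo.h"
#include "arm_compute/core/TensorShape.h"
#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/CL/CLScheduler.h"
#include "arm_compute/runtime/CL/CLTensor.h"
#include "arm_compute/runtime/CL/functions/CLSoftmaxLayer.h"

#include <string>

using namespace arm_compute;

int main()
{
    // Create the default OpenCL context and queue used by the CL functions.
    CLScheduler::get().default_init();

    // 2D logits tensor (128 classes x 32 rows); shape and data type are illustrative.
    CLTensor input;
    CLTensor output;
    input.allocator()->init(TensorInfo(TensorShape(128U, 32U), 1, DataType::F32));
    output.allocator()->init(TensorInfo(TensorShape(128U, 32U), 1, DataType::F32));

    // beta scales the exponent: out = exp((x - max(x)) * beta) / sum(exp((x - max(x)) * beta)).
    // With this patch it is honoured for QS8/QS16/F16 as well, not only F32.
    const float beta = 0.5f;
    CLSoftmaxLayer softmax;
    softmax.configure(&input, &output, beta);

    input.allocator()->allocate();
    output.allocator()->allocate();
    // ... map `input` and fill it with logits here ...
    softmax.run();

    // Sketch of the build-option plumbing CLBuildOptions is meant for inside a
    // kernel's configure(): collect -D defines, some conditionally, then hand the
    // set to the OpenCL program. The define names below are made up for illustration.
    CLBuildOptions build_opts;
    build_opts.add_option("-DBETA=" + std::to_string(beta));
    build_opts.add_option_if(beta != 1.0f, "-DSCALE_EXPONENT");
    build_opts.add_option_if_else(input.info()->data_type() == DataType::F16,
                                  "-DDATA_TYPE=half", "-DDATA_TYPE=float");
    // build_opts.options() yields the std::set<std::string> of flags passed to the CL compiler.

    return 0;
}

The conditional helpers keep kernel configure() code free of repetitive if/else blocks when assembling defines, which is presumably why the class is introduced alongside the beta plumbing here.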