diff options
author | Pablo Palmier <Pablo.Palmier@arm.com> | 2017-10-18 11:03:08 +0100 |
---|---|---|
committer | Anthony Barbier <anthony.barbier@arm.com> | 2018-11-02 16:35:24 +0000 |
commit | 48a60f9f7b0b7b5cf38253b7a2ac576aac43ef78 (patch) | |
tree | 0e0b519f8c21d61a0bb63c45eca92b5057e351cb /src/core/CL/kernels | |
parent | c9938d26cc12896c4d37eabac185b4fbe3365f93 (diff) | |
download | ComputeLibrary-48a60f9f7b0b7b5cf38253b7a2ac576aac43ef78.tar.gz |
IVGCVSW-632 CL support for Softmax beta parameter
Change-Id: I21da48d2f40aa900301235eaced54b7eb644b0b2
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/91307
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Diffstat (limited to 'src/core/CL/kernels')
-rw-r--r-- | src/core/CL/kernels/CLSoftmaxLayerKernel.cpp | 8 |
1 file changed, 7 insertions, 1 deletion
diff --git a/src/core/CL/kernels/CLSoftmaxLayerKernel.cpp b/src/core/CL/kernels/CLSoftmaxLayerKernel.cpp index da3b9423d5..fb066bc645 100644 --- a/src/core/CL/kernels/CLSoftmaxLayerKernel.cpp +++ b/src/core/CL/kernels/CLSoftmaxLayerKernel.cpp @@ -105,10 +105,11 @@ CLLogits1DShiftExpSumKernel::CLLogits1DShiftExpSumKernel() { } -void CLLogits1DShiftExpSumKernel::configure(const ICLTensor *input, const ICLTensor *max, ICLTensor *output, ICLTensor *sum) +void CLLogits1DShiftExpSumKernel::configure(const ICLTensor *input, const ICLTensor *max, ICLTensor *output, ICLTensor *sum, float beta) { ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(input, 1, DataType::QS8, DataType::QS16, DataType::F16, DataType::F32); ARM_COMPUTE_ERROR_ON_NULLPTR(max, sum, output); + ARM_COMPUTE_ERROR_ON(beta != 1.0f && input->info()->data_type() != DataType::F32); // Output auto initialization if not yet initialized auto_init_if_empty(*sum->info(), max->info()->tensor_shape(), 1, input->info()->data_type(), input->info()->fixed_point_position()); @@ -145,6 +146,11 @@ void CLLogits1DShiftExpSumKernel::configure(const ICLTensor *input, const ICLTen build_opts.emplace("-DNON_MULTIPLE_OF_16"); } + if(beta != 1.0f) + { + build_opts.emplace(("-DBETA=" + float_to_string_with_full_precision(beta))); + } + // Create kernel _kernel = static_cast<cl::Kernel>(CLKernelLibrary::get().create_kernel("softmax_layer_shift_exp_sum", build_opts)); |