author    Michele Di Giorgio <michele.digiorgio@arm.com>  2021-01-22 15:42:59 +0000
committer Michele Di Giorgio <michele.digiorgio@arm.com>  2021-01-26 14:11:18 +0000
commit    1e0208a66ddea1be2d0e715591598c6704660811 (patch)
tree      e0cdfe503ae54f892bea84ff3f0e916464828d42 /arm_compute/runtime/CL/functions/CLPReluLayer.h
parent    7249f154c2ec029f9b8c91f2bb845abe6590f7ed (diff)
download  ComputeLibrary-1e0208a66ddea1be2d0e715591598c6704660811.tar.gz
Make CLArithmeticAddition kernel and function state-less
Resolves COMPMID-4006

Change-Id: Iddc32b0b250142aac9a4a7b9dc0eef462d196025
Signed-off-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4913
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Sang-Hoon Park <sang-hoon.park@arm.com>
Diffstat (limited to 'arm_compute/runtime/CL/functions/CLPReluLayer.h')
-rw-r--r--  arm_compute/runtime/CL/functions/CLPReluLayer.h  6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/arm_compute/runtime/CL/functions/CLPReluLayer.h b/arm_compute/runtime/CL/functions/CLPReluLayer.h
index ab32bccc24..1751fda030 100644
--- a/arm_compute/runtime/CL/functions/CLPReluLayer.h
+++ b/arm_compute/runtime/CL/functions/CLPReluLayer.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2019-2020 Arm Limited.
+ * Copyright (c) 2019-2021 Arm Limited.
*
* SPDX-License-Identifier: MIT
*
@@ -35,7 +35,7 @@ class ITensorInfo;
namespace experimental
{
-/** Basic function to run @ref CLArithmeticOperationKernel for PRELU
+/** Basic function to run @ref arm_compute::opencl::kernels::ClArithmeticKernel for PRELU
*
* @note The function implements an activation layer with the PRELU activation function.
*/
@@ -69,7 +69,7 @@ public:
};
} // namespace experimental
-/** Basic function to run @ref CLArithmeticOperationKernel for PRELU
+/** Basic function to run @ref opencl::kernels::ClArithmeticKernel for PRELU
*
* @note The function implements an activation layer with the PRELU activation function.
*/
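
For context on the API documented above, here is a minimal usage sketch of CLPReluLayer. It is not part of the commit: the tensor shape, data type, and fill-free setup are illustrative assumptions, and the scheduler/allocator calls reflect the library's usual CL runtime flow rather than anything this patch changes.

#include "arm_compute/runtime/CL/CLScheduler.h"
#include "arm_compute/runtime/CL/CLTensor.h"
#include "arm_compute/runtime/CL/functions/CLPReluLayer.h"

using namespace arm_compute;

int main()
{
    // Initialise the default OpenCL context and queue for the scheduler.
    CLScheduler::get().default_init();

    // Illustrative 2D shape and data type; alpha here matches the input
    // shape, giving an element-wise PRELU.
    const TensorShape shape(16, 16);
    CLTensor input, alpha, output;
    input.allocator()->init(TensorInfo(shape, 1, DataType::F32));
    alpha.allocator()->init(TensorInfo(shape, 1, DataType::F32));
    output.allocator()->init(TensorInfo(shape, 1, DataType::F32));

    // Configure the function: output = input > 0 ? input : alpha * input
    CLPReluLayer prelu;
    prelu.configure(&input, &alpha, &output);

    // Allocate the backing CL buffers, then run the function, which now
    // dispatches to the state-less ClArithmeticKernel under the hood.
    input.allocator()->allocate();
    alpha.allocator()->allocate();
    output.allocator()->allocate();
    prelu.run();

    return 0;
}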