diff options
author | Sang-Hoon Park <sang-hoon.park@arm.com> | 2021-02-17 13:12:53 +0000 |
---|---|---|
committer | Georgios Pinitas <georgios.pinitas@arm.com> | 2021-03-11 14:50:55 +0000 |
commit | 6b0bf9972975aff01e42e5790f7b7c98cd835afb (patch) | |
tree | 5e0a50af1e5a7e325621d762d7a6c523592d2e6f /arm_compute/runtime/NEON/functions/NEPReluLayer.h | |
parent | d7de9c50c85be2a01b9fef2867c3be140685fd6d (diff) | |
download | ComputeLibrary-6b0bf9972975aff01e42e5790f7b7c98cd835afb.tar.gz |
Add CpuPRelu operators
Implements: COMPMID-4184
Change-Id: I252168b460a18f837a26df5641664e95ddbd9c7e
Signed-off-by: Sang-Hoon Park <sang-hoon.park@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/5237
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Manuel Bottini <manuel.bottini@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'arm_compute/runtime/NEON/functions/NEPReluLayer.h')
-rw-r--r-- | arm_compute/runtime/NEON/functions/NEPReluLayer.h | 31 |
1 file changed, 2 insertions, 29 deletions
diff --git a/arm_compute/runtime/NEON/functions/NEPReluLayer.h b/arm_compute/runtime/NEON/functions/NEPReluLayer.h index 12ffb8da7b..b07febfe7f 100644 --- a/arm_compute/runtime/NEON/functions/NEPReluLayer.h +++ b/arm_compute/runtime/NEON/functions/NEPReluLayer.h @@ -26,41 +26,14 @@ #include "arm_compute/core/Types.h" #include "arm_compute/runtime/IFunction.h" -#include "arm_compute/runtime/NEON/INEOperator.h" + +#include <memory> namespace arm_compute { class ITensor; class ITensorInfo; -namespace experimental -{ -/** Basic function to run @ref cpu::kernels::CpuArithmeticKernel for PRELU - * - * @note The function implements an activation layer with the PRELU activation function. - */ -class NEPRelu : public INEOperator -{ -public: - /** Set the input and output tensor. - * - * @param[in] input Source tensor info. Data types supported: QASYMM8/QASYMM8_SIGNED/F16/F32. - * @param[in] alpha Source alpha tensor info. Data types supported: same of @p input. - * @param[out] output Destination tensor info. Data type supported: same as @p input - */ - void configure(const ITensorInfo *input, const ITensorInfo *alpha, ITensorInfo *output); - /** Static function to check if given info will lead to a valid configuration of @ref cpu::kernels::CpuComparisonKernel - * - * @param[in] input Source tensor info. Data types supported: QASYMM8/QASYMM8_SIGNED/F16/F32. - * @param[in] alpha Source alpha tensor info. Data types supported: same of @p input. - * @param[in] output Destination tensor info. Data type supported: same as @p input - * - * @return a status - */ - static Status validate(const ITensorInfo *input, const ITensorInfo *alpha, const ITensorInfo *output); -}; -} // namespace experimental - /** Basic function to run @ref cpu::kernels::CpuArithmeticKernel for PRELU * * @note The function implements an activation layer with the PRELU activation function. |