diff options
author | ramelg01 <ramy.elgammal@arm.com> | 2021-10-29 10:52:53 +0100 |
---|---|---|
committer | ramy.elgammal <ramy.elgammal@arm.com> | 2021-11-04 11:10:56 +0000 |
commit | 6049edadf0c89a026b3fcd1927ee7531d3c40278 (patch) | |
tree | c12fcea637e41cdb9e1f72dc734e4a87d0b31981 /tests/validation/CL/GEMMMatrixMultiplyNative.cpp | |
parent | 71cbd28b7cf5115b0451d43e5c84cce4ae4d8ec7 (diff) | |
download | ComputeLibrary-6049edadf0c89a026b3fcd1927ee7531d3c40278.tar.gz |
Add PRelu to supported PostOps in:
- ClGemmMatrixMultiplyReshapedKernel
- ClGemmMatrixMultiplyNativeKernel
- ClGemmMatrixMultiplyReshapedOnlyRhsKernel
Resolves: COMPMID-4713
Change-Id: I3adcb1b3d4af37ebcbc3bee19cc1845885d08600
Signed-off-by: Ramy Elgammal <ramy.elgammal@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/6553
Reviewed-by: SiCong Li <sicong.li@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'tests/validation/CL/GEMMMatrixMultiplyNative.cpp')
-rw-r--r-- | tests/validation/CL/GEMMMatrixMultiplyNative.cpp | 29 |
1 file changed, 27 insertions, 2 deletions
diff --git a/tests/validation/CL/GEMMMatrixMultiplyNative.cpp b/tests/validation/CL/GEMMMatrixMultiplyNative.cpp
index e3f151a2ca..54e9d32afc 100644
--- a/tests/validation/CL/GEMMMatrixMultiplyNative.cpp
+++ b/tests/validation/CL/GEMMMatrixMultiplyNative.cpp
@@ -179,13 +179,38 @@ experimental::PostOpList<PostOpArgBroadcast> post_ops_3()
         ConvertPolicy::SATURATE);
     return post_ops;
 }
-
+// To test that the output of the main op is the first parameter in prelu post op
+experimental::PostOpList<PostOpArgBroadcast> post_ops_4()
+{
+    experimental::PostOpList<PostOpArgBroadcast> post_ops{};
+    post_ops.push_back_op<experimental::PostOpAct<PostOpArgBroadcast>>(ActivationLayerInfo{ActivationLayerInfo::ActivationFunction::LINEAR, 0.5F, 0.0F});
+    post_ops.push_back_op<experimental::PostOpEltwisePRelu<PostOpArgBroadcast>>(
+        std::make_tuple(false, false, true), // If true, broadcast in corresponding dim: 0, 1 or 2
+        0,
+        ConvertPolicy::SATURATE);
+    post_ops.push_back_op<experimental::PostOpAct<PostOpArgBroadcast>>(ActivationLayerInfo{ActivationLayerInfo::ActivationFunction::RELU, 2.1F, 1.3F});
+    return post_ops;
+}
+// To test that the output of the main op is the second parameter in prelu post op i.e. it is the alpha_param
+experimental::PostOpList<PostOpArgBroadcast> post_ops_5()
+{
+    experimental::PostOpList<PostOpArgBroadcast> post_ops{};
+    post_ops.push_back_op<experimental::PostOpAct<PostOpArgBroadcast>>(ActivationLayerInfo{ActivationLayerInfo::ActivationFunction::LINEAR, 0.5F, 0.0F});
+    post_ops.push_back_op<experimental::PostOpEltwisePRelu<PostOpArgBroadcast>>(
+        std::make_tuple(false, false, false), // If true, broadcast in corresponding dim: 0, 1 or 2
+        1,
+        ConvertPolicy::SATURATE);
+    post_ops.push_back_op<experimental::PostOpAct<PostOpArgBroadcast>>(ActivationLayerInfo{ActivationLayerInfo::ActivationFunction::RELU, 2.1F, 1.3F});
+    return post_ops;
+}
 /** Different Post Op Lists */
 const auto post_op_lists = framework::dataset::make("post_op_lists", {
     post_ops_1(),
     post_ops_2(),
     post_ops_3(),
-} );
+    post_ops_4(),
+    post_ops_5()
+    } );
 
 bool is_post_op_list_valid(unsigned int m, unsigned int n, unsigned int k, unsigned int batch, DataType data_type, const experimental::PostOpList<ITensorInfo*>& post_ops)
 {