diff options
author | Eren Kopuz <eren.kopuz@arm.com> | 2020-06-09 15:37:43 +0100 |
---|---|---|
committer | SiCong Li <sicong.li@arm.com> | 2020-06-18 10:18:25 +0000 |
commit | 350099ea9868aa6ec84d082fe6d5962fafc86aa4 (patch) | |
tree | 9c4be974b045155e0c5007f9c82c1ded2335c4d9 /examples/gemm_tuner/cl_gemm_reshaped.cpp | |
parent | 6ad60af32af672f27e152bf37790cd0c0c4db696 (diff) | |
download | ComputeLibrary-350099ea9868aa6ec84d082fe6d5962fafc86aa4.tar.gz |
COMPMID-3451: Add support for F16 in Gemm Tuner
- Command line option added to specify F32 or F16
Change-Id: Ibd8a5c8d013a6dff601aa773e94946de1ff6e280
Signed-off-by: Eren Kopuz <eren.kopuz@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/3322
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: SiCong Li <sicong.li@arm.com>
Reviewed-by: Gian Marco Iodice <gianmarco.iodice@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'examples/gemm_tuner/cl_gemm_reshaped.cpp')
-rw-r--r-- | examples/gemm_tuner/cl_gemm_reshaped.cpp | 15 |
1 files changed, 7 insertions, 8 deletions
diff --git a/examples/gemm_tuner/cl_gemm_reshaped.cpp b/examples/gemm_tuner/cl_gemm_reshaped.cpp index e579ed762c..da72dfd58d 100644 --- a/examples/gemm_tuner/cl_gemm_reshaped.cpp +++ b/examples/gemm_tuner/cl_gemm_reshaped.cpp @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019 ARM Limited. + * Copyright (c) 2019-2020 ARM Limited. * * SPDX-License-Identifier: MIT * @@ -177,10 +177,9 @@ public: bool do_setup(int argc, char **argv) override { // Default parameters - const DataType data_type = DataType::F32; - const float alpha = 1.0f; - const float beta = 0.0f; - const ActivationLayerInfo act_info = ActivationLayerInfo(); + const float alpha = 1.0f; + const float beta = 0.0f; + const ActivationLayerInfo act_info = ActivationLayerInfo(); CommonGemmExampleParams params; GemmConfigs configs; @@ -219,9 +218,9 @@ public: CLScheduler::get().default_init(&tuner); - lhs.allocator()->init(TensorInfo(TensorShape(params.K, params.M, params.B), 1, data_type)); - rhs.allocator()->init(TensorInfo(TensorShape(params.N, params.K, params.B), 1, data_type)); - bias.allocator()->init(TensorInfo(TensorShape(params.N, 1, params.B), 1, data_type)); + lhs.allocator()->init(TensorInfo(TensorShape(params.K, params.M, params.B), 1, params.data_type)); + rhs.allocator()->init(TensorInfo(TensorShape(params.N, params.K, params.B), 1, params.data_type)); + bias.allocator()->init(TensorInfo(TensorShape(params.N, 1, params.B), 1, params.data_type)); GEMMLHSMatrixInfo lhs_info; lhs_info.m0 = configs.m0; |