author    Felix Thomasmathibalan <felixjohnny.thomasmathibalan@arm.com>    2023-09-27 17:46:17 +0100
committer felixjohnny.thomasmathibalan <felixjohnny.thomasmathibalan@arm.com>    2023-09-28 12:08:05 +0000
commit    afd38f0c617d6f89b2b4532c6c44f116617e2b6f (patch)
tree      03bc7d5a762099989b16a656fa8d397b490ed70e /src/runtime/heuristics/matmul_native/ClMatMulNativeDefaultConfigValhall.cpp
parent    bdcb4c148ee2fdeaaddf4cf1e57bbb0de02bb894 (diff)
download  ComputeLibrary-afd38f0c617d6f89b2b4532c6c44f116617e2b6f.tar.gz
Apply clang-format on repository
Code is formatted as per a revised clang-format configuration file (not part of this delivery). Version 14.0.6 is used.

Exclusion List:
- files with .cl extension
- files that are not strictly C/C++ (e.g. Android.bp, Sconscript ...)

And the following directories:
- compute_kernel_writer/validation/
- tests/
- include/
- src/core/NEON/kernels/convolution/
- src/core/NEON/kernels/arm_gemm/
- src/core/NEON/kernels/arm_conv/
- data/

There will be a follow-up for formatting of the .cl files and the files under tests/ and compute_kernel_writer/validation/.

Signed-off-by: Felix Thomasmathibalan <felixjohnny.thomasmathibalan@arm.com>
Change-Id: Ib7eb1fcf4e7537b9feaefcfc15098a804a3fde0a
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/10391
Benchmark: Arm Jenkins <bsgcomp@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Gunes Bayir <gunes.bayir@arm.com>
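For illustration, the fragment below was written for this page (it is not taken from the repository or from the configuration file, which is not delivered here); it is a minimal, compilable sketch of the layout the new rules produce, matching what the diff below applies: a space after control-flow keywords, opening braces on their own lines, and braced initializer lists packed onto as few lines as the column limit allows.

    // Illustrative only: shows the formatting style applied by the diff below
    // (clang-format 14.0.6 with the revised configuration), not the
    // configuration file itself.
    #include <cstdio>
    #include <vector>

    int pick(int target)
    {
        // Space after 'switch'/'if'; braces on their own lines.
        switch (target)
        {
            case 0:
                return 1;
            default:
                return 2;
        }
    }

    int main()
    {
        // Braced initializer lists are packed compactly instead of one row per line.
        const std::vector<std::vector<int>> configs = {{3136, 64, 64, 36}, {4096, 48, 32, 36}, {688, 92, 68, 32}};

        if (pick(0) == 1)
        {
            std::printf("%zu rows\n", configs.size());
        }
        return 0;
    }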
Diffstat (limited to 'src/runtime/heuristics/matmul_native/ClMatMulNativeDefaultConfigValhall.cpp')
-rw-r--r--  src/runtime/heuristics/matmul_native/ClMatMulNativeDefaultConfigValhall.cpp  396
1 file changed, 129 insertions, 267 deletions
diff --git a/src/runtime/heuristics/matmul_native/ClMatMulNativeDefaultConfigValhall.cpp b/src/runtime/heuristics/matmul_native/ClMatMulNativeDefaultConfigValhall.cpp
index 01102b3d60..b3c8d891dc 100644
--- a/src/runtime/heuristics/matmul_native/ClMatMulNativeDefaultConfigValhall.cpp
+++ b/src/runtime/heuristics/matmul_native/ClMatMulNativeDefaultConfigValhall.cpp
@@ -28,30 +28,33 @@
#include "arm_compute/core/GPUTarget.h"
#include "arm_compute/core/KernelDescriptors.h"
#include "arm_compute/core/TensorInfo.h"
-#include "src/gpu/cl/kernels/ClMatMulNativeKernel.h"
-#include <utility>
+#include "src/gpu/cl/kernels/ClMatMulNativeKernel.h"
#include "src/runtime/heuristics/matmul_native/ClMatMulNativeHelpers.h"
+#include <utility>
+
namespace arm_compute
{
namespace cl_matmul
{
-ClMatMulNativeDefaultConfigValhall::ClMatMulNativeDefaultConfigValhall(GPUTarget gpu)
- : IClMatMulNativeKernelConfig(gpu)
+ClMatMulNativeDefaultConfigValhall::ClMatMulNativeDefaultConfigValhall(GPUTarget gpu) : IClMatMulNativeKernelConfig(gpu)
{
}
-MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure(const ITensorInfo *lhs, const ITensorInfo *rhs, const MatMulInfo &info)
+MatMulKernelInfo
+ClMatMulNativeDefaultConfigValhall::configure(const ITensorInfo *lhs, const ITensorInfo *rhs, const MatMulInfo &info)
{
- using ConfigurationFunctionExecutorPtr = MatMulKernelInfo (ClMatMulNativeDefaultConfigValhall::*)(unsigned int m, unsigned int n, unsigned int k, unsigned int b, bool rhs_lock_padding, const MatMulInfo & info);
+ using ConfigurationFunctionExecutorPtr = MatMulKernelInfo (ClMatMulNativeDefaultConfigValhall::*)(
+ unsigned int m, unsigned int n, unsigned int k, unsigned int b, bool rhs_lock_padding, const MatMulInfo &info);
- ClMatMulNativeConfigArray<ConfigurationFunctionExecutorPtr> configs_G710(&ClMatMulNativeDefaultConfigValhall::configure_G710_f32,
- &ClMatMulNativeDefaultConfigValhall::configure_G710_f16,
- &ClMatMulNativeDefaultConfigValhall::configure_G710_u8);
+ ClMatMulNativeConfigArray<ConfigurationFunctionExecutorPtr> configs_G710(
+ &ClMatMulNativeDefaultConfigValhall::configure_G710_f32,
+ &ClMatMulNativeDefaultConfigValhall::configure_G710_f16,
+ &ClMatMulNativeDefaultConfigValhall::configure_G710_u8);
ConfigurationFunctionExecutorPtr func = nullptr;
- switch(_target)
+ switch (_target)
{
case GPUTarget::G710:
default:
@@ -67,7 +70,7 @@ MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure(const ITensorInfo
const bool is_batched = lhs_shape.num_dimensions() > 2;
- if(is_batched == true)
+ if (is_batched == true)
{
lhs_shape.collapse_from(2);
}
@@ -81,103 +84,48 @@ MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure(const ITensorInfo
return (this->*func)(m, n, k, b, rhs->lock_paddings(), info);
}
-MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_f32(unsigned int m, unsigned int n, unsigned int k, unsigned int b, bool rhs_lock_padding, const MatMulInfo &info)
+MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_f32(
+ unsigned int m, unsigned int n, unsigned int k, unsigned int b, bool rhs_lock_padding, const MatMulInfo &info)
{
- const MatMulNativeConfigsMatrix configs_mnkb_best_nt_nt =
- {
- { 3136, 64, 64, 36, 4, 4, 16, 1 },
- { 4096, 48, 32, 36, 4, 4, 4, 1 },
- { 688, 92, 68, 32, 2, 8, 4, 1 },
- { 24, 464, 412, 24, 2, 8, 4, 1 },
- { 112, 184, 144, 28, 4, 4, 16, 1 },
- { 5776, 64, 32, 36, 2, 4, 16, 1 },
- { 1568, 64, 40, 36, 2, 8, 8, 1 },
- { 2920, 64, 64, 24, 4, 4, 16, 1 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_fallback_nt_nt =
- {
- { 3136, 64, 64, 36, 4, 4, 8, 0 },
- { 4096, 48, 32, 36, 4, 4, 8, 0 },
- { 688, 92, 68, 32, 5, 4, 4, 0 },
- { 24, 464, 412, 24, 6, 2, 8, 0 },
- { 112, 184, 144, 28, 6, 4, 4, 0 },
- { 5776, 64, 32, 36, 5, 4, 4, 0 },
- { 1568, 64, 40, 36, 4, 4, 8, 0 },
- { 2920, 64, 64, 24, 4, 4, 8, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_nt_t =
- {
- { 3136, 64, 64, 36, 4, 4, 4, 1 },
- { 4096, 48, 32, 36, 2, 2, 16, 1 },
- { 688, 92, 68, 32, 4, 4, 4, 1 },
- { 24, 464, 412, 24, 6, 2, 8, 1 },
- { 112, 184, 144, 28, 4, 2, 16, 1 },
- { 5776, 64, 32, 36, 4, 4, 4, 1 },
- { 1568, 64, 40, 36, 4, 4, 8, 1 },
- { 2920, 64, 64, 24, 4, 4, 4, 1 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_fallback_nt_t =
- {
- { 3136, 64, 64, 36, 5, 4, 4, 0 },
- { 4096, 48, 32, 36, 5, 4, 4, 0 },
- { 688, 92, 68, 32, 5, 4, 4, 0 },
- { 24, 464, 412, 24, 6, 2, 4, 0 },
- { 112, 184, 144, 28, 5, 4, 4, 0 },
- { 5776, 64, 32, 36, 5, 4, 4, 0 },
- { 1568, 64, 40, 36, 5, 4, 4, 0 },
- { 2920, 64, 64, 24, 6, 2, 4, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_t_nt =
- {
- { 3136, 64, 64, 36, 4, 4, 16, 1 },
- { 4096, 48, 32, 36, 4, 4, 4, 1 },
- { 688, 92, 68, 32, 2, 8, 4, 1 },
- { 24, 464, 412, 24, 2, 8, 4, 1 },
- { 112, 184, 144, 28, 4, 4, 16, 1 },
- { 5776, 64, 32, 36, 2, 8, 8, 1 },
- { 1568, 64, 40, 36, 4, 4, 8, 1 },
- { 2920, 64, 64, 24, 4, 4, 16, 1 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_fallback_t_nt =
- {
- { 3136, 64, 64, 36, 4, 4, 4, 0 },
- { 4096, 48, 32, 36, 4, 4, 4, 0 },
- { 688, 92, 68, 32, 4, 4, 4, 0 },
- { 24, 464, 412, 24, 4, 4, 4, 0 },
- { 112, 184, 144, 28, 4, 4, 4, 0 },
- { 5776, 64, 32, 36, 4, 4, 8, 0 },
- { 1568, 64, 40, 36, 4, 4, 4, 0 },
- { 2920, 64, 64, 24, 4, 4, 4, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_t_t =
- {
- { 3136, 64, 64, 36, 4, 4, 4, 1 },
- { 4096, 48, 32, 36, 4, 4, 4, 1 },
- { 688, 92, 68, 32, 4, 4, 4, 1 },
- { 24, 464, 412, 24, 2, 2, 16, 1 },
- { 112, 184, 144, 28, 4, 4, 4, 1 },
- { 5776, 64, 32, 36, 4, 4, 4, 1 },
- { 1568, 64, 40, 36, 4, 4, 4, 1 },
- { 2920, 64, 64, 24, 4, 4, 4, 1 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_fallback_t_t =
- {
- { 3136, 64, 64, 36, 4, 4, 4, 0 },
- { 4096, 48, 32, 36, 4, 4, 4, 0 },
- { 688, 92, 68, 32, 4, 4, 4, 0 },
- { 24, 464, 412, 24, 4, 2, 8, 0 },
- { 112, 184, 144, 28, 4, 4, 4, 0 },
- { 5776, 64, 32, 36, 4, 4, 4, 0 },
- { 1568, 64, 40, 36, 4, 4, 4, 0 },
- { 2920, 64, 64, 24, 4, 4, 4, 0 }
- };
+ const MatMulNativeConfigsMatrix configs_mnkb_best_nt_nt = {
+ {3136, 64, 64, 36, 4, 4, 16, 1}, {4096, 48, 32, 36, 4, 4, 4, 1}, {688, 92, 68, 32, 2, 8, 4, 1},
+ {24, 464, 412, 24, 2, 8, 4, 1}, {112, 184, 144, 28, 4, 4, 16, 1}, {5776, 64, 32, 36, 2, 4, 16, 1},
+ {1568, 64, 40, 36, 2, 8, 8, 1}, {2920, 64, 64, 24, 4, 4, 16, 1}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_fallback_nt_nt = {
+ {3136, 64, 64, 36, 4, 4, 8, 0}, {4096, 48, 32, 36, 4, 4, 8, 0}, {688, 92, 68, 32, 5, 4, 4, 0},
+ {24, 464, 412, 24, 6, 2, 8, 0}, {112, 184, 144, 28, 6, 4, 4, 0}, {5776, 64, 32, 36, 5, 4, 4, 0},
+ {1568, 64, 40, 36, 4, 4, 8, 0}, {2920, 64, 64, 24, 4, 4, 8, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_nt_t = {
+ {3136, 64, 64, 36, 4, 4, 4, 1}, {4096, 48, 32, 36, 2, 2, 16, 1}, {688, 92, 68, 32, 4, 4, 4, 1},
+ {24, 464, 412, 24, 6, 2, 8, 1}, {112, 184, 144, 28, 4, 2, 16, 1}, {5776, 64, 32, 36, 4, 4, 4, 1},
+ {1568, 64, 40, 36, 4, 4, 8, 1}, {2920, 64, 64, 24, 4, 4, 4, 1}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_fallback_nt_t = {
+ {3136, 64, 64, 36, 5, 4, 4, 0}, {4096, 48, 32, 36, 5, 4, 4, 0}, {688, 92, 68, 32, 5, 4, 4, 0},
+ {24, 464, 412, 24, 6, 2, 4, 0}, {112, 184, 144, 28, 5, 4, 4, 0}, {5776, 64, 32, 36, 5, 4, 4, 0},
+ {1568, 64, 40, 36, 5, 4, 4, 0}, {2920, 64, 64, 24, 6, 2, 4, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_t_nt = {
+ {3136, 64, 64, 36, 4, 4, 16, 1}, {4096, 48, 32, 36, 4, 4, 4, 1}, {688, 92, 68, 32, 2, 8, 4, 1},
+ {24, 464, 412, 24, 2, 8, 4, 1}, {112, 184, 144, 28, 4, 4, 16, 1}, {5776, 64, 32, 36, 2, 8, 8, 1},
+ {1568, 64, 40, 36, 4, 4, 8, 1}, {2920, 64, 64, 24, 4, 4, 16, 1}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_fallback_t_nt = {
+ {3136, 64, 64, 36, 4, 4, 4, 0}, {4096, 48, 32, 36, 4, 4, 4, 0}, {688, 92, 68, 32, 4, 4, 4, 0},
+ {24, 464, 412, 24, 4, 4, 4, 0}, {112, 184, 144, 28, 4, 4, 4, 0}, {5776, 64, 32, 36, 4, 4, 8, 0},
+ {1568, 64, 40, 36, 4, 4, 4, 0}, {2920, 64, 64, 24, 4, 4, 4, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_t_t = {
+ {3136, 64, 64, 36, 4, 4, 4, 1}, {4096, 48, 32, 36, 4, 4, 4, 1}, {688, 92, 68, 32, 4, 4, 4, 1},
+ {24, 464, 412, 24, 2, 2, 16, 1}, {112, 184, 144, 28, 4, 4, 4, 1}, {5776, 64, 32, 36, 4, 4, 4, 1},
+ {1568, 64, 40, 36, 4, 4, 4, 1}, {2920, 64, 64, 24, 4, 4, 4, 1}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_fallback_t_t = {
+ {3136, 64, 64, 36, 4, 4, 4, 0}, {4096, 48, 32, 36, 4, 4, 4, 0}, {688, 92, 68, 32, 4, 4, 4, 0},
+ {24, 464, 412, 24, 4, 2, 8, 0}, {112, 184, 144, 28, 4, 4, 4, 0}, {5776, 64, 32, 36, 4, 4, 4, 0},
+ {1568, 64, 40, 36, 4, 4, 4, 0}, {2920, 64, 64, 24, 4, 4, 4, 0}};
const bool adj_lhs = info.adj_lhs();
const bool adj_rhs = info.adj_rhs();
@@ -185,17 +133,17 @@ MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_f32(unsigned
const MatMulNativeConfigsMatrix *configs_best_to_use = nullptr;
const MatMulNativeConfigsMatrix *configs_fallback_to_use = nullptr;
- if((adj_lhs == false) && (adj_rhs == false))
+ if ((adj_lhs == false) && (adj_rhs == false))
{
configs_best_to_use = &configs_mnkb_best_nt_nt;
configs_fallback_to_use = &configs_mnkb_fallback_nt_nt;
}
- else if((adj_lhs == false) && (adj_rhs == true))
+ else if ((adj_lhs == false) && (adj_rhs == true))
{
configs_best_to_use = &configs_mnkb_best_nt_t;
configs_fallback_to_use = &configs_mnkb_fallback_nt_t;
}
- else if((adj_lhs == true) && (adj_rhs == false))
+ else if ((adj_lhs == true) && (adj_rhs == false))
{
configs_best_to_use = &configs_mnkb_best_t_nt;
configs_fallback_to_use = &configs_mnkb_fallback_t_nt;
@@ -209,108 +157,51 @@ MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_f32(unsigned
MatMulKernelInfo desc0 = find_info(*configs_best_to_use, adj_lhs, adj_rhs, m, n, k, b);
MatMulKernelInfo desc1 = find_info(*configs_fallback_to_use, adj_lhs, adj_rhs, m, n, k, b);
- return select_info(desc0,
- desc1,
- m, n, k, b, DataType::F32, rhs_lock_padding);
+ return select_info(desc0, desc1, m, n, k, b, DataType::F32, rhs_lock_padding);
}
-MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_f16(unsigned int m, unsigned int n, unsigned int k, unsigned int b, bool rhs_lock_padding, const MatMulInfo &info)
+MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_f16(
+ unsigned int m, unsigned int n, unsigned int k, unsigned int b, bool rhs_lock_padding, const MatMulInfo &info)
{
- const MatMulNativeConfigsMatrix configs_mnkb_best_nt_nt =
- {
- { 3136, 64, 64, 36, 4, 4, 16, 1 },
- { 4096, 48, 32, 36, 4, 4, 8, 1 },
- { 688, 92, 68, 32, 4, 4, 16, 1 },
- { 24, 464, 412, 24, 4, 4, 4, 1 },
- { 112, 184, 144, 28, 4, 4, 16, 1 },
- { 5776, 64, 32, 36, 4, 4, 8, 1 },
- { 1568, 64, 40, 36, 4, 4, 8, 1 },
- { 2920, 64, 64, 24, 4, 4, 16, 1 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_fallback_nt_nt =
- {
- { 3136, 64, 64, 36, 6, 4, 8, 0 },
- { 4096, 48, 32, 36, 6, 4, 8, 0 },
- { 688, 92, 68, 32, 6, 4, 8, 0 },
- { 24, 464, 412, 24, 4, 4, 8, 0 },
- { 112, 184, 144, 28, 6, 4, 8, 0 },
- { 5776, 64, 32, 36, 6, 4, 8, 0 },
- { 1568, 64, 40, 36, 6, 4, 8, 0 },
- { 2920, 64, 64, 24, 6, 4, 8, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_nt_t =
- {
- { 3136, 64, 64, 36, 6, 4, 8, 1 },
- { 4096, 48, 32, 36, 6, 4, 8, 1 },
- { 688, 92, 68, 32, 4, 4, 4, 1 },
- { 24, 464, 412, 24, 6, 2, 4, 1 },
- { 112, 184, 144, 28, 4, 2, 16, 1 },
- { 5776, 64, 32, 36, 6, 4, 8, 1 },
- { 1568, 64, 40, 36, 6, 4, 8, 1 },
- { 2920, 64, 64, 24, 6, 4, 8, 1 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_fallback_nt_t =
- {
- { 3136, 64, 64, 36, 6, 2, 16, 0 },
- { 4096, 48, 32, 36, 5, 4, 8, 0 },
- { 688, 92, 68, 32, 6, 2, 16, 0 },
- { 24, 464, 412, 24, 6, 2, 16, 0 },
- { 112, 184, 144, 28, 6, 2, 16, 0 },
- { 5776, 64, 32, 36, 5, 4, 8, 0 },
- { 1568, 64, 40, 36, 5, 4, 8, 0 },
- { 2920, 64, 64, 24, 6, 2, 16, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_t_nt =
- {
- { 3136, 64, 64, 36, 4, 4, 16, 1 },
- { 4096, 48, 32, 36, 4, 4, 4, 1 },
- { 688, 92, 68, 32, 4, 4, 4, 1 },
- { 24, 464, 412, 24, 4, 4, 4, 1 },
- { 112, 184, 144, 28, 4, 4, 4, 1 },
- { 5776, 64, 32, 36, 4, 4, 4, 1 },
- { 1568, 64, 40, 36, 4, 4, 4, 1 },
- { 2920, 64, 64, 24, 4, 4, 4, 1 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_fallback_t_nt =
- {
- { 3136, 64, 64, 36, 4, 4, 4, 0 },
- { 4096, 48, 32, 36, 4, 4, 4, 0 },
- { 688, 92, 68, 32, 4, 4, 4, 0 },
- { 24, 464, 412, 24, 4, 4, 4, 0 },
- { 112, 184, 144, 28, 4, 4, 4, 0 },
- { 5776, 64, 32, 36, 4, 4, 4, 0 },
- { 1568, 64, 40, 36, 4, 4, 4, 0 },
- { 2920, 64, 64, 24, 4, 4, 4, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_t_t =
- {
- { 3136, 64, 64, 36, 4, 4, 16, 1 },
- { 4096, 48, 32, 36, 4, 4, 8, 1 },
- { 688, 92, 68, 32, 4, 4, 4, 1 },
- { 24, 464, 412, 24, 4, 2, 8, 1 },
- { 112, 184, 144, 28, 4, 2, 16, 1 },
- { 5776, 64, 32, 36, 4, 4, 16, 1 },
- { 1568, 64, 40, 36, 4, 4, 8, 1 },
- { 2920, 64, 64, 24, 4, 4, 16, 1 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_fallback_t_t =
- {
- { 3136, 64, 64, 36, 4, 4, 8, 0 },
- { 4096, 48, 32, 36, 4, 4, 8, 0 },
- { 688, 92, 68, 32, 4, 4, 8, 0 },
- { 24, 464, 412, 24, 4, 4, 8, 0 },
- { 112, 184, 144, 28, 4, 4, 8, 0 },
- { 5776, 64, 32, 36, 4, 4, 8, 0 },
- { 1568, 64, 40, 36, 4, 4, 8, 0 },
- { 2920, 64, 64, 24, 4, 4, 8, 0 }
- };
+ const MatMulNativeConfigsMatrix configs_mnkb_best_nt_nt = {
+ {3136, 64, 64, 36, 4, 4, 16, 1}, {4096, 48, 32, 36, 4, 4, 8, 1}, {688, 92, 68, 32, 4, 4, 16, 1},
+ {24, 464, 412, 24, 4, 4, 4, 1}, {112, 184, 144, 28, 4, 4, 16, 1}, {5776, 64, 32, 36, 4, 4, 8, 1},
+ {1568, 64, 40, 36, 4, 4, 8, 1}, {2920, 64, 64, 24, 4, 4, 16, 1}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_fallback_nt_nt = {
+ {3136, 64, 64, 36, 6, 4, 8, 0}, {4096, 48, 32, 36, 6, 4, 8, 0}, {688, 92, 68, 32, 6, 4, 8, 0},
+ {24, 464, 412, 24, 4, 4, 8, 0}, {112, 184, 144, 28, 6, 4, 8, 0}, {5776, 64, 32, 36, 6, 4, 8, 0},
+ {1568, 64, 40, 36, 6, 4, 8, 0}, {2920, 64, 64, 24, 6, 4, 8, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_nt_t = {
+ {3136, 64, 64, 36, 6, 4, 8, 1}, {4096, 48, 32, 36, 6, 4, 8, 1}, {688, 92, 68, 32, 4, 4, 4, 1},
+ {24, 464, 412, 24, 6, 2, 4, 1}, {112, 184, 144, 28, 4, 2, 16, 1}, {5776, 64, 32, 36, 6, 4, 8, 1},
+ {1568, 64, 40, 36, 6, 4, 8, 1}, {2920, 64, 64, 24, 6, 4, 8, 1}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_fallback_nt_t = {
+ {3136, 64, 64, 36, 6, 2, 16, 0}, {4096, 48, 32, 36, 5, 4, 8, 0}, {688, 92, 68, 32, 6, 2, 16, 0},
+ {24, 464, 412, 24, 6, 2, 16, 0}, {112, 184, 144, 28, 6, 2, 16, 0}, {5776, 64, 32, 36, 5, 4, 8, 0},
+ {1568, 64, 40, 36, 5, 4, 8, 0}, {2920, 64, 64, 24, 6, 2, 16, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_t_nt = {
+ {3136, 64, 64, 36, 4, 4, 16, 1}, {4096, 48, 32, 36, 4, 4, 4, 1}, {688, 92, 68, 32, 4, 4, 4, 1},
+ {24, 464, 412, 24, 4, 4, 4, 1}, {112, 184, 144, 28, 4, 4, 4, 1}, {5776, 64, 32, 36, 4, 4, 4, 1},
+ {1568, 64, 40, 36, 4, 4, 4, 1}, {2920, 64, 64, 24, 4, 4, 4, 1}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_fallback_t_nt = {
+ {3136, 64, 64, 36, 4, 4, 4, 0}, {4096, 48, 32, 36, 4, 4, 4, 0}, {688, 92, 68, 32, 4, 4, 4, 0},
+ {24, 464, 412, 24, 4, 4, 4, 0}, {112, 184, 144, 28, 4, 4, 4, 0}, {5776, 64, 32, 36, 4, 4, 4, 0},
+ {1568, 64, 40, 36, 4, 4, 4, 0}, {2920, 64, 64, 24, 4, 4, 4, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_t_t = {
+ {3136, 64, 64, 36, 4, 4, 16, 1}, {4096, 48, 32, 36, 4, 4, 8, 1}, {688, 92, 68, 32, 4, 4, 4, 1},
+ {24, 464, 412, 24, 4, 2, 8, 1}, {112, 184, 144, 28, 4, 2, 16, 1}, {5776, 64, 32, 36, 4, 4, 16, 1},
+ {1568, 64, 40, 36, 4, 4, 8, 1}, {2920, 64, 64, 24, 4, 4, 16, 1}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_fallback_t_t = {
+ {3136, 64, 64, 36, 4, 4, 8, 0}, {4096, 48, 32, 36, 4, 4, 8, 0}, {688, 92, 68, 32, 4, 4, 8, 0},
+ {24, 464, 412, 24, 4, 4, 8, 0}, {112, 184, 144, 28, 4, 4, 8, 0}, {5776, 64, 32, 36, 4, 4, 8, 0},
+ {1568, 64, 40, 36, 4, 4, 8, 0}, {2920, 64, 64, 24, 4, 4, 8, 0}};
const bool adj_lhs = info.adj_lhs();
const bool adj_rhs = info.adj_rhs();
@@ -318,17 +209,17 @@ MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_f16(unsigned
const MatMulNativeConfigsMatrix *configs_best_to_use = nullptr;
const MatMulNativeConfigsMatrix *configs_fallback_to_use = nullptr;
- if((adj_lhs == false) && (adj_rhs == false))
+ if ((adj_lhs == false) && (adj_rhs == false))
{
configs_best_to_use = &configs_mnkb_best_nt_nt;
configs_fallback_to_use = &configs_mnkb_fallback_nt_nt;
}
- else if((adj_lhs == false) && (adj_rhs == true))
+ else if ((adj_lhs == false) && (adj_rhs == true))
{
configs_best_to_use = &configs_mnkb_best_nt_t;
configs_fallback_to_use = &configs_mnkb_fallback_nt_t;
}
- else if((adj_lhs == true) && (adj_rhs == false))
+ else if ((adj_lhs == true) && (adj_rhs == false))
{
configs_best_to_use = &configs_mnkb_best_t_nt;
configs_fallback_to_use = &configs_mnkb_fallback_t_nt;
@@ -342,75 +233,46 @@ MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_f16(unsigned
MatMulKernelInfo desc0 = find_info(*configs_best_to_use, adj_lhs, adj_rhs, m, n, k, b);
MatMulKernelInfo desc1 = find_info(*configs_fallback_to_use, adj_lhs, adj_rhs, m, n, k, b);
- return select_info(desc0,
- desc1,
- m, n, k, b, DataType::F16, rhs_lock_padding);
+ return select_info(desc0, desc1, m, n, k, b, DataType::F16, rhs_lock_padding);
}
-MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_u8(unsigned int m, unsigned int n, unsigned int k, unsigned int b, bool rhs_lock_padding, const MatMulInfo &info)
+MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_u8(
+ unsigned int m, unsigned int n, unsigned int k, unsigned int b, bool rhs_lock_padding, const MatMulInfo &info)
{
ARM_COMPUTE_UNUSED(rhs_lock_padding);
- const MatMulNativeConfigsMatrix configs_mnkb_best_nt_nt =
- {
- { 3136, 64, 64, 36, 6, 4, 4, 0 },
- { 4096, 48, 32, 36, 6, 4, 4, 0 },
- { 688, 92, 68, 32, 2, 8, 4, 0 },
- { 24, 464, 412, 24, 4, 4, 4, 0 },
- { 112, 184, 144, 28, 6, 4, 4, 0 },
- { 5776, 64, 32, 36, 6, 4, 4, 0 },
- { 1568, 64, 40, 36, 6, 4, 4, 0 },
- { 2920, 64, 64, 24, 5, 4, 4, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_nt_t =
- {
- { 3136, 64, 64, 36, 4, 4, 16, 0 },
- { 4096, 48, 32, 36, 4, 4, 16, 0 },
- { 688, 92, 68, 32, 4, 4, 16, 0 },
- { 24, 464, 412, 24, 6, 2, 16, 0 },
- { 112, 184, 144, 28, 4, 4, 16, 0 },
- { 5776, 64, 32, 36, 4, 4, 16, 0 },
- { 1568, 64, 40, 36, 6, 4, 4, 0 },
- { 2920, 64, 64, 24, 4, 4, 16, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_t_nt =
- {
- { 3136, 64, 64, 36, 4, 4, 8, 0 },
- { 4096, 48, 32, 36, 4, 4, 8, 0 },
- { 688, 92, 68, 32, 4, 4, 4, 0 },
- { 24, 464, 412, 24, 4, 4, 4, 0 },
- { 112, 184, 144, 28, 4, 4, 8, 0 },
- { 5776, 64, 32, 36, 4, 4, 8, 0 },
- { 1568, 64, 40, 36, 4, 4, 8, 0 },
- { 2920, 64, 64, 24, 4, 4, 8, 0 }
- };
-
- const MatMulNativeConfigsMatrix configs_mnkb_best_t_t =
- {
- { 3136, 64, 64, 36, 4, 2, 16, 0 },
- { 4096, 48, 32, 36, 4, 4, 4, 0 },
- { 688, 92, 68, 32, 4, 4, 8, 0 },
- { 24, 464, 412, 24, 4, 2, 16, 0 },
- { 112, 184, 144, 28, 4, 2, 16, 0 },
- { 5776, 64, 32, 36, 4, 4, 4, 0 },
- { 1568, 64, 40, 36, 4, 4, 8, 0 },
- { 2920, 64, 64, 24, 4, 2, 16, 0 }
- };
+ const MatMulNativeConfigsMatrix configs_mnkb_best_nt_nt = {
+ {3136, 64, 64, 36, 6, 4, 4, 0}, {4096, 48, 32, 36, 6, 4, 4, 0}, {688, 92, 68, 32, 2, 8, 4, 0},
+ {24, 464, 412, 24, 4, 4, 4, 0}, {112, 184, 144, 28, 6, 4, 4, 0}, {5776, 64, 32, 36, 6, 4, 4, 0},
+ {1568, 64, 40, 36, 6, 4, 4, 0}, {2920, 64, 64, 24, 5, 4, 4, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_nt_t = {
+ {3136, 64, 64, 36, 4, 4, 16, 0}, {4096, 48, 32, 36, 4, 4, 16, 0}, {688, 92, 68, 32, 4, 4, 16, 0},
+ {24, 464, 412, 24, 6, 2, 16, 0}, {112, 184, 144, 28, 4, 4, 16, 0}, {5776, 64, 32, 36, 4, 4, 16, 0},
+ {1568, 64, 40, 36, 6, 4, 4, 0}, {2920, 64, 64, 24, 4, 4, 16, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_t_nt = {
+ {3136, 64, 64, 36, 4, 4, 8, 0}, {4096, 48, 32, 36, 4, 4, 8, 0}, {688, 92, 68, 32, 4, 4, 4, 0},
+ {24, 464, 412, 24, 4, 4, 4, 0}, {112, 184, 144, 28, 4, 4, 8, 0}, {5776, 64, 32, 36, 4, 4, 8, 0},
+ {1568, 64, 40, 36, 4, 4, 8, 0}, {2920, 64, 64, 24, 4, 4, 8, 0}};
+
+ const MatMulNativeConfigsMatrix configs_mnkb_best_t_t = {
+ {3136, 64, 64, 36, 4, 2, 16, 0}, {4096, 48, 32, 36, 4, 4, 4, 0}, {688, 92, 68, 32, 4, 4, 8, 0},
+ {24, 464, 412, 24, 4, 2, 16, 0}, {112, 184, 144, 28, 4, 2, 16, 0}, {5776, 64, 32, 36, 4, 4, 4, 0},
+ {1568, 64, 40, 36, 4, 4, 8, 0}, {2920, 64, 64, 24, 4, 2, 16, 0}};
const bool adj_lhs = info.adj_lhs();
const bool adj_rhs = info.adj_rhs();
- if((adj_lhs == false) && (adj_rhs == false))
+ if ((adj_lhs == false) && (adj_rhs == false))
{
return find_info(configs_mnkb_best_nt_nt, adj_lhs, adj_rhs, m, n, k, b);
}
- else if((adj_lhs == false) && (adj_rhs == true))
+ else if ((adj_lhs == false) && (adj_rhs == true))
{
return find_info(configs_mnkb_best_nt_t, adj_lhs, adj_rhs, m, n, k, b);
}
- else if((adj_lhs == true) && (adj_rhs == false))
+ else if ((adj_lhs == true) && (adj_rhs == false))
{
return find_info(configs_mnkb_best_t_nt, adj_lhs, adj_rhs, m, n, k, b);
}
@@ -419,5 +281,5 @@ MatMulKernelInfo ClMatMulNativeDefaultConfigValhall::configure_G710_u8(unsigned
return find_info(configs_mnkb_best_t_t, adj_lhs, adj_rhs, m, n, k, b);
}
}
-} // namespace opencl
+} // namespace cl_matmul
} // namespace arm_compute
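For readers of the reflowed heuristic tables above, the standalone sketch below models how such a table is assumed to work: each row is taken to be {M, N, K, B, m0, n0, k0, export_rhs_to_cl_image}, and a helper in the spirit of find_info() picks the row whose problem shape is closest to the query and returns the tile sizes stored in the remaining columns. The struct, function name, and distance metric are assumptions made for illustration only; this is not the library's implementation.

    // Illustrative sketch only: assumed row layout
    //   {M, N, K, B, m0, n0, k0, export_rhs_to_cl_image}
    // and a nearest-shape lookup loosely resembling find_info().
    #include <array>
    #include <cstdint>
    #include <cstdlib>
    #include <iostream>
    #include <vector>

    struct TileConfig
    {
        int32_t m0;
        int32_t n0;
        int32_t k0;
        bool    export_rhs_to_cl_image;
    };

    using ConfigsMatrix = std::vector<std::array<int32_t, 8>>;

    // Pick the row whose (M, N, K, B) problem shape is closest to the query
    // (L1 distance, chosen here purely for illustration), then return the
    // tile configuration stored in the remaining columns.
    TileConfig pick_config(const ConfigsMatrix &configs, int32_t m, int32_t n, int32_t k, int32_t b)
    {
        const std::array<int32_t, 8> *best      = &configs.front();
        int64_t                       best_dist = INT64_MAX;
        for (const auto &row : configs)
        {
            const int64_t dist = std::llabs(row[0] - m) + std::llabs(row[1] - n) +
                                 std::llabs(row[2] - k) + std::llabs(row[3] - b);
            if (dist < best_dist)
            {
                best_dist = dist;
                best      = &row;
            }
        }
        return TileConfig{(*best)[4], (*best)[5], (*best)[6], (*best)[7] != 0};
    }

    int main()
    {
        // Same numbers as some of the F32 nt/nt rows in the diff above.
        const ConfigsMatrix configs = {
            {3136, 64, 64, 36, 4, 4, 16, 1}, {4096, 48, 32, 36, 4, 4, 4, 1}, {24, 464, 412, 24, 2, 8, 4, 1}};

        const TileConfig cfg = pick_config(configs, 3000, 64, 64, 32);
        std::cout << "m0=" << cfg.m0 << " n0=" << cfg.n0 << " k0=" << cfg.k0
                  << " export_rhs_to_cl_image=" << cfg.export_rhs_to_cl_image << "\n";
        return 0;
    }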