path: root/tests/validation/fixtures/FullyConnectedLayerFixture.h
author     Matthew Bentham <Matthew.Bentham@arm.com>  2023-07-12 11:54:59 +0000
committer  Matthew Bentham <matthew.bentham@arm.com>  2023-07-12 14:56:44 +0000
commit     945b8da90cea5ccacc0294e58131f73f39137367 (patch)
tree       e3ae7bb1fc5ef28a683883c0eafef531e4f4fa2e /tests/validation/fixtures/FullyConnectedLayerFixture.h
parent     ab0b75054ca3ddd62cff34518f331aa8474daa5a (diff)
download   ComputeLibrary-945b8da90cea5ccacc0294e58131f73f39137367.tar.gz
Make test fixture setup methods not be templated
This simplifies code slightly as nothing needs those functions to be function templates.

Signed-off-by: Matthew Bentham <Matthew.Bentham@arm.com>
Change-Id: If48694bf5677bb83426aeba952eb87174a42dff0
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/c/VisualCompute/ComputeLibrary/+/536135
Tested-by: bsgcomp <bsgcomp@arm.com>
Reviewed-by: Jakub Sujak <jakub.sujak@arm.com>
Comments-Addressed: bsgcomp <bsgcomp@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/9907
Benchmark: Arm Jenkins <bsgcomp@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
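The change is purely mechanical: each setup overload drops its unused `template <typename...>` header and becomes an ordinary member function, so call sites are unaffected. Below is a minimal before/after sketch using a hypothetical ExampleFixture class (not the actual ComputeLibrary fixture) with a trivially simplified setup body, just to show the shape of the edit:

// Hypothetical illustration of the commit; ExampleFixture is not a real
// ComputeLibrary class.
#include <cstddef>
#include <vector>

// Before: setup was declared as a variadic function template, e.g.
//     template <typename...>
//     void setup(int rows, int cols);
// even though the parameter pack was never used by any caller.

class ExampleFixture
{
public:
    // After: a plain member function is sufficient and slightly simpler.
    void setup(int rows, int cols)
    {
        _data.assign(static_cast<std::size_t>(rows) * cols, 0.0f);
    }

private:
    std::vector<float> _data{};
};

int main()
{
    ExampleFixture fixture;
    fixture.setup(3, 4); // called exactly as before; no template arguments needed
    return 0;
}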
Diffstat (limited to 'tests/validation/fixtures/FullyConnectedLayerFixture.h')
-rw-r--r--  tests/validation/fixtures/FullyConnectedLayerFixture.h  7
1 file changed, 0 insertions, 7 deletions
diff --git a/tests/validation/fixtures/FullyConnectedLayerFixture.h b/tests/validation/fixtures/FullyConnectedLayerFixture.h
index e13c01d1e2..fb1cb4dcb6 100644
--- a/tests/validation/fixtures/FullyConnectedLayerFixture.h
+++ b/tests/validation/fixtures/FullyConnectedLayerFixture.h
@@ -54,7 +54,6 @@ public:
using TBias = typename std::conditional < (std::is_same<TDecay, uint8_t>::value || std::is_same<TDecay, int8_t>::value), int32_t, T >::type;
public:
- template <typename...>
void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights,
DataType data_type, QuantizationInfo quantization_info, ActivationLayerInfo activation_info, bool mixed_layout = false)
{
@@ -248,7 +247,6 @@ template <typename TensorType, typename AccessorType, typename FunctionType, typ
class FullyConnectedLayerValidationFixture : public FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
{
public:
- template <typename...>
void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights, DataType data_type,
ActivationLayerInfo activation_info)
{
@@ -262,7 +260,6 @@ template <typename TensorType, typename AccessorType, typename FunctionType, typ
class FullyConnectedLayerValidationQuantizedFixture : public FullyConnectedLayerValidationGenericFixture<TensorType, AccessorType, FunctionType, T>
{
public:
- template <typename...>
void setup(TensorShape input_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape output_shape, bool transpose_weights, bool reshape_weights, DataType data_type,
QuantizationInfo quantization_info, ActivationLayerInfo activation_info)
{
@@ -358,7 +355,6 @@ public:
using TDecay = typename std::decay<T>::type;
using TBias = typename std::conditional < (std::is_same<TDecay, uint8_t>::value || std::is_same<TDecay, int8_t>::value), int32_t, T >::type;
- template <typename...>
void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape dst_shape,
DataType data_type, ActivationLayerInfo activation_info, bool constant_weights, bool constant_bias, bool weights_reshaped, bool remove_bias = false)
{
@@ -496,7 +492,6 @@ template <typename TensorType, typename AccessorType, typename FunctionType, typ
class FullyConnectedWithDynamicWeightsFixture : public FullyConnectedWithDynamicTensorsFixture<TensorType, AccessorType, FunctionType, T>
{
public:
- template <typename...>
void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape dst_shape,
DataType data_type, ActivationLayerInfo activation_info, bool weights_reshaped)
{
@@ -509,7 +504,6 @@ template <typename TensorType, typename AccessorType, typename FunctionType, typ
class FullyConnectedDynamicNoBiasFixture : public FullyConnectedWithDynamicTensorsFixture<TensorType, AccessorType, FunctionType, T>
{
public:
- template <typename...>
void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape dst_shape,
DataType data_type, ActivationLayerInfo activation_info, bool weights_reshaped)
{
@@ -522,7 +516,6 @@ template <typename TensorType, typename AccessorType, typename FunctionType, typ
class FullyConnectedWithDynamicBiasFixture : public FullyConnectedWithDynamicTensorsFixture<TensorType, AccessorType, FunctionType, T>
{
public:
- template <typename...>
void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape bias_shape, TensorShape dst_shape,
DataType data_type, ActivationLayerInfo activation_info)
{