diff options
author | Moritz Pflanzer <moritz.pflanzer@arm.com> | 2017-07-18 13:42:54 +0100 |
---|---|---|
committer | Anthony Barbier <anthony.barbier@arm.com> | 2018-09-17 14:16:42 +0100 |
commit | b6c8d24042616341c1fbca6e255a69561c73fedf (patch) | |
tree | f959d732981d4230d6491d8636afe3f31d64e798 /tests/fixtures_new | |
parent | fce87954ac2373e910ccb0d83a00f5958ba41e71 (diff) | |
download | ComputeLibrary-b6c8d24042616341c1fbca6e255a69561c73fedf.tar.gz |
COMPMID-415: Use templates for data arguments
Change-Id: I815d705e7cf42022f7a203935dcaaa333a2801fe
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/80311
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
Diffstat (limited to 'tests/fixtures_new')
-rw-r--r-- | tests/fixtures_new/ActivationLayerFixture.h | 1 | +
-rw-r--r-- | tests/fixtures_new/AlexNetFixture.h | 1 | +
-rw-r--r-- | tests/fixtures_new/ConvolutionLayerFixture.h | 1 | +
-rw-r--r-- | tests/fixtures_new/FullyConnectedLayerFixture.h | 1 | +
-rw-r--r-- | tests/fixtures_new/GEMMFixture.h | 1 | +
-rw-r--r-- | tests/fixtures_new/LeNet5Fixture.h | 1 | +
-rw-r--r-- | tests/fixtures_new/NormalizationLayerFixture.h | 1 | +
-rw-r--r-- | tests/fixtures_new/PoolingLayerFixture.h | 1 | +
8 files changed, 8 insertions, 0 deletions
diff --git a/tests/fixtures_new/ActivationLayerFixture.h b/tests/fixtures_new/ActivationLayerFixture.h
index bb03fa2ed0..5066810c79 100644
--- a/tests/fixtures_new/ActivationLayerFixture.h
+++ b/tests/fixtures_new/ActivationLayerFixture.h
@@ -39,6 +39,7 @@ template <typename TensorType, typename Function, typename Accessor>
 class ActivationLayerFixture : public framework::Fixture
 {
 public:
+    template <typename...>
     void setup(TensorShape shape, ActivationLayerInfo info, DataType data_type, int batches)
     {
         // Set batched in source and destination shapes
diff --git a/tests/fixtures_new/AlexNetFixture.h b/tests/fixtures_new/AlexNetFixture.h
index fcac1b2236..0ebdae0091 100644
--- a/tests/fixtures_new/AlexNetFixture.h
+++ b/tests/fixtures_new/AlexNetFixture.h
@@ -47,6 +47,7 @@ template <typename ITensorType,
 class AlexNetFixture : public framework::Fixture
 {
 public:
+    template <typename...>
     void setup(DataType data_type, int batches)
     {
         constexpr bool weights_transposed = true;
diff --git a/tests/fixtures_new/ConvolutionLayerFixture.h b/tests/fixtures_new/ConvolutionLayerFixture.h
index 65426103e2..f41cd1d25e 100644
--- a/tests/fixtures_new/ConvolutionLayerFixture.h
+++ b/tests/fixtures_new/ConvolutionLayerFixture.h
@@ -39,6 +39,7 @@ template <typename TensorType, typename Function, typename Accessor>
 class ConvolutionLayerFixture : public framework::Fixture
 {
 public:
+    template <typename...>
     void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape biases_shape, TensorShape dst_shape, PadStrideInfo info, DataType data_type, int batches)
     {
         // Set batched in source and destination shapes
diff --git a/tests/fixtures_new/FullyConnectedLayerFixture.h b/tests/fixtures_new/FullyConnectedLayerFixture.h
index 9bf18a9689..82ecb39b9c 100644
--- a/tests/fixtures_new/FullyConnectedLayerFixture.h
+++ b/tests/fixtures_new/FullyConnectedLayerFixture.h
@@ -39,6 +39,7 @@ template <typename TensorType, typename Function, typename Accessor>
 class FullyConnectedLayerFixture : public framework::Fixture
 {
 public:
+    template <typename...>
     void setup(TensorShape src_shape, TensorShape weights_shape, TensorShape biases_shape, TensorShape dst_shape, DataType data_type, int batches)
     {
         // Set batched in source and destination shapes
diff --git a/tests/fixtures_new/GEMMFixture.h b/tests/fixtures_new/GEMMFixture.h
index cd357789e5..b23661f3e3 100644
--- a/tests/fixtures_new/GEMMFixture.h
+++ b/tests/fixtures_new/GEMMFixture.h
@@ -39,6 +39,7 @@ template <typename TensorType, typename Function>
 class GEMMFixture : public framework::Fixture
 {
 public:
+    template <typename...>
     void setup(TensorShape shape_a, TensorShape shape_b, TensorShape shape_c, TensorShape shape_dst, float alpha, float beta, DataType data_type)
     {
         constexpr int fixed_point_position = 4;
diff --git a/tests/fixtures_new/LeNet5Fixture.h b/tests/fixtures_new/LeNet5Fixture.h
index 3f36628c60..d9173af048 100644
--- a/tests/fixtures_new/LeNet5Fixture.h
+++ b/tests/fixtures_new/LeNet5Fixture.h
@@ -43,6 +43,7 @@ template <typename TensorType,
 class LeNet5Fixture : public framework::Fixture
 {
 public:
+    template <typename...>
     void setup(int batches)
     {
         network.init(batches);
diff --git a/tests/fixtures_new/NormalizationLayerFixture.h b/tests/fixtures_new/NormalizationLayerFixture.h
index 63d2d42c88..999eed6cff 100644
--- a/tests/fixtures_new/NormalizationLayerFixture.h
+++ b/tests/fixtures_new/NormalizationLayerFixture.h
@@ -39,6 +39,7 @@ template <typename TensorType, typename Function, typename Accessor>
 class NormalizationLayerFixture : public framework::Fixture
 {
 public:
+    template <typename...>
     void setup(TensorShape shape, NormalizationLayerInfo info, DataType data_type, int batches)
     {
         // Set batched in source and destination shapes
diff --git a/tests/fixtures_new/PoolingLayerFixture.h b/tests/fixtures_new/PoolingLayerFixture.h
index a09b421ad0..fc9c90ae3c 100644
--- a/tests/fixtures_new/PoolingLayerFixture.h
+++ b/tests/fixtures_new/PoolingLayerFixture.h
@@ -39,6 +39,7 @@ template <typename TensorType, typename Function, typename Accessor>
 class PoolingLayerFixture : public framework::Fixture
 {
 public:
+    template <typename...>
     void setup(TensorShape src_shape, TensorShape dst_shape, PoolingLayerInfo info, DataType data_type, int batches)
     {
         // Set batched in source and destination shapes