author    Georgios Pinitas <georgios.pinitas@arm.com>  2019-04-12 13:15:58 +0100
committer Georgios Pinitas <georgios.pinitas@arm.com>  2019-04-15 16:52:22 +0000
commit    9e4824c909b14dbaf7106e9527b0ffa22ef09bdc (patch)
tree      b1cc8f6a8b275a7e227e305f1b02870d5e0f30ec /tests
parent    d66094e37ecd747e85f30130e1a678bdbaf30788 (diff)
download  ComputeLibrary-9e4824c909b14dbaf7106e9527b0ffa22ef09bdc.tar.gz
COMPMID-2111: ConcatenateLayer API should accept an index instead of an enum
Alters the concatenate layer to be layout agnostic and accept an index as the concatenation axis instead of a typed, layout-dependent enumeration.

Change-Id: I0eaaf919f66a1ba1b09bbfb47c171fc1d4045530
Signed-off-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-on: https://review.mlplatform.org/c/994
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
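For readers skimming the patch, a minimal standalone sketch (not part of the commit) of the new call shape, mirroring the CL Configuration test below. The tensor shapes, data type and OpenCL setup are illustrative assumptions; the axis index 2 corresponds to the channel dimension that the old DataLayoutDimension::CHANNEL value selected.

    // Hypothetical example of the index-based concatenation API (not from the patch).
    #include "arm_compute/core/TensorInfo.h"
    #include "arm_compute/core/Types.h"
    #include "arm_compute/runtime/CL/CLScheduler.h"
    #include "arm_compute/runtime/CL/CLTensor.h"
    #include "arm_compute/runtime/CL/functions/CLConcatenateLayer.h"

    using namespace arm_compute;

    int main()
    {
        CLScheduler::get().default_init();

        // Illustrative shapes: the inputs differ only along dimension 2 (channels).
        CLTensor src1, src2, dst;
        src1.allocator()->init(TensorInfo(TensorShape(23U, 27U, 4U), 1, DataType::F32));
        src2.allocator()->init(TensorInfo(TensorShape(23U, 27U, 5U), 1, DataType::F32));
        dst.allocator()->init(TensorInfo(TensorShape(23U, 27U, 9U), 1, DataType::F32));

        CLConcatenateLayer concat;
        // Old API: concat.configure({ &src1, &src2 }, &dst, DataLayoutDimension::CHANNEL);
        concat.configure({ &src1, &src2 }, &dst, 2); // new API: axis passed as a plain index

        src1.allocator()->allocate();
        src2.allocator()->allocate();
        dst.allocator()->allocate();

        concat.run();
        CLScheduler::get().sync();
        return 0;
    }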
Diffstat (limited to 'tests')
-rw-r--r--  tests/validation/CL/DepthConcatenateLayer.cpp         4
-rw-r--r--  tests/validation/CL/WidthConcatenateLayer.cpp         4
-rw-r--r--  tests/validation/NEON/DepthConcatenateLayer.cpp      16
-rw-r--r--  tests/validation/NEON/HeightConcatenateLayer.cpp      2
-rw-r--r--  tests/validation/NEON/WidthConcatenateLayer.cpp       3
-rw-r--r--  tests/validation/fixtures/ConcatenateLayerFixture.h  16
6 files changed, 15 insertions, 30 deletions
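The axis literals substituted throughout the diff below follow the default NCHW dimension ordering used by these tests. A small standalone check (an illustrative addition, assuming the get_data_layout_dimension_index helper declared in arm_compute/core/Helpers.h) makes the enum-to-index mapping explicit:

    #include "arm_compute/core/Helpers.h"
    #include "arm_compute/core/Types.h"
    #include <cassert>

    int main()
    {
        using namespace arm_compute;
        // WIDTH -> 0, HEIGHT -> 1, CHANNEL -> 2 for NCHW, matching the literals used in the tests.
        assert(get_data_layout_dimension_index(DataLayout::NCHW, DataLayoutDimension::WIDTH)   == 0);
        assert(get_data_layout_dimension_index(DataLayout::NCHW, DataLayoutDimension::HEIGHT)  == 1);
        assert(get_data_layout_dimension_index(DataLayout::NCHW, DataLayoutDimension::CHANNEL) == 2);
        return 0;
    }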
diff --git a/tests/validation/CL/DepthConcatenateLayer.cpp b/tests/validation/CL/DepthConcatenateLayer.cpp
index f4a693ca7d..5da8a34351 100644
--- a/tests/validation/CL/DepthConcatenateLayer.cpp
+++ b/tests/validation/CL/DepthConcatenateLayer.cpp
@@ -73,7 +73,7 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
inputs_vector_info_raw.emplace_back(&input);
}
- bool is_valid = bool(CLConcatenateLayer::validate(inputs_vector_info_raw, &output_info.clone()->set_is_resizable(false), DataLayoutDimension::CHANNEL));
+ bool is_valid = bool(CLConcatenateLayer::validate(inputs_vector_info_raw, &output_info.clone()->set_is_resizable(false), 2));
ARM_COMPUTE_EXPECT(is_valid == expected, framework::LogLevel::ERRORS);
}
// clang-format on
@@ -95,7 +95,7 @@ TEST_CASE(Configuration, framework::DatasetMode::ALL)
// Create and configure function
CLConcatenateLayer concat_layer;
- concat_layer.configure({ &src1, &src2, &src3 }, &dst, DataLayoutDimension::CHANNEL);
+ concat_layer.configure({ &src1, &src2, &src3 }, &dst, 2);
}
template <typename T>
using CLDepthConcatenateLayerFixture = ConcatenateLayerValidationFixture<CLTensor, ICLTensor, CLAccessor, CLConcatenateLayer, T>;
diff --git a/tests/validation/CL/WidthConcatenateLayer.cpp b/tests/validation/CL/WidthConcatenateLayer.cpp
index 0ca6d72bff..2c1eb7fada 100644
--- a/tests/validation/CL/WidthConcatenateLayer.cpp
+++ b/tests/validation/CL/WidthConcatenateLayer.cpp
@@ -77,7 +77,7 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
inputs_vector_info_raw.emplace_back(&input);
}
- bool is_valid = bool(CLConcatenateLayer::validate(inputs_vector_info_raw,&output_info.clone()->set_is_resizable(true),DataLayoutDimension::WIDTH ));
+ bool is_valid = bool(CLConcatenateLayer::validate(inputs_vector_info_raw,&output_info.clone()->set_is_resizable(true), 0));
ARM_COMPUTE_EXPECT(is_valid == expected, framework::LogLevel::ERRORS);
}
// clang-format on
@@ -99,7 +99,7 @@ TEST_CASE(Configuration, framework::DatasetMode::ALL)
// Create and configure function
CLConcatenateLayer concat_layer;
- concat_layer.configure({ &src1, &src2, &src3 }, &dst, DataLayoutDimension::WIDTH);
+ concat_layer.configure({ &src1, &src2, &src3 }, &dst, 0);
}
template <typename T>
diff --git a/tests/validation/NEON/DepthConcatenateLayer.cpp b/tests/validation/NEON/DepthConcatenateLayer.cpp
index 1b355ae17d..0ddb220d34 100644
--- a/tests/validation/NEON/DepthConcatenateLayer.cpp
+++ b/tests/validation/NEON/DepthConcatenateLayer.cpp
@@ -55,13 +55,13 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
TensorInfo(TensorShape(23U, 27U, 4U), 1, DataType::F32),
TensorInfo(TensorShape(16U, 27U, 6U), 1, DataType::F32)
})),
- framework::dataset::make("OutputInfo", { TensorInfo(TensorShape(23U, 27U, 9U), 1, DataType::F16),
- TensorInfo(TensorShape(25U, 12U, 9U), 1, DataType::F32),
- TensorInfo(TensorShape(23U, 27U, 8U), 1, DataType::F32),
- TensorInfo(TensorShape(16U, 27U, 12U), 1, DataType::F32)
- })),
- framework::dataset::make("Expected", { false, false, false, true })),
- input_info1, input_info2, output_info,expected)
+ framework::dataset::make("OutputInfo", { TensorInfo(TensorShape(23U, 27U, 9U), 1, DataType::F16),
+ TensorInfo(TensorShape(25U, 12U, 9U), 1, DataType::F32),
+ TensorInfo(TensorShape(23U, 27U, 8U), 1, DataType::F32),
+ TensorInfo(TensorShape(16U, 27U, 12U), 1, DataType::F32)
+ })),
+ framework::dataset::make("Expected", { false, false, false, true })),
+ input_info1, input_info2, output_info,expected)
{
std::vector<TensorInfo> inputs_vector_info;
inputs_vector_info.emplace_back(std::move(input_info1));
@@ -73,7 +73,7 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
inputs_vector_info_raw.emplace_back(&input);
}
- bool is_valid = bool(NEConcatenateLayer::validate(inputs_vector_info_raw, &output_info.clone()->set_is_resizable(false), DataLayoutDimension::CHANNEL));
+ bool is_valid = bool(NEConcatenateLayer::validate(inputs_vector_info_raw, &output_info.clone()->set_is_resizable(false), 2));
ARM_COMPUTE_EXPECT(is_valid == expected, framework::LogLevel::ERRORS);
}
// clang-format on
diff --git a/tests/validation/NEON/HeightConcatenateLayer.cpp b/tests/validation/NEON/HeightConcatenateLayer.cpp
index 0d08824645..9c23fb9bd3 100644
--- a/tests/validation/NEON/HeightConcatenateLayer.cpp
+++ b/tests/validation/NEON/HeightConcatenateLayer.cpp
@@ -77,7 +77,7 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
inputs_vector_info_raw.emplace_back(&input);
}
- bool is_valid = bool(NEConcatenateLayer::validate(inputs_vector_info_raw, &output_info.clone()->set_is_resizable(true), DataLayoutDimension::HEIGHT));
+ bool is_valid = bool(NEConcatenateLayer::validate(inputs_vector_info_raw, &output_info.clone()->set_is_resizable(true), 1));
ARM_COMPUTE_EXPECT(is_valid == expected, framework::LogLevel::ERRORS);
}
// clang-format on
diff --git a/tests/validation/NEON/WidthConcatenateLayer.cpp b/tests/validation/NEON/WidthConcatenateLayer.cpp
index 20df3f4d7d..ed840ef325 100644
--- a/tests/validation/NEON/WidthConcatenateLayer.cpp
+++ b/tests/validation/NEON/WidthConcatenateLayer.cpp
@@ -75,8 +75,7 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
inputs_vector_info_raw.emplace_back(&input);
}
- bool is_valid = bool(NEConcatenateLayer::validate(inputs_vector_info_raw,
- &output_info.clone()->set_is_resizable(true),DataLayoutDimension::WIDTH));
+ bool is_valid = bool(NEConcatenateLayer::validate(inputs_vector_info_raw, &output_info.clone()->set_is_resizable(true), 0));
ARM_COMPUTE_EXPECT(is_valid == expected, framework::LogLevel::ERRORS);
}
// clang-format on
diff --git a/tests/validation/fixtures/ConcatenateLayerFixture.h b/tests/validation/fixtures/ConcatenateLayerFixture.h
index db09957c09..39d4f9f95d 100644
--- a/tests/validation/fixtures/ConcatenateLayerFixture.h
+++ b/tests/validation/fixtures/ConcatenateLayerFixture.h
@@ -112,21 +112,7 @@ protected:
// Create and configure function
FunctionType concat;
- switch(axis)
- {
- case 0:
- concat.configure(src_ptrs, &dst, DataLayoutDimension::WIDTH);
- break;
- case 1:
- concat.configure(src_ptrs, &dst, DataLayoutDimension::HEIGHT);
- break;
- case 2:
- concat.configure(src_ptrs, &dst, DataLayoutDimension::CHANNEL);
- break;
- default:
- ARM_COMPUTE_ERROR("Not supported");
- break;
- }
+ concat.configure(src_ptrs, &dst, axis);
for(auto &src : srcs)
{