From 17d6d3cb0f92517b3764870b85cb7ffd22332556 Mon Sep 17 00:00:00 2001
From: Georgios Pinitas
Date: Mon, 2 Jul 2018 17:52:40 +0100
Subject: COMPMID-1352: Disable support for 4D softmax layer.

Change-Id: Ia8afabb36e644895d321ded51a6a0676347443e1
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/138387
Reviewed-by: Gian Marco Iodice
Tested-by: Jenkins
---
 tests/validation/CL/SoftmaxLayer.cpp   | 26 ++++++++++++++------------
 tests/validation/NEON/SoftmaxLayer.cpp | 16 +++++++++-------
 2 files changed, 23 insertions(+), 19 deletions(-)

diff --git a/tests/validation/CL/SoftmaxLayer.cpp b/tests/validation/CL/SoftmaxLayer.cpp
index 114529edfd..b47f84f8cd 100644
--- a/tests/validation/CL/SoftmaxLayer.cpp
+++ b/tests/validation/CL/SoftmaxLayer.cpp
@@ -99,25 +99,27 @@ DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(concat(datase
 // *INDENT-OFF*
 // clang-format off
 DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(
-               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
-                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching shapes
-                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::QASYMM8, // Invalid output quantization info
+               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U), 1, DataType::F32), // Mismatching data types
+                                                       TensorInfo(TensorShape(27U, 13U), 1, DataType::F32), // Mismatching shapes
+                                                       TensorInfo(TensorShape(27U, 13U), 1, DataType::QASYMM8, // Invalid output quantization info
                                                                   QuantizationInfo(1.f/256, 12)),
-                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Window shrink
-                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QASYMM8,
+                                                       TensorInfo(TensorShape(27U, 13U), 1, DataType::F32), // Window shrink
+                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),// Invalid input dimensionality
+                                                       TensorInfo(TensorShape(32U, 13U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(32U, 13U), 1, DataType::QASYMM8,
                                                                   QuantizationInfo(1.f/256, 12)),
                                                      }),
-               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F16),
-                                                       TensorInfo(TensorShape(27U, 11U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::QASYMM8,
+               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U), 1, DataType::F16),
+                                                       TensorInfo(TensorShape(27U, 11U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(27U, 13U), 1, DataType::QASYMM8,
                                                                   QuantizationInfo(1.f/256, 12)),
+                                                       TensorInfo(TensorShape(27U, 13U), 1, DataType::F32),
                                                        TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QASYMM8,
+                                                       TensorInfo(TensorShape(32U, 13U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(32U, 13U), 1, DataType::QASYMM8,
                                                                   QuantizationInfo(1.f/256, 0)),
                                                      })),
-               framework::dataset::make("Expected", { false, false, false, false, true, true })),
+               framework::dataset::make("Expected", { false, false, false, false, false, true, true })),
                input_info, output_info, expected)
 {
     ARM_COMPUTE_EXPECT(bool(CLSoftmaxLayer::validate(&input_info.clone()->set_is_resizable(false), &output_info.clone()->set_is_resizable(false))) == expected, framework::LogLevel::ERRORS);
diff --git a/tests/validation/NEON/SoftmaxLayer.cpp b/tests/validation/NEON/SoftmaxLayer.cpp
index 4a1aa23ac1..b6efc8f9b5 100644
--- a/tests/validation/NEON/SoftmaxLayer.cpp
+++ b/tests/validation/NEON/SoftmaxLayer.cpp
@@ -95,15 +95,17 @@ DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(concat(datase
 // *INDENT-OFF*
 // clang-format off
 DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(
-               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
-                                                       TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching shapes
-                                                       TensorInfo(TensorShape(32U, 16U, 2U), 1, DataType::F32),
+               framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U), 1, DataType::F32), // Mismatching data types
+                                                       TensorInfo(TensorShape(27U, 13U), 1, DataType::F32), // Mismatching shapes
+                                                       TensorInfo(TensorShape(32U, 16U, 2U), 1, DataType::F32), // Invalid input dimensionality
+                                                       TensorInfo(TensorShape(32U, 16U), 1, DataType::F32),
                                                      }),
-               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F16),
-                                                       TensorInfo(TensorShape(27U, 11U, 2U), 1, DataType::F32),
-                                                       TensorInfo(TensorShape(32U, 16U, 2U), 1, DataType::F32),
+               framework::dataset::make("OutputInfo",{ TensorInfo(TensorShape(27U, 13U), 1, DataType::F16),
+                                                       TensorInfo(TensorShape(27U, 11U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(27U, 11U), 1, DataType::F32),
+                                                       TensorInfo(TensorShape(32U, 16U), 1, DataType::F32),
                                                      })),
-               framework::dataset::make("Expected", { false, false, true })),
+               framework::dataset::make("Expected", { false, false, false, true })),
                input_info, output_info, expected)
 {
     bool is_valid = bool(NESoftmaxLayer::validate(&input_info.clone()->set_is_resizable(false), &output_info.clone()->set_is_resizable(false)));
--
cgit v1.2.1
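
Note (not part of the patch): a minimal standalone sketch of the behaviour the new "Invalid input dimensionality" test cases encode, assuming an Arm Compute Library build from around this release. After this change, SoftmaxLayer::validate() is expected to accept 2D inputs and reject tensors with more than two dimensions, so callers holding 4D activations would flatten them to 2D before running softmax. The NEON backend is used here only because it needs no CL runtime; the tensor shapes are illustrative.

    #include "arm_compute/core/TensorInfo.h"
    #include "arm_compute/core/TensorShape.h"
    #include "arm_compute/core/Types.h"
    #include "arm_compute/runtime/NEON/functions/NESoftmaxLayer.h"

    #include <cstdio>

    int main()
    {
        using namespace arm_compute;

        // 2D input: still expected to pass validation after this change.
        const TensorInfo valid_2d(TensorShape(32U, 16U), 1, DataType::F32);
        // 3D (or 4D) input: expected to be rejected by validate() after this change.
        const TensorInfo invalid_3d(TensorShape(32U, 16U, 2U), 1, DataType::F32);

        const Status ok  = NESoftmaxLayer::validate(&valid_2d, &valid_2d);
        const Status bad = NESoftmaxLayer::validate(&invalid_3d, &invalid_3d);

        // Status converts to true on success, false on error.
        std::printf("2D input accepted: %d\n", static_cast<int>(bool(ok)));
        std::printf("3D input accepted: %d\n", static_cast<int>(bool(bad)));
        return 0;
    }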