From 80943253e63779b61412264ff4c58b406cf061ae Mon Sep 17 00:00:00 2001
From: Michalis Spyrou
Date: Thu, 10 Jan 2019 17:19:50 +0000
Subject: COMPMID-1652 CL Cleanup and add missing tests

Change-Id: I359cc0fd0c3fa42ab10a770e59d58704403889b2
Reviewed-on: https://review.mlplatform.org/498
Tested-by: Arm Jenkins
Reviewed-by: Michele Di Giorgio
Reviewed-by: Isabella Gottardi
---
 tests/validation/CL/BatchNormalizationLayer.cpp | 39 +++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/tests/validation/CL/BatchNormalizationLayer.cpp b/tests/validation/CL/BatchNormalizationLayer.cpp
index 501e580709..dee703e624 100644
--- a/tests/validation/CL/BatchNormalizationLayer.cpp
+++ b/tests/validation/CL/BatchNormalizationLayer.cpp
@@ -181,12 +181,51 @@ TEST_SUITE_END() // Float
 TEST_SUITE_END() // BatchNormalizationLayer
 
 TEST_SUITE(BatchNormalizationLayerFusion)
+// *INDENT-OFF*
+// clang-format off
+DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(
+               framework::dataset::make("Weights",  { TensorInfo(TensorShape(32U, 13U, 2U, 2U), 1, DataType::F32), // Valid
+                                                      TensorInfo(TensorShape(32U, 13U, 2U, 2U), 1, DataType::F32), // Mismatching data types
+                                                      TensorInfo(TensorShape(32U, 13U, 2U, 1U), 1, DataType::F32), // Invalid mean/var/beta/gamma shape
+                                                    }),
+               framework::dataset::make("MVBGInfo",{ TensorInfo(TensorShape(2U), 1, DataType::F32),
+                                                     TensorInfo(TensorShape(2U), 1, DataType::F16),
+                                                     TensorInfo(TensorShape(5U), 1, DataType::F32),
+                                                   })),
+               framework::dataset::make("Expected", { true, false, false})),
+               weights_info, mvbg_info, expected)
+{
+    const auto &weights_in_info    = weights_info;
+    const auto &mean_info          = mvbg_info;
+    const auto &var_info           = mvbg_info;
+    const auto &fused_weights_info = weights_info;
+    const auto &fused_bias_info    = mvbg_info;
+    const auto &conv_bias_info     = mvbg_info;
+    const auto &beta_info          = mvbg_info;
+    const auto &gamma_info         = mvbg_info;
+    bool        has_error          = bool(CLFuseBatchNormalization::validate(
+                                         &weights_in_info.clone()->set_is_resizable(false), &mean_info.clone()->set_is_resizable(false),
+                                         &var_info.clone()->set_is_resizable(false), &fused_weights_info.clone()->set_is_resizable(false),
+                                         &fused_bias_info.clone()->set_is_resizable(false), &conv_bias_info.clone()->set_is_resizable(false),
+                                         &beta_info.clone()->set_is_resizable(false), &gamma_info.clone()->set_is_resizable(false), 1.f));
+    ARM_COMPUTE_EXPECT(has_error == expected, framework::LogLevel::ERRORS);
+}
+// clang-format on
+// *INDENT-ON*
 template <typename T>
 using CLBatchNormalizationLayerFusionFixture = BatchNormalizationLayerFusionValidationFixture<CLTensor, CLAccessor, CLConvolutionLayer, CLFuseBatchNormalization, T>;
 
 TEST_SUITE(Float)
 TEST_SUITE(FP32)
 FIXTURE_DATA_TEST_CASE(RunSmall, CLBatchNormalizationLayerFusionFixture<float>, framework::DatasetMode::PRECOMMIT,
+                       combine(combine(combine(datasets::SmallConvolutionLayerReducedDataset(), common_fusion_dataset),
+                                       framework::dataset::make("DataType", DataType::F32)),
+                               framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
+{
+    // Validate output
+    validate(CLAccessor(_target), _reference, rel_tolerance_f32, 0.f, abs_tolerance_f32);
+}
+FIXTURE_DATA_TEST_CASE(RunLarge, CLBatchNormalizationLayerFusionFixture<float>, framework::DatasetMode::NIGHTLY,
                        combine(combine(combine(datasets::SmallConvolutionLayerDataset(), common_fusion_dataset),
                                        framework::dataset::make("DataType", DataType::F32)),
                                framework::dataset::make("DataLayout", { DataLayout::NCHW, DataLayout::NHWC })))
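
For context on what the new cases exercise: CLFuseBatchNormalization folds a per-channel batch-normalization stage (mean, variance, beta, gamma) into the weights and bias of the preceding convolution, and the fusion fixtures compare that folded path against the unfused reference. Below is a minimal sketch of the textbook folding formula, assuming the usual per-output-channel parameterisation; the names BatchNormParams and fold_batch_norm are illustrative placeholders, not Compute Library API.

#include <cmath>
#include <vector>

// Per-channel batch-normalization parameters (illustrative, not the library's types).
struct BatchNormParams
{
    float mean;
    float var;
    float beta;
    float gamma;
};

// Folds batch normalization into one output channel of a convolution:
//   w' = w * gamma / sqrt(var + eps)
//   b' = (b - mean) * gamma / sqrt(var + eps) + beta
void fold_batch_norm(std::vector<float> &weights, float &bias, const BatchNormParams &bn, float eps)
{
    const float scale = bn.gamma / std::sqrt(bn.var + eps);
    for(float &w : weights)
    {
        w *= scale;
    }
    bias = (bias - bn.mean) * scale + bn.beta;
}

This per-output-channel view also matches the shape rule the Validate case checks: mean/var/beta/gamma need one element per output feature map of the weights, so the TensorShape(5U) entry paired with TensorShape(32U, 13U, 2U, 1U) weights is expected to be rejected, as is the F32/F16 data-type mismatch.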