author     Giorgio Arena <giorgio.arena@arm.com>   2021-04-15 12:54:53 +0100
committer  Giorgio Arena <giorgio.arena@arm.com>   2021-04-15 14:11:16 +0000
commit     5d75d4a236bd4d73b514bd81d902fbdbd660b00a (patch)
tree       e4323ea0685007e1fe8747860b75d99cc49a9334 /tests
parent     c77132168f47f89118fd285011d84a470a49fc4a (diff)
download   ComputeLibrary-5d75d4a236bd4d73b514bd81d902fbdbd660b00a.tar.gz
Fix validation bug in release mode for armv7
Resolve COMPMID-4377, COMPMID-4379

Change-Id: I302f08b5bf0afb5295d31843fea20181d9283658
Signed-off-by: Giorgio Arena <giorgio.arena@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/5435
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Diffstat (limited to 'tests')
-rw-r--r--  tests/validation/CL/FullyConnectedLayer.cpp  69
1 file changed, 29 insertions(+), 40 deletions(-)
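
Note: apart from the copyright-year update, the functional change in this diff replaces the brace-initialized ActivationFunctionsQuantizedDataset with a concatenation of single-value datasets and removes the CNNDataTypes dataset; the remaining hunks only re-indent existing combine(...) argument chains. A minimal sketch of the before/after pattern, built from the framework::dataset::make and concat helpers that appear in this diff (the include paths, using-directives and variable names below are illustrative assumptions, not part of the change):

// Sketch only: mirrors the dataset-construction pattern changed by this commit.
// Include paths and names are assumptions based on the surrounding test sources.
#include "arm_compute/core/Types.h"            // ActivationLayerInfo
#include "tests/framework/datasets/Datasets.h" // framework::dataset::make, concat

using namespace arm_compute;
using namespace arm_compute::test;

// Before: one dataset built from a brace-initialized list of values.
const auto ActivationsFromList = framework::dataset::make("ActivationInfo",
{
    ActivationLayerInfo(),
    ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
});

// After: each value wrapped in its own single-value dataset and joined with
// concat(), as done for ActivationFunctionsQuantizedDataset in the hunk below.
const auto ActivationsFromConcat = concat(
    framework::dataset::make("ActivationInfo", ActivationLayerInfo()),
    framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)));

Both forms describe the same sequence of ActivationLayerInfo values; only how the underlying dataset is constructed differs.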
diff --git a/tests/validation/CL/FullyConnectedLayer.cpp b/tests/validation/CL/FullyConnectedLayer.cpp
index 066f0b689e..7b313e7c50 100644
--- a/tests/validation/CL/FullyConnectedLayer.cpp
+++ b/tests/validation/CL/FullyConnectedLayer.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2020 Arm Limited.
+ * Copyright (c) 2017-2021 Arm Limited.
*
* SPDX-License-Identifier: MIT
*
@@ -51,15 +51,6 @@ constexpr float tolerance_num = 0.07f; /**< Tolerance n
/** Tolerance for quantized asymmetric operations */
constexpr AbsoluteTolerance<uint8_t> tolerance_qasymm8(1);
-/** CNN data types */
-const auto CNNDataTypes = framework::dataset::make("DataType",
-{
- DataType::F16,
- DataType::F32,
- DataType::QASYMM8,
- DataType::QASYMM8_SIGNED,
-});
-
const auto FullyConnectedParameters = combine(framework::dataset::make("TransposeWeights", { false, true }), framework::dataset::make("ReshapeWeights", { false, true }));
const auto QuantizationData = framework::dataset::make("QuantizationInfo",
@@ -77,13 +68,11 @@ const auto ActivationFunctionsDataset = framework::dataset::make("ActivationInfo
ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::TANH)
});
-const auto ActivationFunctionsQuantizedDataset = framework::dataset::make("ActivationInfo",
-{
- ActivationLayerInfo(),
- ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU),
- ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 0.5f),
- ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 0.75f, 0.25f)
-});
+const auto ActivationFunctionsQuantizedDataset = concat(concat(concat(
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo()),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::BOUNDED_RELU, 0.5f))),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU, 0.75f, 0.25f)));
} // namespace
TEST_SUITE(CL)
@@ -170,13 +159,13 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLFullyConnectedLayerFixture<float>, framework:
validate(CLAccessor(_target), _reference, rel_tolerance_f32, 0, abs_tolerance_f32);
}
FIXTURE_DATA_TEST_CASE(RunMixedDataLayout, CLFullyConnectedLayerMixedDataLayoutFixture<float>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(combine(combine(combine(
- framework::dataset::make("Input", TensorShape(9U, 5U, 7U)),
- framework::dataset::make("Weights", TensorShape(315U, 271U))),
- framework::dataset::make("Biases", TensorShape(271U))),
- framework::dataset::make("Output", TensorShape(271U))),
- FullyConnectedParameters),
- framework::dataset::make("DataType", DataType::F32)),
- framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))))
+ framework::dataset::make("Input", TensorShape(9U, 5U, 7U)),
+ framework::dataset::make("Weights", TensorShape(315U, 271U))),
+ framework::dataset::make("Biases", TensorShape(271U))),
+ framework::dataset::make("Output", TensorShape(271U))),
+ FullyConnectedParameters),
+ framework::dataset::make("DataType", DataType::F32)),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))))
{
// Validate output
validate(CLAccessor(_target), _reference, rel_tolerance_f32, 0, abs_tolerance_f32);
@@ -207,14 +196,14 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLFullyConnectedLayerQuantizedFixture<uint8_t>,
}
FIXTURE_DATA_TEST_CASE(RunMixedDataLayout, CLFullyConnectedLayerQuantizedMixedDataLayoutFixture<uint8_t>, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(combine(combine(combine(combine(
- framework::dataset::make("Input", TensorShape(9U, 5U, 7U)),
- framework::dataset::make("Weights", TensorShape(315U, 271U))),
- framework::dataset::make("Biases", TensorShape(271U))),
- framework::dataset::make("Output", TensorShape(271U))),
- FullyConnectedParameters),
- framework::dataset::make("DataType", DataType::QASYMM8)),
- QuantizationData),
- framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))))
+ framework::dataset::make("Input", TensorShape(9U, 5U, 7U)),
+ framework::dataset::make("Weights", TensorShape(315U, 271U))),
+ framework::dataset::make("Biases", TensorShape(271U))),
+ framework::dataset::make("Output", TensorShape(271U))),
+ FullyConnectedParameters),
+ framework::dataset::make("DataType", DataType::QASYMM8)),
+ QuantizationData),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_qasymm8);
@@ -237,14 +226,14 @@ FIXTURE_DATA_TEST_CASE(RunSmall, CLFullyConnectedLayerQuantizedFixture<int8_t>,
}
FIXTURE_DATA_TEST_CASE(RunMixedDataLayout, CLFullyConnectedLayerQuantizedMixedDataLayoutFixture<int8_t>, framework::DatasetMode::PRECOMMIT,
combine(combine(combine(combine(combine(combine(combine(
- framework::dataset::make("Input", TensorShape(9U, 5U, 7U)),
- framework::dataset::make("Weights", TensorShape(315U, 271U))),
- framework::dataset::make("Biases", TensorShape(271U))),
- framework::dataset::make("Output", TensorShape(271U))),
- FullyConnectedParameters),
- framework::dataset::make("DataType", DataType::QASYMM8_SIGNED)),
- QuantizationData),
- framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))))
+ framework::dataset::make("Input", TensorShape(9U, 5U, 7U)),
+ framework::dataset::make("Weights", TensorShape(315U, 271U))),
+ framework::dataset::make("Biases", TensorShape(271U))),
+ framework::dataset::make("Output", TensorShape(271U))),
+ FullyConnectedParameters),
+ framework::dataset::make("DataType", DataType::QASYMM8_SIGNED)),
+ QuantizationData),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))))
{
// Validate output
validate(CLAccessor(_target), _reference, tolerance_qasymm8);