author    Jakub Sujak <jakub.sujak@arm.com>	2023-03-29 11:16:18 +0100
committer Jakub Sujak <jakub.sujak@arm.com>	2023-04-04 14:09:21 +0000
commit    617ed50000532877296fff93973590a8ab67f96d (patch)
tree      d571654a2c31dfb6d5ec1529160b9f3d3348938c /tests/validation
parent    f26ea2f8cc957a1e6faf0361dea805fb2e236061 (diff)
download  ComputeLibrary-617ed50000532877296fff93973590a8ab67f96d.tar.gz
Support dynamic weights for Fully Connected layers on GPU
The fully connected function and operator running on GPU have been adapted to support dynamic weights. Dynamic weights require the reshape and data layout conversion of weight tensors at runtime in the prepare stage of the operator. The implementation for GPU is identical to the CPU implementation.

This patch also deprecates the `are_weights_reshaped` option in Fully Connected.

Resolves: COMPMID-5870

Change-Id: I28f967695879d82cc91a928d95308a4e0e52a597
Signed-off-by: Jakub Sujak <jakub.sujak@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/9403
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Viet-Hoa Do <viet-hoa.do@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Benchmark: Arm Jenkins <bsgcomp@arm.com>
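As a minimal usage sketch (not part of this patch), the snippet below shows how a caller might drive the dynamic-weights path of CLFullyConnectedLayer: the weights tensor is marked as non-constant, so the operator redoes the weight reshape and data layout conversion in its prepare stage instead of caching reshaped weights once. The tensor shapes are illustrative, and the use of ITensorInfo::set_are_values_constant(false) as the signal for dynamic weights is an assumption drawn from how the validation fixtures exercise this path; check the current headers before relying on it.

#include "arm_compute/core/Types.h"
#include "arm_compute/runtime/CL/CLScheduler.h"
#include "arm_compute/runtime/CL/CLTensor.h"
#include "arm_compute/runtime/CL/functions/CLFullyConnectedLayer.h"

using namespace arm_compute;

void run_fc_with_dynamic_weights()
{
    CLScheduler::get().default_init();

    // Illustrative shapes: 128 inputs, 16 outputs, batch of 8.
    CLTensor src, weights, bias, dst;
    src.allocator()->init(TensorInfo(TensorShape(128U, 8U), 1, DataType::F32));
    weights.allocator()->init(TensorInfo(TensorShape(128U, 16U), 1, DataType::F32));
    bias.allocator()->init(TensorInfo(TensorShape(16U), 1, DataType::F32));
    dst.allocator()->init(TensorInfo(TensorShape(16U, 8U), 1, DataType::F32));

    // Assumption: marking the weights as non-constant tells the operator to
    // re-run the reshape/layout conversion of the weights on every execution.
    weights.info()->set_are_values_constant(false);

    // The deprecated are_weights_reshaped flag is left at its default (false).
    FullyConnectedLayerInfo fc_info{};
    fc_info.activation_info = ActivationLayerInfo();

    CLFullyConnectedLayer fc;
    fc.configure(&src, &weights, &bias, &dst, fc_info);

    src.allocator()->allocate();
    weights.allocator()->allocate();
    bias.allocator()->allocate();
    dst.allocator()->allocate();

    for(int i = 0; i < 2; ++i)
    {
        // ... map the tensors and refresh src/weights buffers here; the weight
        // values may change between iterations (omitted for brevity) ...
        fc.run();
    }
}

Re-preparing the weights on each run trades the one-off cost saving of cached reshaped weights for correctness when the weight values change between executions, which is exactly the scenario the new RunDynamicWeights test cases below cover.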
Diffstat (limited to 'tests/validation')
-rw-r--r-- | tests/validation/CL/FullyConnectedLayer.cpp            | 24
-rw-r--r-- | tests/validation/fixtures/FullyConnectedLayerFixture.h |  2
2 files changed, 21 insertions(+), 5 deletions(-)
diff --git a/tests/validation/CL/FullyConnectedLayer.cpp b/tests/validation/CL/FullyConnectedLayer.cpp
index fcfae4e156..9213ab541d 100644
--- a/tests/validation/CL/FullyConnectedLayer.cpp
+++ b/tests/validation/CL/FullyConnectedLayer.cpp
@@ -150,6 +150,12 @@ FIXTURE_DATA_TEST_CASE(RunLarge, CLFullyConnectedLayerFixture<half>, framework::
// Validate output
validate(CLAccessor(_target), _reference, tolerance_f16, tolerance_num);
}
+FIXTURE_DATA_TEST_CASE(RunDynamicWeights, CLFullyConnectedLayerDynamicWeightsFixture<half>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::SmallFullyConnectedLayerDataset(),
+ framework::dataset::make("DataType", DataType::F16)),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo())),
+ framework::dataset::make("WeightsReshaped", { false, true })))
+{
+}
TEST_SUITE_END()
TEST_SUITE(FP32)
@@ -173,9 +179,9 @@ FIXTURE_DATA_TEST_CASE(RunMixedDataLayout, CLFullyConnectedLayerMixedDataLayoutF
validate(CLAccessor(_target), _reference, rel_tolerance_f32, 0, abs_tolerance_f32);
}
FIXTURE_DATA_TEST_CASE(RunDynamicWeights, CLFullyConnectedLayerDynamicWeightsFixture<float>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::SmallFullyConnectedLayerDataset(),
- framework::dataset::make("DataType", DataType::F32)),
- framework::dataset::make("ActivationInfo", ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))),
- framework::dataset::make("WeightsReshaped", { true })))
+ framework::dataset::make("DataType", DataType::F32)),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo())),
+ framework::dataset::make("WeightsReshaped", { false, true })))
{
}
FIXTURE_DATA_TEST_CASE(RunLarge, CLFullyConnectedLayerFixture<float>, framework::DatasetMode::NIGHTLY, combine(combine(combine(datasets::LargeFullyConnectedLayerDataset(), FullyConnectedParameters),
@@ -223,6 +229,12 @@ FIXTURE_DATA_TEST_CASE(RunLarge, CLFullyConnectedLayerQuantizedFixture<uint8_t>,
// Validate output
validate(CLAccessor(_target), _reference, tolerance_qasymm8);
}
+FIXTURE_DATA_TEST_CASE(RunDynamicWeights, CLFullyConnectedLayerDynamicWeightsFixture<uint8_t>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::SmallFullyConnectedLayerDataset(),
+ framework::dataset::make("DataType", DataType::QASYMM8)),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo())),
+ framework::dataset::make("WeightsReshaped", { false /* COMPMID-6000: Support FullyConnected with quantized dynamic weights already reshaped */ })))
+{
+}
TEST_SUITE_END() /* QASYMM8 */
TEST_SUITE(QASYMM8_SIGNED)
FIXTURE_DATA_TEST_CASE(RunSmall, CLFullyConnectedLayerQuantizedFixture<int8_t>, framework::DatasetMode::PRECOMMIT,
@@ -246,6 +258,12 @@ FIXTURE_DATA_TEST_CASE(RunMixedDataLayout, CLFullyConnectedLayerQuantizedMixedDa
// Validate output
validate(CLAccessor(_target), _reference, tolerance_qasymm8);
}
+FIXTURE_DATA_TEST_CASE(RunDynamicWeights, CLFullyConnectedLayerDynamicWeightsFixture<int8_t>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::SmallFullyConnectedLayerDataset(),
+ framework::dataset::make("DataType", DataType::QASYMM8_SIGNED)),
+ framework::dataset::make("ActivationInfo", ActivationLayerInfo())),
+ framework::dataset::make("WeightsReshaped", { false /* COMPMID-6000: Support FullyConnected with quantized dynamic weights already reshaped */ })))
+{
+}
TEST_SUITE_END() // QASYMM8_SIGNED
TEST_SUITE_END() // Quantized
TEST_SUITE_END() // FullyConnectedLayer
diff --git a/tests/validation/fixtures/FullyConnectedLayerFixture.h b/tests/validation/fixtures/FullyConnectedLayerFixture.h
index 7d1aa494ba..75bef144ad 100644
--- a/tests/validation/fixtures/FullyConnectedLayerFixture.h
+++ b/tests/validation/fixtures/FullyConnectedLayerFixture.h
@@ -333,7 +333,6 @@ private:
validate(AccessorType(target), ref, rel_tolerance_f32, 0, abs_tolerance_f32);
}
-#ifdef __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
void validate_with_tolerance(TensorType &target, SimpleTensor<half_float::half> &ref)
{
constexpr AbsoluteTolerance<float> abs_tolerance_f16(0.3f);
@@ -342,7 +341,6 @@ private:
validate(AccessorType(target), ref, rel_tolerance_f16, tolerance_num_f16, abs_tolerance_f16);
}
-#endif // __ARM_FEATURE_FP16_VECTOR_ARITHMETIC
void validate_with_tolerance(TensorType &target, SimpleTensor<uint8_t> &ref)
{