about | summary | refs | log | tree | commit | diff
path: root/tests/validation/CL/ActivationLayer.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'tests/validation/CL/ActivationLayer.cpp')
-rw-r--r--  tests/validation/CL/ActivationLayer.cpp  |  61
1 files changed, 10 insertions, 51 deletions
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index b32e3791f5..133b39d154 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2020 ARM Limited.
+ * Copyright (c) 2017-2020, 2022 Arm Limited.
*
* SPDX-License-Identifier: MIT
*
@@ -70,6 +70,7 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
case ActivationLayerInfo::ActivationFunction::ELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
+ case ActivationLayerInfo::ActivationFunction::GELU:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
case ActivationLayerInfo::ActivationFunction::TANH:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
@@ -89,61 +90,16 @@ const auto CNNDataTypes = framework::dataset::make("DataType",
/** Input data sets. */
const auto ActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), datasets::ActivationFunctions()), framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+
} // namespace
TEST_SUITE(CL)
TEST_SUITE(ActivationLayer)
-
-DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(combine(datasets::SmallShapes(), CNNDataTypes), framework::dataset::make("InPlace", { false, true })),
- shape, data_type, in_place)
-{
- // Create context
- auto ctx = parameters->get_ctx<CLTensor>();
-
- // Create tensors
- CLTensor src = create_tensor<CLTensor>(shape, data_type, 1, QuantizationInfo(), DataLayout::NCHW, ctx);
- CLTensor dst = create_tensor<CLTensor>(shape, data_type, 1, QuantizationInfo(), DataLayout::NCHW, ctx);
-
- ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
- ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);
-
- // Create and configure function
- CLActivationLayer act_layer(ctx);
-
- if(in_place)
- {
- act_layer.configure(&src, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::ABS));
- }
- else
- {
- act_layer.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::ABS));
- }
-
- // Validate valid region
- const ValidRegion valid_region = shape_to_valid_region(shape);
- validate(src.info()->valid_region(), valid_region);
-
- if(!in_place)
- {
- validate(dst.info()->valid_region(), valid_region);
- }
-
- // Validate padding
- const int step = 16 / arm_compute::data_size_from_type(data_type);
- const PaddingSize padding = PaddingCalculator(shape.x(), step).required_padding();
- validate(src.info()->padding(), padding);
-
- if(!in_place)
- {
- validate(dst.info()->padding(), padding);
- }
-}
-
// *INDENT-OFF*
// clang-format off
DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
framework::dataset::make("InputInfo", { TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data types
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Window shrink
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32),
TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::F32),
TensorInfo(TensorShape(32U, 13U, 2U), 1, DataType::QASYMM8),
TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::QASYMM8), // Invalid quantization info
@@ -172,11 +128,12 @@ DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LOGISTIC),
ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::SQRT),
})),
- framework::dataset::make("Expected", { false, false, true, true, false, false, true, true, false })),
+ framework::dataset::make("Expected", { false, true, true, true, false, false, true, true, false })),
input_info, output_info, act_info, expected)
{
ARM_COMPUTE_EXPECT(bool(CLActivationLayer::validate(&input_info.clone()->set_is_resizable(false), (output_info.total_size() == 0) ? nullptr : &output_info.clone()->set_is_resizable(false), act_info)) == expected, framework::LogLevel::ERRORS);
}
+
// clang-format on
// *INDENT-ON*
@@ -212,8 +169,10 @@ template <typename T>
using CLActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<CLTensor, CLAccessor, CLActivationLayer, T>;
const auto QuantizedActivationDataset8 = combine(combine(framework::dataset::make("InPlace", { false }),
- concat(datasets::ActivationFunctionsQuantized(), framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),
- framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+ concat(datasets::ActivationFunctionsQuantized(),
+ framework::dataset::make("ActivationFunction",
+{ ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::LEAKY_RELU }))),
+framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
const auto QuantizedActivationDataset16 = combine(combine(framework::dataset::make("InPlace", { false }),
datasets::ActivationFunctionsQuantized()),