From 572ade736ab344a62afa7da214cd9407fe53a281 Mon Sep 17 00:00:00 2001
From: Moritz Pflanzer
Date: Fri, 21 Jul 2017 17:36:33 +0100
Subject: COMPMID-415: Move ActivationLayer to new validation

Change-Id: I38ce20d95640f9c1baf699a095c35e592ad4339f
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/81115
Reviewed-by: Anthony Barbier
Tested-by: Kaizen
---
 tests/validation_new/NEON/ActivationLayer.cpp | 230 ++++++++++++++++++++++++++
 1 file changed, 230 insertions(+)
 create mode 100644 tests/validation_new/NEON/ActivationLayer.cpp

diff --git a/tests/validation_new/NEON/ActivationLayer.cpp b/tests/validation_new/NEON/ActivationLayer.cpp
new file mode 100644
index 0000000000..db0faaecdf
--- /dev/null
+++ b/tests/validation_new/NEON/ActivationLayer.cpp
@@ -0,0 +1,230 @@
+/*
+ * Copyright (c) 2017 ARM Limited.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+#include "arm_compute/core/Types.h"
+#include "arm_compute/runtime/NEON/functions/NEActivationLayer.h"
+#include "arm_compute/runtime/Tensor.h"
+#include "arm_compute/runtime/TensorAllocator.h"
+#include "framework/Asserts.h"
+#include "framework/Macros.h"
+#include "framework/datasets/Datasets.h"
+#include "tests/NEON/Accessor.h"
+#include "tests/PaddingCalculator.h"
+#include "tests/datasets_new/ActivationFunctionsDataset.h"
+#include "tests/datasets_new/ShapeDatasets.h"
+#include "tests/validation_new/Validation.h"
+#include "tests/validation_new/fixtures/ActivationLayerFixture.h"
+#include "tests/validation_new/half.h"
+
+namespace arm_compute
+{
+namespace test
+{
+namespace validation
+{
+namespace
+{
+/** Define tolerance of the activation layer.
+ *
+ * @param[in] data_type  The data type used.
+ * @param[in] activation The activation function used.
+ *
+ * @return Tolerance depending on the activation function.
+ */
+float tolerance(DataType data_type, ActivationLayerInfo::ActivationFunction activation)
+{
+    switch(activation)
+    {
+        case ActivationLayerInfo::ActivationFunction::LOGISTIC:
+        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
+        case ActivationLayerInfo::ActivationFunction::SQRT:
+        case ActivationLayerInfo::ActivationFunction::TANH:
+            switch(data_type)
+            {
+                case DataType::QS8:
+                    return 5.f;
+                case DataType::QS16:
+                    return 11.f;
+                case DataType::F16:
+                    return 0.01f;
+                default:
+                    return 0.00001f;
+            }
+            break;
+        default:
+            return 0.f;
+    }
+}
+
+/** CNN data types */
+const auto CNNDataTypes = framework::dataset::make("DataType",
+{
+#ifdef ARM_COMPUTE_ENABLE_FP16
+    DataType::F16,
+#endif /* ARM_COMPUTE_ENABLE_FP16 */
+    DataType::F32,
+    DataType::QS8,
+    DataType::QS16,
+});
+
+/** Input data sets. */
+const auto ActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), datasets::ActivationFunctions()), framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+} // namespace
+
+TEST_SUITE(NEON)
+TEST_SUITE(ActivationLayer)
+
+DATA_TEST_CASE(Configuration, framework::DatasetMode::ALL, combine(combine(concat(datasets::SmallShapes(), datasets::LargeShapes()), CNNDataTypes), framework::dataset::make("InPlace", { false, true })),
+               shape, data_type, in_place)
+{
+    // Set fixed point position data type allowed
+    const int fixed_point_position = is_data_type_fixed_point(data_type) ? 3 : 0;
+
+    // Create tensors
+    Tensor src = create_tensor<Tensor>(shape, data_type, 1, fixed_point_position);
+    Tensor dst = create_tensor<Tensor>(shape, data_type, 1, fixed_point_position);
+
+    ARM_COMPUTE_EXPECT(src.info()->is_resizable(), framework::LogLevel::ERRORS);
+    ARM_COMPUTE_EXPECT(dst.info()->is_resizable(), framework::LogLevel::ERRORS);
+
+    // Create and configure function
+    NEActivationLayer act_layer;
+
+    if(in_place)
+    {
+        act_layer.configure(&src, nullptr, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::ABS));
+    }
+    else
+    {
+        act_layer.configure(&src, &dst, ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::ABS));
+    }
+
+    // Validate valid region
+    const ValidRegion valid_region = shape_to_valid_region(shape);
+    validate(src.info()->valid_region(), valid_region);
+
+    if(!in_place)
+    {
+        validate(dst.info()->valid_region(), valid_region);
+    }
+
+    // Validate padding
+    const PaddingSize padding = PaddingCalculator(shape.x(), 16).required_padding();
+    validate(src.info()->padding(), padding);
+
+    if(!in_place)
+    {
+        validate(dst.info()->padding(), padding);
+    }
+}
+
+template <typename T>
+using NEActivationLayerFixture = ActivationValidationFixture<Tensor, Accessor, NEActivationLayer, T>;
+
+TEST_SUITE(Float)
+#ifdef ARM_COMPUTE_ENABLE_FP16
+TEST_SUITE(FP16)
+FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<half_float::half>, framework::DatasetMode::PRECOMMIT, combine(combine(datasets::SmallShapes(), ActivationDataset),
+                       framework::dataset::make("DataType",
+                                                DataType::F16)))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
+}
+FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerFixture<half_float::half>, framework::DatasetMode::NIGHTLY, combine(combine(datasets::LargeShapes(), ActivationDataset),
+                       framework::dataset::make("DataType",
+                                                DataType::F16)))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
+}
+TEST_SUITE_END()
+#endif /* ARM_COMPUTE_ENABLE_FP16 */
+
+TEST_SUITE(FP32)
+FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixture<float>, framework::DatasetMode::PRECOMMIT, combine(combine(datasets::SmallShapes(), ActivationDataset),
+                       framework::dataset::make("DataType",
+                                                DataType::F32)))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
+}
+FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerFixture<float>, framework::DatasetMode::NIGHTLY, combine(combine(datasets::LargeShapes(), ActivationDataset), framework::dataset::make("DataType",
+                       DataType::F32)))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
+}
+TEST_SUITE_END()
+TEST_SUITE_END()
+
+template <typename T>
+using NEActivationLayerFixedPointFixture = ActivationValidationFixedPointFixture<Tensor, Accessor, NEActivationLayer, T>;
+
+TEST_SUITE(Quantized)
+TEST_SUITE(QS8)
+// We test for fixed point precision [3,5] because [1,2] and [6,7] ranges cause
+// overflowing issues in most of the transcendentals functions.
+FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixedPointFixture<int8_t>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::SmallShapes(), ActivationDataset),
+                       framework::dataset::make("DataType",
+                                                DataType::QS8)),
+                       framework::dataset::make("FractionalBits", 3, 6)))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
+}
+FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerFixedPointFixture<int8_t>, framework::DatasetMode::NIGHTLY, combine(combine(combine(datasets::LargeShapes(), ActivationDataset),
+                       framework::dataset::make("DataType",
+                                                DataType::QS8)),
+                       framework::dataset::make("FractionalBits", 3, 6)))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
+}
+TEST_SUITE_END()
+
+TEST_SUITE(QS16)
+// Testing for fixed point position [1,14) as reciprocal limits the maximum fixed point position to 14
+FIXTURE_DATA_TEST_CASE(RunSmall, NEActivationLayerFixedPointFixture<int16_t>, framework::DatasetMode::PRECOMMIT, combine(combine(combine(datasets::SmallShapes(), ActivationDataset),
+                       framework::dataset::make("DataType",
+                                                DataType::QS16)),
+                       framework::dataset::make("FractionalBits", 1, 14)))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
+}
+FIXTURE_DATA_TEST_CASE(RunLarge, NEActivationLayerFixedPointFixture<int16_t>, framework::DatasetMode::NIGHTLY, combine(combine(combine(datasets::LargeShapes(), ActivationDataset),
+                       framework::dataset::make("DataType",
+                                                DataType::QS16)),
+                       framework::dataset::make("FractionalBits", 1, 14)))
+{
+    // Validate output
+    validate(Accessor(_target), _reference, tolerance(_data_type, _function));
+}
+TEST_SUITE_END()
+TEST_SUITE_END()
+
+TEST_SUITE_END()
+TEST_SUITE_END()
+} // namespace validation
+} // namespace test
+} // namespace arm_compute
--
cgit v1.2.1
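
A short aside on the QS8 "FractionalBits" range used in the patch (the dataset end is exclusive, so 3, 4 and 5, matching the "[3,5]" comment): an 8-bit signed fixed-point value with fb fractional bits spans roughly [-2^(7-fb), 2^(7-fb)) with a resolution of 2^-fb, so very few fractional bits give coarse resolution while 6 or 7 leave almost no integer headroom for EXP-based activations. The sketch below only illustrates that arithmetic in plain standalone C++; the helper name qs8_range is ours for illustration and is not a Compute Library API.

#include <cmath>
#include <cstdio>

// Illustrative helper (not part of the Compute Library): print the representable
// range and resolution of an 8-bit signed fixed-point (QS8) value for a given
// number of fractional bits fb. One bit is the sign; the remaining 7 bits are
// shared between the integer and fractional parts.
static void qs8_range(int fb)
{
    const double step = std::ldexp(1.0, -fb); // resolution: 2^-fb
    const double lo   = -128 * step;          // most negative value: -2^(7-fb)
    const double hi   = 127 * step;           // most positive value:  2^(7-fb) - 2^-fb
    std::printf("fb=%d  range=[%g, %g]  step=%g\n", fb, lo, hi, step);
}

int main()
{
    // fb in {1,2}: steps of 0.5 or 0.25 are too coarse for accurate LOGISTIC/TANH results.
    // fb in {6,7}: the range shrinks to about [-2,2) or [-1,1), so EXP/SOFT_RELU saturate.
    // Hence the tests in the patch keep QS8 FractionalBits within [3,5].
    for(int fb = 1; fb <= 7; ++fb)
    {
        qs8_range(fb);
    }
    return 0;
}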