From d6b8a71714361881a249a6f6ed67125f290f4a83 Mon Sep 17 00:00:00 2001
From: Jonathan Deakin
Date: Tue, 23 Aug 2022 11:44:18 +0100
Subject: =?UTF-8?q?Add=20FP32=20Neon=E2=84=A2=20swish=20activation?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Change-Id: Id37b59adbc8c4cbe218d1652aeb02a0b4ce42c66
Signed-off-by: Jonathan Deakin
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/8256
Tested-by: Arm Jenkins
Reviewed-by: Pablo Marquez Tello
Comments-Addressed: Arm Jenkins
Benchmark: Arm Jenkins
---
 tests/validation/NEON/ActivationLayer.cpp    | 5 ++++-
 tests/validation/reference/ActivationLayer.h | 3 +++
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index a2971f28ba..d580a1c2ca 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -68,6 +68,7 @@ RelativeTolerance<float> relative_tolerance(DataType data_type, ActivationLayerI
         case ActivationLayerInfo::ActivationFunction::SQRT:
         case ActivationLayerInfo::ActivationFunction::TANH:
         case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
+        case ActivationLayerInfo::ActivationFunction::SWISH:
         case ActivationLayerInfo::ActivationFunction::GELU:
             switch(data_type)
             {
@@ -111,6 +112,7 @@ AbsoluteTolerance<float> absolute_tolerance(DataType data_type, ActivationLayerI
         case ActivationLayerInfo::ActivationFunction::LOGISTIC:
         case ActivationLayerInfo::ActivationFunction::SQRT:
         case ActivationLayerInfo::ActivationFunction::TANH:
+        case ActivationLayerInfo::ActivationFunction::SWISH:
         case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
             switch(data_type)
             {
@@ -173,7 +175,8 @@ const auto CNNDataTypes = framework::dataset::make("DataType",
     DataType::F32,
 });
 
-const auto NeonActivationFunctionsDataset = concat(datasets::ActivationFunctions(), framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH));
+const auto NeonActivationFunctionsDataset = concat(datasets::ActivationFunctions(),
+                                                   framework::dataset::make("ActivationFunction", {ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::SWISH}));
 
 /** Input data sets. */
 const auto ActivationDataset = combine(combine(framework::dataset::make("InPlace", { false, true }), NeonActivationFunctionsDataset), framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
diff --git a/tests/validation/reference/ActivationLayer.h b/tests/validation/reference/ActivationLayer.h
index 97733238ef..2bf96831a6 100644
--- a/tests/validation/reference/ActivationLayer.h
+++ b/tests/validation/reference/ActivationLayer.h
@@ -84,6 +84,9 @@ inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction a
         case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
             ret = x * ((std::min(std::max(static_cast<T>(x + 3), static_cast<T>(0.0f)), static_cast<T>(6.0f))) * 0.166666667f);
             break;
+        case ActivationLayerInfo::ActivationFunction::SWISH:
+            ret = static_cast<T>(x) / (static_cast<T>(1) + std::exp(-a*x));
+            break;
         case ActivationLayerInfo::ActivationFunction::GELU:
             ret = x * 0.5f * (1 + erf(x / std::sqrt(2.0f)));
             break;
--
cgit v1.2.1
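
Note on the reference formula: the SWISH case added above computes swish(x) = x / (1 + exp(-a * x)), which is equivalent to x * sigmoid(a * x) and reduces to the SiLU activation when a = 1; the test dataset exercises a (AlphaBeta) values of 0.5f and 1.f. The standalone C++ sketch below is not part of the patch or of ComputeLibrary; the function name swish_ref and the sample inputs are illustrative assumptions. It simply spot-checks the patch's formula against the x * sigmoid(a * x) identity.

    #include <cmath>
    #include <cstdio>

    // Standalone mirror of the reference formula from the patch above:
    // swish(x) = x / (1 + exp(-a * x)) = x * sigmoid(a * x).
    // "swish_ref" is an illustrative name, not a ComputeLibrary symbol.
    template <typename T>
    T swish_ref(T x, T a)
    {
        return x / (static_cast<T>(1) + std::exp(-a * x));
    }

    int main()
    {
        // Alpha values matching the test dataset's AlphaBeta entries.
        for(float a : { 0.5f, 1.0f })
        {
            // Arbitrary sample inputs chosen for illustration.
            for(float x : { -3.0f, -1.0f, 0.0f, 1.0f, 3.0f })
            {
                const float sigmoid = 1.0f / (1.0f + std::exp(-a * x));
                std::printf("a=%.1f x=%+.1f swish=%+.6f x*sigmoid(a*x)=%+.6f\n",
                            a, x, swish_ref(x, a), x * sigmoid);
            }
        }
        return 0;
    }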