aboutsummaryrefslogtreecommitdiff
path: root/tests/validation/CPP/ActivationLayer.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'tests/validation/CPP/ActivationLayer.cpp')
-rw-r--r-- tests/validation/CPP/ActivationLayer.cpp | 6 ++++++
1 file changed, 6 insertions(+), 0 deletions(-)
diff --git a/tests/validation/CPP/ActivationLayer.cpp b/tests/validation/CPP/ActivationLayer.cpp
index fa393be5e1..8fcacca1e2 100644
--- a/tests/validation/CPP/ActivationLayer.cpp
+++ b/tests/validation/CPP/ActivationLayer.cpp
@@ -66,6 +66,9 @@ SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo
case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
dst[i] = std::min<T>(a, std::max(static_cast<T>(0), x));
break;
+ case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+ dst[i] = std::min<T>(a, std::max<T>(b, x));
+ break;
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
dst[i] = (x > 0) ? x : a * x;
break;
@@ -125,6 +128,9 @@ SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo
case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
dst[i] = min(a, max(const_0, x)).raw();
break;
+ case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+ dst[i] = min(a, max(b, x)).raw();
+ break;
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
dst[i] = (x > const_0) ? x.raw() : mul(a, x).raw();
break;