Diffstat (limited to 'tests')
-rw-r--r--  tests/TypePrinter.h                           3
-rw-r--r--  tests/datasets/ActivationFunctionsDataset.h   1
-rw-r--r--  tests/validation/CPP/ActivationLayer.cpp      6
3 files changed, 10 insertions, 0 deletions
diff --git a/tests/TypePrinter.h b/tests/TypePrinter.h
index c207c1d634..bbccaadc6d 100644
--- a/tests/TypePrinter.h
+++ b/tests/TypePrinter.h
@@ -254,6 +254,9 @@ inline ::std::ostream &operator<<(::std::ostream &os, const ActivationLayerInfo:
case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
os << "BOUNDED_RELU";
break;
+ case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+ os << "LU_BOUNDED_RELU";
+ break;
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
os << "LEAKY_RELU";
break;
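The printer case above only affects how the enum value is rendered in test logs and failure messages. A minimal usage sketch follows; the include path and the arm_compute namespace are assumed from the library headers and are not part of this patch.

    #include <iostream>
    #include "tests/TypePrinter.h"

    int main()
    {
        using arm_compute::ActivationLayerInfo; // namespace assumed from the library headers
        // With the new case added above, this prints "LU_BOUNDED_RELU".
        std::cout << ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU << "\n";
        return 0;
    }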
diff --git a/tests/datasets/ActivationFunctionsDataset.h b/tests/datasets/ActivationFunctionsDataset.h
index 3e4f408614..31323dc8be 100644
--- a/tests/datasets/ActivationFunctionsDataset.h
+++ b/tests/datasets/ActivationFunctionsDataset.h
@@ -46,6 +46,7 @@ public:
ActivationLayerInfo::ActivationFunction::LOGISTIC,
ActivationLayerInfo::ActivationFunction::RELU,
ActivationLayerInfo::ActivationFunction::BOUNDED_RELU,
+ ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU,
ActivationLayerInfo::ActivationFunction::LEAKY_RELU,
ActivationLayerInfo::ActivationFunction::SOFT_RELU,
ActivationLayerInfo::ActivationFunction::SQRT,
diff --git a/tests/validation/CPP/ActivationLayer.cpp b/tests/validation/CPP/ActivationLayer.cpp
index fa393be5e1..8fcacca1e2 100644
--- a/tests/validation/CPP/ActivationLayer.cpp
+++ b/tests/validation/CPP/ActivationLayer.cpp
@@ -66,6 +66,9 @@ SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo
case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
dst[i] = std::min<T>(a, std::max(static_cast<T>(0), x));
break;
+ case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+ dst[i] = std::min<T>(a, std::max<T>(b, x));
+ break;
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
dst[i] = (x > 0) ? x : a * x;
break;
@@ -125,6 +128,9 @@ SimpleTensor<T> activation_layer(const SimpleTensor<T> &src, ActivationLayerInfo
case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
dst[i] = min(a, max(const_0, x)).raw();
break;
+ case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
+ dst[i] = min(a, max(b, x)).raw();
+ break;
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
dst[i] = (x > const_0) ? x.raw() : mul(a, x).raw();
break;
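Taken together, the two reference paths above define LU_BOUNDED_RELU as a ReLU clamped between a lower bound b and an upper bound a, i.e. f(x) = min(a, max(b, x)); the second hunk applies the same clamp on the fixed-point type via min/max and extracts the raw value. A small standalone sketch of the floating-point behaviour follows; the helper name and the tiny test driver are illustrative and not part of the patch.

    #include <algorithm>
    #include <cassert>

    // Lower-/upper-bounded ReLU as in the reference above: clamp x into [b, a].
    template <typename T>
    T lu_bounded_relu(T x, T a, T b)
    {
        return std::min<T>(a, std::max<T>(b, x));
    }

    int main()
    {
        // With a = 6 and b = -1: values below -1 clamp to -1, values above 6 clamp to 6.
        assert(lu_bounded_relu(-3.0f, 6.0f, -1.0f) == -1.0f);
        assert(lu_bounded_relu( 2.5f, 6.0f, -1.0f) ==  2.5f);
        assert(lu_bounded_relu( 9.0f, 6.0f, -1.0f) ==  6.0f);
        return 0;
    }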