Diffstat (limited to 'tests/validation/GLES_COMPUTE')
-rw-r--r--  tests/validation/GLES_COMPUTE/ActivationLayer.cpp  27
-rw-r--r--  tests/validation/GLES_COMPUTE/PoolingLayer.cpp     16
2 files changed, 11 insertions(+), 32 deletions(-)
diff --git a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
index a8c7253b8f..7676b858f6 100644
--- a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
@@ -61,35 +61,14 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
case ActivationLayerInfo::ActivationFunction::SQUARE:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.1f : epsilon);
case ActivationLayerInfo::ActivationFunction::LOGISTIC:
- if(is_data_type_fixed_point(data_type))
- {
- return AbsoluteTolerance<float>(5.f);
- }
- else
- {
- return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : epsilon);
- }
+ return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : epsilon);
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
- if(is_data_type_fixed_point(data_type))
- {
- return AbsoluteTolerance<float>(5.f);
- }
- else
- {
- return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
- }
+ return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
case ActivationLayerInfo::ActivationFunction::TANH:
- if(is_data_type_fixed_point(data_type))
- {
- return AbsoluteTolerance<float>(5.f);
- }
- else
- {
- return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
- }
+ return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
default:
return AbsoluteTolerance<float>(epsilon);
}
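After this hunk, the helper no longer special-cases fixed-point types, so each activation function maps to exactly one tolerance. For reference, a minimal sketch of the resulting function, reconstructed from the hunk above (it assumes epsilon is a small float constant defined earlier in the file):

    // Sketch of the simplified tolerance helper, reconstructed from the hunk above.
    // Assumes `epsilon` is a small float constant defined earlier in the file.
    AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activation, DataType data_type)
    {
        switch(activation)
        {
            case ActivationLayerInfo::ActivationFunction::SQUARE:
                return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.1f : epsilon);
            case ActivationLayerInfo::ActivationFunction::LOGISTIC:
                return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : epsilon);
            case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
                return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
            case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
            case ActivationLayerInfo::ActivationFunction::SQRT:
                return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
            case ActivationLayerInfo::ActivationFunction::TANH:
                return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
            default:
                return AbsoluteTolerance<float>(epsilon);
        }
    }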
diff --git a/tests/validation/GLES_COMPUTE/PoolingLayer.cpp b/tests/validation/GLES_COMPUTE/PoolingLayer.cpp
index ac1bd724ac..7679007a82 100644
--- a/tests/validation/GLES_COMPUTE/PoolingLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/PoolingLayer.cpp
@@ -59,17 +59,17 @@ TEST_SUITE(PoolingLayer)
DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
framework::dataset::make("InputInfo",
{
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0), // Mismatching data type
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0), // Window shrink
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0), // Invalid pad/size combination
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0), // Invalid pad/size combination
- TensorInfo(TensorShape(15U, 13U, 5U), 1, DataType::F32, 0), // Non-rectangular Global Pooling
- TensorInfo(TensorShape(13U, 13U, 5U), 1, DataType::F32, 0), // Invalid output Global Pooling
- TensorInfo(TensorShape(13U, 13U, 5U), 1, DataType::F32, 0),
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data type
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Window shrink
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Invalid pad/size combination
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Invalid pad/size combination
+ TensorInfo(TensorShape(15U, 13U, 5U), 1, DataType::F32), // Non-rectangular Global Pooling
+ TensorInfo(TensorShape(13U, 13U, 5U), 1, DataType::F32), // Invalid output Global Pooling
+ TensorInfo(TensorShape(13U, 13U, 5U), 1, DataType::F32),
}),
framework::dataset::make("OutputInfo",
{
- TensorInfo(TensorShape(25U, 11U, 2U), 1, DataType::F16, 0), TensorInfo(TensorShape(25U, 11U, 2U), 1, DataType::F32, 0), TensorInfo(TensorShape(30U, 11U, 2U), 1, DataType::F32, 0), TensorInfo(TensorShape(25U, 16U, 2U), 1, DataType::F32, 0), TensorInfo(TensorShape(1U, 1U, 5U), 1, DataType::F32, 0), TensorInfo(TensorShape(2U, 2U, 5U), 1, DataType::F32, 0), TensorInfo(TensorShape(1U, 1U, 5U), 1, DataType::F32, 0),
+ TensorInfo(TensorShape(25U, 11U, 2U), 1, DataType::F16), TensorInfo(TensorShape(25U, 11U, 2U), 1, DataType::F32), TensorInfo(TensorShape(30U, 11U, 2U), 1, DataType::F32), TensorInfo(TensorShape(25U, 16U, 2U), 1, DataType::F32), TensorInfo(TensorShape(1U, 1U, 5U), 1, DataType::F32), TensorInfo(TensorShape(2U, 2U, 5U), 1, DataType::F32), TensorInfo(TensorShape(1U, 1U, 5U), 1, DataType::F32),
})),
framework::dataset::make("PoolInfo",
{