author    Vidhya Sudhan Loganathan <vidhyasudhan.loganathan@arm.com>  2018-07-04 09:34:00 +0100
committer Anthony Barbier <anthony.barbier@arm.com>                   2018-11-02 16:54:10 +0000
commit    7485d5a62685cb745ab50e970adb722cb71557ac (patch)
tree      ba01b99ca466c93edc9a3f8c1e34394ff84be060 /tests/validation/GLES_COMPUTE
parent    014333d73883c3872e458cedda5ccef586a7ccd4 (diff)
download  ComputeLibrary-7485d5a62685cb745ab50e970adb722cb71557ac.tar.gz
COMPMID-970: Remove QS8 / QS16 support

Removed fixed point related code.

Change-Id: I487acf138dace3b0450e0d72ca7071eaec254566
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/137678
Tested-by: Jenkins <bsgcomp@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Diffstat (limited to 'tests/validation/GLES_COMPUTE')
 -rw-r--r--  tests/validation/GLES_COMPUTE/ActivationLayer.cpp  27
 -rw-r--r--  tests/validation/GLES_COMPUTE/PoolingLayer.cpp     16
 2 files changed, 11 insertions(+), 32 deletions(-)
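
For context, a minimal standalone sketch of what the ActivationLayer tolerance helper reduces to once the fixed-point (QS8/QS16) branches are removed. The DataType and ActivationFunction enums, the AbsoluteTolerance wrapper, and the epsilon value below are stand-ins for the real arm_compute/test-framework types; only the per-activation tolerance values are taken from the added lines of the diff that follows.

#include <cassert>

// Assumed stand-ins for the real arm_compute types (illustration only).
enum class DataType { F16, F32 };
enum class ActivationFunction { SQUARE, LOGISTIC, LEAKY_RELU, SOFT_RELU, SQRT, TANH, RELU };

template <typename T>
struct AbsoluteTolerance
{
    explicit AbsoluteTolerance(T value) : value(value) {}
    T value;
};

// Post-patch shape of the helper: with the fixed-point branches gone, each
// case collapses to a single F16-vs-F32 choice.
AbsoluteTolerance<float> tolerance(ActivationFunction activation, DataType data_type)
{
    constexpr float epsilon = 1e-6f; // assumed default tolerance
    switch(activation)
    {
        case ActivationFunction::SQUARE:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.1f : epsilon);
        case ActivationFunction::LOGISTIC:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : epsilon);
        case ActivationFunction::LEAKY_RELU:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
        case ActivationFunction::SOFT_RELU:
        case ActivationFunction::SQRT:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
        case ActivationFunction::TANH:
            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
        default:
            return AbsoluteTolerance<float>(epsilon);
    }
}

int main()
{
    // The literals compare exactly because the same constant flows through.
    assert(tolerance(ActivationFunction::TANH, DataType::F16).value == 0.001f);
    return 0;
}
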
diff --git a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
index a8c7253b8f..7676b858f6 100644
--- a/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/ActivationLayer.cpp
@@ -61,35 +61,14 @@ AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activ
case ActivationLayerInfo::ActivationFunction::SQUARE:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.1f : epsilon);
case ActivationLayerInfo::ActivationFunction::LOGISTIC:
- if(is_data_type_fixed_point(data_type))
- {
- return AbsoluteTolerance<float>(5.f);
- }
- else
- {
- return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : epsilon);
- }
+ return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : epsilon);
case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
case ActivationLayerInfo::ActivationFunction::SQRT:
- if(is_data_type_fixed_point(data_type))
- {
- return AbsoluteTolerance<float>(5.f);
- }
- else
- {
- return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
- }
+ return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
case ActivationLayerInfo::ActivationFunction::TANH:
- if(is_data_type_fixed_point(data_type))
- {
- return AbsoluteTolerance<float>(5.f);
- }
- else
- {
- return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
- }
+ return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
default:
return AbsoluteTolerance<float>(epsilon);
}
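
The PoolingLayer change below is purely mechanical: with QS8/QS16 gone, the TensorInfo constructor loses its trailing fixed-point-position argument (the literal 0 in the removed lines), so every call site drops it. A compilable sketch of the before/after call site follows; the types are hypothetical stand-ins for the real ones in arm_compute/core/TensorInfo.h.

#include <array>
#include <cstddef>

// Assumed stand-ins for the real arm_compute types (illustration only).
enum class DataType { F16, F32 };

struct TensorShape
{
    TensorShape(unsigned int x, unsigned int y, unsigned int z) : dims{ x, y, z } {}
    std::array<unsigned int, 3> dims;
};

// Post-patch constructor: no fixed-point-position parameter.
struct TensorInfo
{
    TensorInfo(const TensorShape &shape, std::size_t num_channels, DataType type)
        : shape(shape), num_channels(num_channels), type(type)
    {
    }

    TensorShape shape;
    std::size_t num_channels;
    DataType    type;
};

int main()
{
    // Old call site: TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0);
    // New call site after QS8/QS16 removal:
    TensorInfo info(TensorShape(27U, 13U, 2U), 1, DataType::F32);
    (void)info;
    return 0;
}
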
diff --git a/tests/validation/GLES_COMPUTE/PoolingLayer.cpp b/tests/validation/GLES_COMPUTE/PoolingLayer.cpp
index ac1bd724ac..7679007a82 100644
--- a/tests/validation/GLES_COMPUTE/PoolingLayer.cpp
+++ b/tests/validation/GLES_COMPUTE/PoolingLayer.cpp
@@ -59,17 +59,17 @@ TEST_SUITE(PoolingLayer)
DATA_TEST_CASE(Validate, framework::DatasetMode::ALL, zip(zip(zip(
framework::dataset::make("InputInfo",
{
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0), // Mismatching data type
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0), // Window shrink
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0), // Invalid pad/size combination
- TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32, 0), // Invalid pad/size combination
- TensorInfo(TensorShape(15U, 13U, 5U), 1, DataType::F32, 0), // Non-rectangular Global Pooling
- TensorInfo(TensorShape(13U, 13U, 5U), 1, DataType::F32, 0), // Invalid output Global Pooling
- TensorInfo(TensorShape(13U, 13U, 5U), 1, DataType::F32, 0),
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Mismatching data type
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Window shrink
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Invalid pad/size combination
+ TensorInfo(TensorShape(27U, 13U, 2U), 1, DataType::F32), // Invalid pad/size combination
+ TensorInfo(TensorShape(15U, 13U, 5U), 1, DataType::F32), // Non-rectangular Global Pooling
+ TensorInfo(TensorShape(13U, 13U, 5U), 1, DataType::F32), // Invalid output Global Pooling
+ TensorInfo(TensorShape(13U, 13U, 5U), 1, DataType::F32),
}),
framework::dataset::make("OutputInfo",
{
- TensorInfo(TensorShape(25U, 11U, 2U), 1, DataType::F16, 0), TensorInfo(TensorShape(25U, 11U, 2U), 1, DataType::F32, 0), TensorInfo(TensorShape(30U, 11U, 2U), 1, DataType::F32, 0), TensorInfo(TensorShape(25U, 16U, 2U), 1, DataType::F32, 0), TensorInfo(TensorShape(1U, 1U, 5U), 1, DataType::F32, 0), TensorInfo(TensorShape(2U, 2U, 5U), 1, DataType::F32, 0), TensorInfo(TensorShape(1U, 1U, 5U), 1, DataType::F32, 0),
+ TensorInfo(TensorShape(25U, 11U, 2U), 1, DataType::F16), TensorInfo(TensorShape(25U, 11U, 2U), 1, DataType::F32), TensorInfo(TensorShape(30U, 11U, 2U), 1, DataType::F32), TensorInfo(TensorShape(25U, 16U, 2U), 1, DataType::F32), TensorInfo(TensorShape(1U, 1U, 5U), 1, DataType::F32), TensorInfo(TensorShape(2U, 2U, 5U), 1, DataType::F32), TensorInfo(TensorShape(1U, 1U, 5U), 1, DataType::F32),
})),
framework::dataset::make("PoolInfo",
{