From 014333d73883c3872e458cedda5ccef586a7ccd4 Mon Sep 17 00:00:00 2001
From: Vidhya Sudhan Loganathan
Date: Mon, 2 Jul 2018 09:13:49 +0100
Subject: COMPMID-970 : Remove QS8 / QS16 support

Removed Fixed point position arguments from test sources

Change-Id: I8343724723b71611fd501ed34de0866d3fb60e7e
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/136382
Tested-by: Jenkins
Reviewed-by: Michele DiGiorgio
Reviewed-by: Anthony Barbier
---
 tests/SimpleTensor.h | 36 ++++++++++++++----------------------
 1 file changed, 14 insertions(+), 22 deletions(-)

(limited to 'tests/SimpleTensor.h')

diff --git a/tests/SimpleTensor.h b/tests/SimpleTensor.h
index 8b3b37010e..759a869696 100644
--- a/tests/SimpleTensor.h
+++ b/tests/SimpleTensor.h
@@ -64,26 +64,23 @@ public:
 
     /** Create an uninitialised tensor of the given @p shape and @p format.
      *
-     * @param[in] shape                Shape of the new raw tensor.
-     * @param[in] format               Format of the new raw tensor.
-     * @param[in] fixed_point_position (Optional) Number of bits for the fractional part of the fixed point numbers
+     * @param[in] shape  Shape of the new raw tensor.
+     * @param[in] format Format of the new raw tensor.
      */
-    SimpleTensor(TensorShape shape, Format format, int fixed_point_position = 0);
+    SimpleTensor(TensorShape shape, Format format);
 
     /** Create an uninitialised tensor of the given @p shape and @p data type.
      *
-     * @param[in] shape                Shape of the new raw tensor.
-     * @param[in] data_type            Data type of the new raw tensor.
-     * @param[in] num_channels         (Optional) Number of channels (default = 1).
-     * @param[in] fixed_point_position (Optional) Number of bits for the fractional part of the fixed point numbers (default = 0).
-     * @param[in] quantization_info    (Optional) Quantization info for asymmetric quantization (default = empty).
-     * @param[in] data_layout          (Optional) Data layout of the tensor (default = NCHW).
+     * @param[in] shape             Shape of the new raw tensor.
+     * @param[in] data_type         Data type of the new raw tensor.
+     * @param[in] num_channels      (Optional) Number of channels (default = 1).
+     * @param[in] quantization_info (Optional) Quantization info for asymmetric quantization (default = empty).
+     * @param[in] data_layout       (Optional) Data layout of the tensor (default = NCHW).
      */
     SimpleTensor(TensorShape shape, DataType data_type,
-                 int              num_channels         = 1,
-                 int              fixed_point_position = 0,
-                 QuantizationInfo quantization_info    = QuantizationInfo(),
-                 DataLayout       data_layout          = DataLayout::NCHW);
+                 int              num_channels      = 1,
+                 QuantizationInfo quantization_info = QuantizationInfo(),
+                 DataLayout       data_layout       = DataLayout::NCHW);
 
     /** Create a deep copy of the given @p tensor.
      *
@@ -224,17 +221,15 @@ protected:
     Format           _format{ Format::UNKNOWN };
     DataType         _data_type{ DataType::UNKNOWN };
     int              _num_channels{ 0 };
-    int              _fixed_point_position{ 0 };
     QuantizationInfo _quantization_info{};
     DataLayout       _data_layout{ DataLayout::UNKNOWN };
 };
 
 template <typename T>
-SimpleTensor<T>::SimpleTensor(TensorShape shape, Format format, int fixed_point_position)
+SimpleTensor<T>::SimpleTensor(TensorShape shape, Format format)
     : _buffer(nullptr),
       _shape(shape),
       _format(format),
-      _fixed_point_position(fixed_point_position),
       _quantization_info(),
       _data_layout(DataLayout::NCHW)
 {
@@ -243,12 +238,11 @@ SimpleTensor<T>::SimpleTensor(TensorShape shape, Format format, int fixed_point_
 }
 
 template <typename T>
-SimpleTensor<T>::SimpleTensor(TensorShape shape, DataType data_type, int num_channels, int fixed_point_position, QuantizationInfo quantization_info, DataLayout data_layout)
+SimpleTensor<T>::SimpleTensor(TensorShape shape, DataType data_type, int num_channels, QuantizationInfo quantization_info, DataLayout data_layout)
     : _buffer(nullptr),
       _shape(shape),
       _data_type(data_type),
       _num_channels(num_channels),
-      _fixed_point_position(fixed_point_position),
       _quantization_info(quantization_info),
       _data_layout(data_layout)
 {
@@ -262,7 +256,6 @@ SimpleTensor<T>::SimpleTensor(const SimpleTensor &tensor)
       _format(tensor.format()),
       _data_type(tensor.data_type()),
       _num_channels(tensor.num_channels()),
-      _fixed_point_position(tensor.fixed_point_position()),
       _quantization_info(tensor.quantization_info()),
       _data_layout(tensor.data_layout())
 {
@@ -305,7 +298,7 @@ size_t SimpleTensor<T>::element_size() const
 template <typename T>
 int SimpleTensor<T>::fixed_point_position() const
 {
-    return _fixed_point_position;
+    return 0;
 }
 
 template <typename T>
@@ -428,7 +421,6 @@ void swap(SimpleTensor<U> &tensor1, SimpleTensor<U> &tensor2)
     swap(tensor1._format, tensor2._format);
     swap(tensor1._data_type, tensor2._data_type);
     swap(tensor1._num_channels, tensor2._num_channels);
-    swap(tensor1._fixed_point_position, tensor2._fixed_point_position);
     swap(tensor1._quantization_info, tensor2._quantization_info);
     swap(tensor1._buffer, tensor2._buffer);
 }
--
cgit v1.2.1
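For context, the only API change this patch makes to the reference tensor is dropping the fixed-point-position argument from the SimpleTensor constructors (the fixed_point_position() getter now always returns 0). Below is a minimal sketch of how a call site in a validation test is affected; the make_reference helper and the float/DataType::F32 choice are illustrative assumptions, only the constructor signatures come from the patch itself.

// Illustrative call site only -- not part of this patch.
#include "tests/SimpleTensor.h"

using namespace arm_compute;
using namespace arm_compute::test;

// Hypothetical helper creating a reference tensor for a validation test.
SimpleTensor<float> make_reference(const TensorShape &shape)
{
    // Before COMPMID-970 the fourth argument was fixed_point_position:
    //   SimpleTensor<float>{ shape, DataType::F32, 1 /* num_channels */, 0 /* fixed_point_position */ };
    // After this patch that argument is gone; quantization info and data layout
    // keep their defaults (empty QuantizationInfo, DataLayout::NCHW).
    return SimpleTensor<float>{ shape, DataType::F32, 1 /* num_channels */ };
}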