author    Derek Lamberti <derek.lamberti@arm.com>  2020-01-10 17:14:08 +0000
committer Kevin May <kevin.may@arm.com>  2020-01-13 18:18:12 +0000
commit    f90c56d72de4848a2dc5844a97458aaf09df07c2 (patch)
tree      71f1c6f16a4687286614f5526ed70938a611b27d /src/backends/backendsCommon/test
parent    842e0dbd40114e19bf26916fefe06c869dbe416d (diff)
download  armnn-f90c56d72de4848a2dc5844a97458aaf09df07c2.tar.gz
Rename quantized data types to remove ambiguity for signed/unsigned payloads
!android-nn-driver:2572
Change-Id: I8fe52ceb09987b3d05c539409510f535165455cc
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
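For context, a minimal before/after sketch of the rename as it is applied throughout this patch (illustrative only, not part of the diff; variable names are hypothetical, the shapes mirror the test tensors below):

    // Old enum values: signedness of the payload is not obvious from the name.
    armnn::TensorInfo u8Info ({ 2, 3 }, armnn::DataType::QuantisedAsymm8);
    armnn::TensorInfo s16Info({ 2, 4 }, armnn::DataType::QuantisedSymm16);

    // New enum values: the identifier states unsigned (U8) or signed (S16) explicitly.
    armnn::TensorInfo u8InfoRenamed ({ 2, 3 }, armnn::DataType::QAsymmU8);
    armnn::TensorInfo s16InfoRenamed({ 2, 4 }, armnn::DataType::QSymmS16);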
Diffstat (limited to 'src/backends/backendsCommon/test')
-rw-r--r--  src/backends/backendsCommon/test/EndToEndTestImpl.hpp  2
-rw-r--r--  src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp  16
-rw-r--r--  src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp  4
-rw-r--r--  src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp  8
-rw-r--r--  src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp  44
-rw-r--r--  src/backends/backendsCommon/test/QuantizedLstmEndToEndTestImpl.cpp  24
-rw-r--r--  src/backends/backendsCommon/test/WorkloadDataValidation.cpp  2
-rw-r--r--  src/backends/backendsCommon/test/WorkloadTestUtils.hpp  4
-rw-r--r--  src/backends/backendsCommon/test/layerTests/AbsTestImpl.cpp  20
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp  46
-rw-r--r--  src/backends/backendsCommon/test/layerTests/AdditionTestImpl.cpp  12
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp  20
-rw-r--r--  src/backends/backendsCommon/test/layerTests/BatchNormalizationTestImpl.cpp  8
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ComparisonTestImpl.cpp  36
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ConcatTestImpl.cpp  64
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ConstantTestImpl.cpp  8
-rw-r--r--  src/backends/backendsCommon/test/layerTests/Conv2dTestImpl.cpp  72
-rw-r--r--  src/backends/backendsCommon/test/layerTests/DebugTestImpl.cpp  16
-rw-r--r--  src/backends/backendsCommon/test/layerTests/DepthToSpaceTestImpl.cpp  32
-rw-r--r--  src/backends/backendsCommon/test/layerTests/DequantizeTestImpl.cpp  10
-rw-r--r--  src/backends/backendsCommon/test/layerTests/DivisionTestImpl.cpp  12
-rw-r--r--  src/backends/backendsCommon/test/layerTests/FloorTestImpl.cpp  4
-rw-r--r--  src/backends/backendsCommon/test/layerTests/FullyConnectedTestImpl.cpp  8
-rw-r--r--  src/backends/backendsCommon/test/layerTests/GatherTestImpl.cpp  12
-rw-r--r--  src/backends/backendsCommon/test/layerTests/L2NormalizationTestImpl.cpp  16
-rw-r--r--  src/backends/backendsCommon/test/layerTests/LstmTestImpl.cpp  28
-rw-r--r--  src/backends/backendsCommon/test/layerTests/MaximumTestImpl.cpp  12
-rw-r--r--  src/backends/backendsCommon/test/layerTests/MinimumTestImpl.cpp  8
-rw-r--r--  src/backends/backendsCommon/test/layerTests/MultiplicationTestImpl.cpp  12
-rw-r--r--  src/backends/backendsCommon/test/layerTests/PadTestImpl.cpp  20
-rw-r--r--  src/backends/backendsCommon/test/layerTests/Pooling2dTestImpl.cpp  80
-rw-r--r--  src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp  6
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ReshapeTestImpl.cpp  16
-rw-r--r--  src/backends/backendsCommon/test/layerTests/ResizeTestImpl.cpp  80
-rw-r--r--  src/backends/backendsCommon/test/layerTests/RsqrtTestImpl.cpp  16
-rw-r--r--  src/backends/backendsCommon/test/layerTests/SliceTestImpl.cpp  16
-rw-r--r--  src/backends/backendsCommon/test/layerTests/SoftmaxTestImpl.cpp  14
-rw-r--r--  src/backends/backendsCommon/test/layerTests/SpaceToBatchNdTestImpl.cpp  32
-rw-r--r--  src/backends/backendsCommon/test/layerTests/SpaceToDepthTestImpl.cpp  8
-rw-r--r--  src/backends/backendsCommon/test/layerTests/SplitterTestImpl.cpp  8
-rw-r--r--  src/backends/backendsCommon/test/layerTests/StridedSliceTestImpl.cpp  54
-rw-r--r--  src/backends/backendsCommon/test/layerTests/SubtractionTestImpl.cpp  12
-rw-r--r--  src/backends/backendsCommon/test/layerTests/TransposeConvolution2dTestImpl.cpp  34
43 files changed, 478 insertions, 478 deletions
diff --git a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
index d6f589fa00..4b9bf7a711 100644
--- a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
@@ -88,7 +88,7 @@ inline bool ConstantUsageFloat32Test(const std::vector<BackendId>& backends)
inline bool ConstantUsageUint8Test(const std::vector<BackendId>& backends)
{
- TensorInfo commonTensorInfo({ 2, 3 }, DataType::QuantisedAsymm8);
+ TensorInfo commonTensorInfo({ 2, 3 }, DataType::QAsymmU8);
const float scale = 0.023529f;
const int8_t offset = -43;
diff --git a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
index 6924beb820..031210f1fc 100644
--- a/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
+++ b/src/backends/backendsCommon/test/IsLayerSupportedTestImpl.hpp
@@ -316,22 +316,22 @@ struct DummyLayer<armnn::QuantizedLstmLayer, void>
m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");
m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
- armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
- armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
- armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
- armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
- armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
- armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
- armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
- armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
+ armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
diff --git a/src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp b/src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp
index 9273a7910f..850a4d37ee 100644
--- a/src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp
+++ b/src/backends/backendsCommon/test/JsonPrinterTestImpl.cpp
@@ -147,12 +147,12 @@ std::string GetSoftmaxProfilerJson(const std::vector<armnn::BackendId>& backends
softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
// set the tensors in the network
- TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QuantisedAsymm8);
+ TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
inputTensorInfo.SetQuantizationOffset(100);
inputTensorInfo.SetQuantizationScale(10000.0f);
input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
- TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QuantisedAsymm8);
+ TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
outputTensorInfo.SetQuantizationOffset(0);
outputTensorInfo.SetQuantizationScale(1.0f / 256.0f);
softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
diff --git a/src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp b/src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp
index 08f696812e..eb1b976656 100644
--- a/src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp
+++ b/src/backends/backendsCommon/test/LayerReleaseConstantDataTest.cpp
@@ -172,9 +172,9 @@ BOOST_AUTO_TEST_CASE(ReleaseFullyConnectedLayerConstantDataTest)
float outputQScale = 2.0f;
layer->m_Weight = std::make_unique<ScopedCpuTensorHandle>(TensorInfo({7, 20},
- DataType::QuantisedAsymm8, inputsQScale, 0));
+ DataType::QAsymmU8, inputsQScale, 0));
layer->m_Bias = std::make_unique<ScopedCpuTensorHandle>(TensorInfo({7},
- GetBiasDataType(DataType::QuantisedAsymm8), inputsQScale));
+ GetBiasDataType(DataType::QAsymmU8), inputsQScale));
layer->m_Weight->Allocate();
layer->m_Bias->Allocate();
@@ -183,8 +183,8 @@ BOOST_AUTO_TEST_CASE(ReleaseFullyConnectedLayerConstantDataTest)
Layer* const output = graph.AddLayer<OutputLayer>(0, "output");
// connect up
- Connect(input, layer, TensorInfo({3, 1, 4, 5}, DataType::QuantisedAsymm8, inputsQScale));
- Connect(layer, output, TensorInfo({3, 7}, DataType::QuantisedAsymm8, outputQScale));
+ Connect(input, layer, TensorInfo({3, 1, 4, 5}, DataType::QAsymmU8, inputsQScale));
+ Connect(layer, output, TensorInfo({3, 7}, DataType::QAsymmU8, outputQScale));
// check the constants that they are not NULL
BOOST_CHECK(layer->m_Weight != nullptr);
diff --git a/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp b/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
index ca3c563757..162cc8436c 100644
--- a/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizeSubgraphViewTests.cpp
@@ -207,8 +207,8 @@ void CheckUntouchedSubgraph(const SubgraphView& untouchedSubgraph,
// Creates a subgraph containing only a single unsupported layer (only convolutions are unsupported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
- const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
+ const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Pooling2dDescriptor poolingDescriptor;
poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
@@ -242,8 +242,8 @@ SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph1(Graph& graph, Layer
// Creates a subgraph containing only unsupported layers (only convolutions are unsupported by the mock backend)
SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
- const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
+ const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
Pooling2dDescriptor poolingDescriptor;
poolingDescriptor.m_PoolType = armnn::PoolingAlgorithm::Average;
@@ -285,9 +285,9 @@ SubgraphView::SubgraphViewPtr BuildFullyUnsupportedSubgraph2(Graph& graph, Layer
// Creates a simple subgraph with only one convolution layer, supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
- const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
+ const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
Convolution2dDescriptor convolutionDescriptor;
@@ -315,9 +315,9 @@ SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph1(Graph& graph, Layer
// Creates a subgraph with five convolutions layers, all supported by the mock backend
SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
- const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
+ const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
Convolution2dDescriptor convolutionDescriptor;
@@ -362,9 +362,9 @@ SubgraphView::SubgraphViewPtr BuildFullyOptimizableSubgraph2(Graph& graph, Layer
// (only convolutions are unsupported by the mock backend)
SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
- const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
+ const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
Convolution2dDescriptor convolutionDescriptor;
@@ -421,9 +421,9 @@ SubgraphView::SubgraphViewPtr BuildPartiallySupportedSubgraph(Graph& graph, Laye
// Creates a subgraph with only unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
- const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
+ const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
Convolution2dDescriptor convolutionDescriptor;
@@ -452,9 +452,9 @@ SubgraphView::SubgraphViewPtr BuildFullyUnoptimizableSubgraph1(Graph& graph, Lay
// Creates a subgraph with some unoptimizable layers ("unoptimizable" is added to the layer's name)
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
- const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
+ const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
Convolution2dDescriptor convolutionDescriptor;
@@ -501,9 +501,9 @@ SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph1(Graph& graph, L
// this is meant to test input slots coming from different layers
SubgraphView::SubgraphViewPtr BuildPartiallyOptimizableSubgraph2(Graph& graph, LayerNameToLayerMap& layersInGraph)
{
- const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QuantisedAsymm8, 1.0f, 0);
- const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QuantisedAsymm8, 0.9f, 0);
+ const TensorInfo inputInfo ({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo outputInfo({ 1, 16, 16, 16 }, DataType::QAsymmU8, 1.0f, 0);
+ const TensorInfo weightInfo({ 16, 1, 1, 16 }, DataType::QAsymmU8, 0.9f, 0);
const TensorInfo biasInfo ({ 1, 1, 1, 16 }, DataType::Signed32, 0.9f, 0);
Convolution2dDescriptor convolutionDescriptor;
diff --git a/src/backends/backendsCommon/test/QuantizedLstmEndToEndTestImpl.cpp b/src/backends/backendsCommon/test/QuantizedLstmEndToEndTestImpl.cpp
index 609773ce89..cbba666004 100644
--- a/src/backends/backendsCommon/test/QuantizedLstmEndToEndTestImpl.cpp
+++ b/src/backends/backendsCommon/test/QuantizedLstmEndToEndTestImpl.cpp
@@ -43,12 +43,12 @@ armnn::INetworkPtr CreateQuantizedLstmNetwork(MultiArray input,
int32_t cellStateOffset = 0;
armnn::TensorInfo inputWeightsInfo({outputSize, inputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset);
armnn::TensorInfo recurrentWeightsInfo({outputSize, outputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset);
@@ -121,27 +121,27 @@ armnn::INetworkPtr CreateQuantizedLstmNetwork(MultiArray input,
armnn::IConnectableLayer* const outputStateOut = net->AddOutputLayer(1);
armnn::TensorInfo inputTensorInfo({batchSize , inputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
inputOutputScale,
inputOutputOffset);
armnn::TensorInfo cellStateInTensorInfo({batchSize , outputSize},
- armnn::DataType::QuantisedSymm16,
+ armnn::DataType::QSymmS16,
cellStateScale,
cellStateOffset);
armnn::TensorInfo outputStateInTensorInfo({batchSize , outputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
inputOutputScale,
inputOutputOffset);
armnn::TensorInfo cellStateOutTensorInfo({batchSize, outputSize},
- armnn::DataType::QuantisedSymm16,
+ armnn::DataType::QSymmS16,
cellStateScale,
cellStateOffset);
armnn::TensorInfo outputTensorInfo({batchSize, outputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
inputOutputScale,
inputOutputOffset);
@@ -178,23 +178,23 @@ IsCloseEnough(T value1, T value2, T tolerance)
void QuantizedLstmEndToEnd(const std::vector<armnn::BackendId>& backends)
{
std::vector<uint8_t> inputVector = {166, 179, 50, 150};
- armnn::TensorInfo inputDesc({2, 2}, armnn::DataType::QuantisedAsymm8);
+ armnn::TensorInfo inputDesc({2, 2}, armnn::DataType::QAsymmU8);
boost::multi_array<uint8_t, 2> input = MakeTensor<uint8_t, 2>(inputDesc, inputVector);
std::vector<int16_t> cellStateInVector = {876, 1034, 955, -909, 761, 1029, 796, -1036};
- armnn::TensorInfo cellStateInDesc({2, 4}, armnn::DataType::QuantisedSymm16);
+ armnn::TensorInfo cellStateInDesc({2, 4}, armnn::DataType::QSymmS16);
boost::multi_array<int16_t, 2> cellStateIn = MakeTensor<int16_t, 2>(cellStateInDesc, cellStateInVector);
std::vector<uint8_t> outputStateInVector = {136, 150, 140, 115, 135, 152, 138, 112};
- armnn::TensorInfo outputStateInDesc({2, 4}, armnn::DataType::QuantisedAsymm8);
+ armnn::TensorInfo outputStateInDesc({2, 4}, armnn::DataType::QAsymmU8);
boost::multi_array<uint8_t, 2> outputStateIn = MakeTensor<uint8_t, 2>(outputStateInDesc, outputStateInVector);
std::vector<int16_t> cellStateOutVector = {1485, 1177, 1373, -1023, 1019, 1355, 1097, -1235};
- armnn::TensorInfo cellStateOutVectorDesc({2, 4}, armnn::DataType::QuantisedSymm16);
+ armnn::TensorInfo cellStateOutVectorDesc({2, 4}, armnn::DataType::QSymmS16);
boost::multi_array<int16_t, 2> cellStateOut = MakeTensor<int16_t, 2>(cellStateOutVectorDesc, cellStateOutVector);
std::vector<uint8_t> outputStateOutVector = {140, 151, 146, 112, 136, 156, 142, 112};
- armnn::TensorInfo outputDesc({2, 4}, armnn::DataType::QuantisedAsymm8);
+ armnn::TensorInfo outputDesc({2, 4}, armnn::DataType::QAsymmU8);
boost::multi_array<uint8_t, 2> outputStateOut = MakeTensor<uint8_t, 2>(outputDesc, outputStateOutVector);
// Builds up the structure of the network
diff --git a/src/backends/backendsCommon/test/WorkloadDataValidation.cpp b/src/backends/backendsCommon/test/WorkloadDataValidation.cpp
index b5acd88e89..3c47eab01f 100644
--- a/src/backends/backendsCommon/test/WorkloadDataValidation.cpp
+++ b/src/backends/backendsCommon/test/WorkloadDataValidation.cpp
@@ -615,7 +615,7 @@ BOOST_AUTO_TEST_CASE(BiasPerAxisQuantization_Validate)
const TensorShape weightShape{ cOutput, cInput, hInput, wInput };
const TensorShape biasShape { cOutput };
- constexpr DataType inputType = DataType::QuantisedAsymm8;
+ constexpr DataType inputType = DataType::QAsymmU8;
constexpr DataType weightType = DataType::QuantizedSymm8PerAxis;
constexpr DataType biasType = DataType::Signed32;
diff --git a/src/backends/backendsCommon/test/WorkloadTestUtils.hpp b/src/backends/backendsCommon/test/WorkloadTestUtils.hpp
index 552eab2cae..0b0f265db4 100644
--- a/src/backends/backendsCommon/test/WorkloadTestUtils.hpp
+++ b/src/backends/backendsCommon/test/WorkloadTestUtils.hpp
@@ -98,9 +98,9 @@ inline armnn::Optional<armnn::DataType> GetBiasTypeFromWeightsType(armnn::Option
case armnn::DataType::Float16:
case armnn::DataType::Float32:
return weightsType;
- case armnn::DataType::QuantisedAsymm8:
+ case armnn::DataType::QAsymmU8:
return armnn::DataType::Signed32;
- case armnn::DataType::QuantisedSymm16:
+ case armnn::DataType::QSymmS16:
return armnn::DataType::Signed32;
default:
BOOST_ASSERT_MSG(false, "GetBiasTypeFromWeightsType(): Unsupported data type.");
diff --git a/src/backends/backendsCommon/test/layerTests/AbsTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/AbsTestImpl.cpp
index ff76a38b08..4ec20d87d7 100644
--- a/src/backends/backendsCommon/test/layerTests/AbsTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/AbsTestImpl.cpp
@@ -71,7 +71,7 @@ LayerTestResult<T, 2> Abs2dTest(
float qScale = 0.0625f;
int32_t qOffset = 64;
- if (ArmnnType == armnn::DataType::QuantisedSymm16)
+ if (ArmnnType == armnn::DataType::QSymmS16)
{
qScale = 0.1f;
qOffset = 0;
@@ -117,7 +117,7 @@ LayerTestResult<T, 3> Abs3dTest(
float qScale = 0.0625f;
int32_t qOffset = 64;
- if (ArmnnType == armnn::DataType::QuantisedSymm16)
+ if (ArmnnType == armnn::DataType::QSymmS16)
{
qScale = 0.1f;
qOffset = 0;
@@ -218,13 +218,13 @@ Abs2dTest<armnn::DataType::Float16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 2>
-Abs2dTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 2>
+Abs2dTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 2>
-Abs2dTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 2>
+Abs2dTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
@@ -238,13 +238,13 @@ Abs3dTest<armnn::DataType::Float16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 3>
-Abs3dTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 3>
+Abs3dTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 3>
-Abs3dTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 3>
+Abs3dTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
index 2f2d8dbd38..87f42194b9 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
@@ -176,7 +176,7 @@ LayerTestResult<uint8_t, 4> BoundedReLuUint8UpperBoundOnlyTest(
float outputScale = 6.0f / 255.0f;
int32_t outputOffset = 0;
- return BoundedReLuTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return BoundedReLuTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, 6.0f, 0.0f,
inputScale, inputOffset, outputScale, outputOffset,
input, output, inputWidth, inputHeight, inputChannels, inputBatchSize);
@@ -205,7 +205,7 @@ LayerTestResult<uint8_t, 4> BoundedReLuUint8UpperAndLowerBoundTest(
int32_t inputOffset = 112;
float inputScale = 0.0125f;
- return BoundedReLuTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return BoundedReLuTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, 1.0f, -1.0f,
inputScale, inputOffset, inputScale, inputOffset, // Input/output scale & offset same.
input, output, inputWidth, inputHeight, inputChannels, inputBatchSize);
@@ -377,7 +377,7 @@ LayerTestResult<uint8_t, 4> ConstantLinearActivationUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ConstantLinearActivationTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return ConstantLinearActivationTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, 4.0f, 3);
}
@@ -385,7 +385,7 @@ LayerTestResult<int16_t, 4> ConstantLinearActivationInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ConstantLinearActivationTestCommon<armnn::DataType::QuantisedSymm16>(
+ return ConstantLinearActivationTestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager, 0.1f, 0);
}
@@ -508,14 +508,14 @@ LayerTestResult<uint8_t, 4> SimpleSigmoidUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SimpleSigmoidTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.1f, 50);
+ return SimpleSigmoidTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.1f, 50);
}
LayerTestResult<int16_t, 4> SimpleSigmoidInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SimpleSigmoidTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return SimpleSigmoidTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
@@ -557,7 +557,7 @@ LayerTestResult<int16_t, 4> ReLuInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ReLuTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return ReLuTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
@@ -565,7 +565,7 @@ LayerTestResult<uint8_t, 4> ReLuUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ReLuTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.1f, 0);
+ return ReLuTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.1f, 0);
}
LayerTestResult<float, 4> ReLuTest(
@@ -616,7 +616,7 @@ LayerTestResult<int16_t, 4> BoundedReLuInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ReLuTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return ReLuTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
@@ -667,14 +667,14 @@ LayerTestResult<uint8_t, 4> SoftReLuUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SoftReLuTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.0625f, 64);
+ return SoftReLuTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.0625f, 64);
}
LayerTestResult<int16_t, 4> SoftReLuInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SoftReLuTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return SoftReLuTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
@@ -724,14 +724,14 @@ LayerTestResult<uint8_t, 4> LeakyReLuUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return LeakyReLuTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.0625f, 64);
+ return LeakyReLuTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.0625f, 64);
}
LayerTestResult<int16_t, 4> LeakyReLuInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return LeakyReLuTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return LeakyReLuTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
@@ -780,14 +780,14 @@ LayerTestResult<uint8_t, 4> AbsUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return AbsTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.0625f, 64);
+ return AbsTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.0625f, 64);
}
LayerTestResult<int16_t, 4> AbsInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return AbsTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return AbsTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
LayerTestResult<float, 5> SqrtNNTest(
@@ -892,14 +892,14 @@ LayerTestResult<uint8_t, 4> SqrtUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SqrtTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.0625f, 64);
+ return SqrtTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.0625f, 64);
}
LayerTestResult<int16_t, 4> SqrtInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SqrtTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return SqrtTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
@@ -948,14 +948,14 @@ LayerTestResult<uint8_t, 4> SquareUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SquareTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.0625f, 64);
+ return SquareTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.0625f, 64);
}
LayerTestResult<int16_t, 4> SquareInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SquareTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return SquareTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
@@ -1006,14 +1006,14 @@ LayerTestResult<uint8_t, 4> TanhUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return TanhTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.1f, 64);
+ return TanhTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.1f, 64);
}
LayerTestResult<int16_t, 4> TanhInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return TanhTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 0.1f, 0);
+ return TanhTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
}
@@ -1130,7 +1130,7 @@ LayerTestResult<uint8_t,4> CompareActivationUint8Test(
armnn::IWorkloadFactory& refWorkloadFactory,
armnn::ActivationFunction f)
{
- return CompareActivationTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return CompareActivationTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, refWorkloadFactory, f, 5, 0.1f, 50);
}
@@ -1140,6 +1140,6 @@ LayerTestResult<int16_t,4> CompareActivationInt16Test(
armnn::IWorkloadFactory& refWorkloadFactory,
armnn::ActivationFunction f)
{
- return CompareActivationTestImpl<armnn::DataType::QuantisedSymm16>(
+ return CompareActivationTestImpl<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager, refWorkloadFactory, f, 5, 0.1f, 0);
}
diff --git a/src/backends/backendsCommon/test/layerTests/AdditionTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/AdditionTestImpl.cpp
index a3a21ab131..82dc59b66b 100644
--- a/src/backends/backendsCommon/test/layerTests/AdditionTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/AdditionTestImpl.cpp
@@ -331,7 +331,7 @@ LayerTestResult<uint8_t, 4> AdditionBroadcastUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return AdditionBroadcastTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return AdditionBroadcastTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, 2.f, 0);
}
@@ -339,7 +339,7 @@ LayerTestResult<int16_t, 4> AdditionBroadcastInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return AdditionBroadcastTestImpl<armnn::DataType::QuantisedSymm16>(
+ return AdditionBroadcastTestImpl<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager, 2.f, 0);
}
@@ -355,7 +355,7 @@ LayerTestResult<uint8_t, 4> AdditionBroadcast1ElementUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return AdditionBroadcast1ElementTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return AdditionBroadcast1ElementTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.1333333f, 128);
}
@@ -363,7 +363,7 @@ LayerTestResult<int16_t, 4> AdditionBroadcast1ElementInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return AdditionBroadcast1ElementTestImpl<armnn::DataType::QuantisedSymm16>(
+ return AdditionBroadcast1ElementTestImpl<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager, 0.1333333f, 0);
}
@@ -392,7 +392,7 @@ LayerTestResult<uint8_t, 4> AdditionUint8Test(
255, 186, 255, 186, 255, 214, // 2261(clamped), 1281, 2163(clamped), 1281, 2408(clamped), 1477
});
- return ElementwiseTestHelper<4, armnn::AdditionQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::AdditionQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -434,7 +434,7 @@ LayerTestResult<int16_t, 4> AdditionInt16Test(
329, 189, 315, 189, 350, 217, // 2303(clamped), 1323, 2205(clamped), 1323, 2450(clamped), 1519
};
- return ElementwiseTestHelper<4, armnn::AdditionQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::AdditionQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
diff --git a/src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp
index 2733100d6c..d63c179dfd 100644
--- a/src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ArgMinMaxTestImpl.cpp
@@ -264,12 +264,12 @@ ArgMaxSimpleTest<armnn::DataType::Float32>(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMaxSimpleTest<armnn::DataType::QuantisedAsymm8>(
+ArgMaxSimpleTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMaxSimpleTest<armnn::DataType::QuantisedSymm16>(
+ArgMaxSimpleTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
@@ -284,12 +284,12 @@ ArgMinSimpleTest<armnn::DataType::Float32>(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMinSimpleTest<armnn::DataType::QuantisedAsymm8>(
+ArgMinSimpleTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMinSimpleTest<armnn::DataType::QuantisedSymm16>(
+ArgMinSimpleTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
@@ -304,12 +304,12 @@ ArgMinChannelTest<armnn::DataType::Float32>(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMinChannelTest<armnn::DataType::QuantisedAsymm8>(
+ArgMinChannelTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMinChannelTest<armnn::DataType::QuantisedSymm16>(
+ArgMinChannelTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
@@ -324,12 +324,12 @@ ArgMaxChannelTest<armnn::DataType::Float32>(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMaxChannelTest<armnn::DataType::QuantisedAsymm8>(
+ArgMaxChannelTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMaxChannelTest<armnn::DataType::QuantisedSymm16>(
+ArgMaxChannelTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
@@ -349,7 +349,7 @@ ArgMaxHeightTest<armnn::DataType::Signed32>(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMaxHeightTest<armnn::DataType::QuantisedAsymm8>(
+ArgMaxHeightTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
@@ -364,6 +364,6 @@ ArgMinWidthTest<armnn::DataType::Signed32>(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
template LayerTestResult<int32_t, 3>
-ArgMinWidthTest<armnn::DataType::QuantisedAsymm8>(
+ArgMinWidthTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
diff --git a/src/backends/backendsCommon/test/layerTests/BatchNormalizationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/BatchNormalizationTestImpl.cpp
index 7857b3531b..7a55146b37 100644
--- a/src/backends/backendsCommon/test/layerTests/BatchNormalizationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/BatchNormalizationTestImpl.cpp
@@ -432,7 +432,7 @@ LayerTestResult<uint8_t, 4> BatchNormUint8Test(
2.f, 4.f
};
- return BatchNormTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return BatchNormTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
inputOutputShape,
@@ -482,7 +482,7 @@ LayerTestResult<uint8_t, 4> BatchNormUint8NhwcTest(
6.f, 4.f
};
- return BatchNormTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return BatchNormTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
inputOutputShape, inputValues, expectedOutputValues,
@@ -524,7 +524,7 @@ LayerTestResult<int16_t, 4> BatchNormInt16Test(
2.f, 4.f
};
- return BatchNormTestImpl<armnn::DataType::QuantisedSymm16>(
+ return BatchNormTestImpl<armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
inputOutputShape,
@@ -574,7 +574,7 @@ LayerTestResult<int16_t, 4> BatchNormInt16NhwcTest(
6.f, 4.f
};
- return BatchNormTestImpl<armnn::DataType::QuantisedSymm16>(
+ return BatchNormTestImpl<armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
inputOutputShape,
diff --git a/src/backends/backendsCommon/test/layerTests/ComparisonTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ComparisonTestImpl.cpp
index c8272f47f0..7327536dff 100644
--- a/src/backends/backendsCommon/test/layerTests/ComparisonTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ComparisonTestImpl.cpp
@@ -408,7 +408,7 @@ LayerTestResult<uint8_t, 4> EqualSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_SimpleTestData,
@@ -419,7 +419,7 @@ LayerTestResult<uint8_t, 4> EqualBroadcast1ElementUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1ElementTestData,
@@ -430,7 +430,7 @@ LayerTestResult<uint8_t, 4> EqualBroadcast1dVectorUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1dVectorTestData,
@@ -507,7 +507,7 @@ LayerTestResult<uint8_t, 4> GreaterSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_SimpleTestData,
@@ -518,7 +518,7 @@ LayerTestResult<uint8_t, 4> GreaterBroadcast1ElementUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1ElementTestData,
@@ -529,7 +529,7 @@ LayerTestResult<uint8_t, 4> GreaterBroadcast1dVectorUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1dVectorTestData,
@@ -607,7 +607,7 @@ LayerTestResult<uint8_t, 4> GreaterOrEqualSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_SimpleTestData,
@@ -618,7 +618,7 @@ LayerTestResult<uint8_t, 4> GreaterOrEqualBroadcast1ElementUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1ElementTestData,
@@ -629,7 +629,7 @@ LayerTestResult<uint8_t, 4> GreaterOrEqualBroadcast1dVectorUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1dVectorTestData,
@@ -706,7 +706,7 @@ LayerTestResult<uint8_t, 4> LessSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_SimpleTestData,
@@ -717,7 +717,7 @@ LayerTestResult<uint8_t, 4> LessBroadcast1ElementUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1ElementTestData,
@@ -728,7 +728,7 @@ LayerTestResult<uint8_t, 4> LessBroadcast1dVectorUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1dVectorTestData,
@@ -806,7 +806,7 @@ LayerTestResult<uint8_t, 4> LessOrEqualSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_SimpleTestData,
@@ -817,7 +817,7 @@ LayerTestResult<uint8_t, 4> LessOrEqualBroadcast1ElementUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1ElementTestData,
@@ -828,7 +828,7 @@ LayerTestResult<uint8_t, 4> LessOrEqualBroadcast1dVectorUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1dVectorTestData,
@@ -906,7 +906,7 @@ LayerTestResult<uint8_t, 4> NotEqualSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_SimpleTestData,
@@ -917,7 +917,7 @@ LayerTestResult<uint8_t, 4> NotEqualBroadcast1ElementUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1ElementTestData,
@@ -928,7 +928,7 @@ LayerTestResult<uint8_t, 4> NotEqualBroadcast1dVectorUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ComparisonTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return ComparisonTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
s_Broadcast1dVectorTestData,
diff --git a/src/backends/backendsCommon/test/layerTests/ConcatTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ConcatTestImpl.cpp
index 57ed7542d2..29cd5ac560 100644
--- a/src/backends/backendsCommon/test/layerTests/ConcatTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ConcatTestImpl.cpp
@@ -2054,14 +2054,14 @@ LayerTestResult<T, 3> ConcatDifferentInputOutputQParamTest(
// Explicit template specializations
//
-template LayerTestResult<ResolveType<DataType::QuantisedAsymm8>, 3>
-ConcatDifferentInputOutputQParamTest<DataType::QuantisedAsymm8>(
+template LayerTestResult<ResolveType<DataType::QAsymmU8>, 3>
+ConcatDifferentInputOutputQParamTest<DataType::QAsymmU8>(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool useSubtensor);
-template LayerTestResult<ResolveType<DataType::QuantisedSymm16>, 3>
-ConcatDifferentInputOutputQParamTest<DataType::QuantisedSymm16>(
+template LayerTestResult<ResolveType<DataType::QSymmS16>, 3>
+ConcatDifferentInputOutputQParamTest<DataType::QSymmS16>(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool useSubtensor);
@@ -2362,9 +2362,9 @@ LayerTestResult<uint8_t, 3> ConcatUint8DifferentQParamsTest(
unsigned int inputChannels2 = 1;
// Defines the tensor descriptors.
- TensorInfo outputTensorInfo({ outputChannels, outputHeight, outputWidth }, DataType::QuantisedAsymm8);
- TensorInfo inputTensorInfo1({ inputChannels1, inputHeight1, inputWidth1 }, DataType::QuantisedAsymm8);
- TensorInfo inputTensorInfo2({ inputChannels2, inputHeight2, inputWidth2 }, DataType::QuantisedAsymm8);
+ TensorInfo outputTensorInfo({ outputChannels, outputHeight, outputWidth }, DataType::QAsymmU8);
+ TensorInfo inputTensorInfo1({ inputChannels1, inputHeight1, inputWidth1 }, DataType::QAsymmU8);
+ TensorInfo inputTensorInfo2({ inputChannels2, inputHeight2, inputWidth2 }, DataType::QAsymmU8);
// Quantized input1 tensor. Range [-3, 1]
const float inputScale1 = 0.015686f;
@@ -2507,9 +2507,9 @@ LayerTestResult<uint8_t, 3> ConcatUint8Test(
unsigned int inputChannels2 = 1;
// Defines the tensor descriptors.
- TensorInfo outputTensorInfo({ outputChannels, outputHeight, outputWidth }, DataType::QuantisedAsymm8);
- TensorInfo inputTensorInfo1({ inputChannels1, inputHeight1, inputWidth1 }, DataType::QuantisedAsymm8);
- TensorInfo inputTensorInfo2({ inputChannels2, inputHeight2, inputWidth2 }, DataType::QuantisedAsymm8);
+ TensorInfo outputTensorInfo({ outputChannels, outputHeight, outputWidth }, DataType::QAsymmU8);
+ TensorInfo inputTensorInfo1({ inputChannels1, inputHeight1, inputWidth1 }, DataType::QAsymmU8);
+ TensorInfo inputTensorInfo2({ inputChannels2, inputHeight2, inputWidth2 }, DataType::QAsymmU8);
// Arbitrary scale and offsets. They don't really matter as the Concat operator doesn't dequantize/quantize them.
const float scale = 0.13497836f;
@@ -2645,9 +2645,9 @@ LayerTestResult<uint16_t, 3> ConcatUint16Test(
unsigned int inputChannels2 = 1;
// Defines the tensor descriptors.
- TensorInfo outputTensorInfo({ outputChannels, outputHeight, outputWidth }, DataType::QuantisedSymm16);
- TensorInfo inputTensorInfo1({ inputChannels1, inputHeight1, inputWidth1 }, DataType::QuantisedSymm16);
- TensorInfo inputTensorInfo2({ inputChannels2, inputHeight2, inputWidth2 }, DataType::QuantisedSymm16);
+ TensorInfo outputTensorInfo({ outputChannels, outputHeight, outputWidth }, DataType::QSymmS16);
+ TensorInfo inputTensorInfo1({ inputChannels1, inputHeight1, inputWidth1 }, DataType::QSymmS16);
+ TensorInfo inputTensorInfo2({ inputChannels2, inputHeight2, inputWidth2 }, DataType::QSymmS16);
// Arbitrary scale and offsets. They don't really matter as the Concat operator doesn't dequantize/quantize them.
const float scale = 0.13497836f;
@@ -2765,28 +2765,28 @@ LayerTestResult<uint8_t, 1> Concat1dUint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat1dTestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat1dTestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 2> Concat2dDim0Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat2dDim0TestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat2dDim0TestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 2> Concat2dDim1Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat2dDim1TestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat2dDim1TestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 2> Concat2dDim0DiffInputDimsUint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat2dDim0DiffInputDimsTestImpl<DataType::QuantisedAsymm8>(
+ return Concat2dDim0DiffInputDimsTestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5f, -1);
}
@@ -2794,7 +2794,7 @@ LayerTestResult<uint8_t, 2> Concat2dDim1DiffInputDimsUint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat2dDim1DiffInputDimsTestImpl<DataType::QuantisedAsymm8>(
+ return Concat2dDim1DiffInputDimsTestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5f, -1);
}
@@ -2802,14 +2802,14 @@ LayerTestResult<uint8_t, 3> Concat3dDim0Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat3dDim0TestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat3dDim0TestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 3> Concat3dDim1Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat3dDim1TestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat3dDim1TestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 3> Concat3dDim2Uint8Test(
@@ -2817,7 +2817,7 @@ LayerTestResult<uint8_t, 3> Concat3dDim2Uint8Test(
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool useSubtensor)
{
- return Concat3dDim2TestImpl<DataType::QuantisedAsymm8>(
+ return Concat3dDim2TestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, useSubtensor, 0.5f, -1);
}
@@ -2825,14 +2825,14 @@ LayerTestResult<uint8_t, 3> Concat3dDim0DiffInputDimsUint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat3dDim0TestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat3dDim0TestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 3> Concat3dDim1DiffInputDimsUint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat3dDim1DiffInputDimsTestImpl<DataType::QuantisedAsymm8>(
+ return Concat3dDim1DiffInputDimsTestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5f, -1);
}
@@ -2841,7 +2841,7 @@ LayerTestResult<uint8_t, 3> Concat3dDim2DiffInputDimsUint8Test(
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool useSubtensor)
{
- return Concat3dDim2DiffInputDimsTestImpl<DataType::QuantisedAsymm8>(
+ return Concat3dDim2DiffInputDimsTestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, useSubtensor, 0.5f, -1);
}
@@ -2849,28 +2849,28 @@ LayerTestResult<uint8_t, 4> Concat4dDim0Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat4dDim0TestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat4dDim0TestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 4> Concat4dDim1Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat4dDim1TestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat4dDim1TestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 4> Concat4dDim2Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat4dDim2TestImpl<DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 0.5f, -1);
+ return Concat4dDim2TestImpl<DataType::QAsymmU8>(workloadFactory, memoryManager, 0.5f, -1);
}
LayerTestResult<uint8_t, 4> Concat4dDim3Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager, bool useSubtensor)
{
- return Concat4dDim3TestImpl<DataType::QuantisedAsymm8>(
+ return Concat4dDim3TestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5f, -1, useSubtensor);
}
@@ -2878,7 +2878,7 @@ LayerTestResult<uint8_t, 4> Concat4dDiffShapeDim0Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat4dDiffShapeDim0TestImpl<DataType::QuantisedAsymm8>(
+ return Concat4dDiffShapeDim0TestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5f, -1);
}
@@ -2886,7 +2886,7 @@ LayerTestResult<uint8_t, 4> Concat4dDiffShapeDim1Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat4dDiffShapeDim1TestImpl<DataType::QuantisedAsymm8>(
+ return Concat4dDiffShapeDim1TestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5f, -1);
}
@@ -2894,7 +2894,7 @@ LayerTestResult<uint8_t, 4> Concat4dDiffShapeDim2Uint8Test(
IWorkloadFactory& workloadFactory,
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Concat4dDiffShapeDim2TestImpl<DataType::QuantisedAsymm8>(
+ return Concat4dDiffShapeDim2TestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5f, -1);
}
@@ -2903,6 +2903,6 @@ LayerTestResult<uint8_t, 4> Concat4dDiffShapeDim3Uint8Test(
const IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool useSubtensor)
{
- return Concat4dDiffShapeDim3TestImpl<DataType::QuantisedAsymm8>(
+ return Concat4dDiffShapeDim3TestImpl<DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5f, -1, useSubtensor);
}
diff --git a/src/backends/backendsCommon/test/layerTests/ConstantTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ConstantTestImpl.cpp
index cfb62637d1..35868405f1 100644
--- a/src/backends/backendsCommon/test/layerTests/ConstantTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ConstantTestImpl.cpp
@@ -134,26 +134,26 @@ LayerTestResult<int16_t, 4> ConstantInt16SimpleQuantizationScaleNoOffsetTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ConstantTestImpl<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 1.0f, 0);
+ return ConstantTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 1.0f, 0);
}
LayerTestResult<uint8_t, 4> ConstantUint8SimpleQuantizationScaleNoOffsetTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ConstantTestImpl<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 1.0f, 0);
+ return ConstantTestImpl<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 1.0f, 0);
}
LayerTestResult<uint8_t, 4> ConstantUint8CustomQuantizationScaleAndOffsetTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ConstantTestImpl<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 2e-6f, 1);
+ return ConstantTestImpl<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 2e-6f, 1);
}
LayerTestResult<int16_t, 4> ConstantInt16CustomQuantizationScaleAndOffsetTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return ConstantTestImpl<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 2e-6f, 1);
+ return ConstantTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 2e-6f, 1);
}
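
Not part of the patch: a minimal sketch of the affine quantization behind the renamed DataType::QAsymmU8, using the custom scale/offset (2e-6f, offset 1) passed to ConstantTestImpl above. This is the standard formula, not an ArmNN API; the helper name is hypothetical.

#include <algorithm>
#include <cmath>
#include <cstdint>

// q = round(real / scale) + offset, clamped to the uint8_t range.
inline uint8_t QuantizeToU8(float value, float scale, int32_t offset)
{
    const int32_t q = static_cast<int32_t>(std::round(value / scale)) + offset;
    return static_cast<uint8_t>(std::min(255, std::max(0, q)));
}

// QuantizeToU8(0.0f, 2e-6f, 1) == 1: the offset is the zero point, which is why the
// asymmetric 8-bit type carries a non-zero offset while the symmetric types do not.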
diff --git a/src/backends/backendsCommon/test/layerTests/Conv2dTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/Conv2dTestImpl.cpp
index a00fda7679..055c9ab6e8 100644
--- a/src/backends/backendsCommon/test/layerTests/Conv2dTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/Conv2dTestImpl.cpp
@@ -1018,13 +1018,13 @@ LayerTestResult<T, 4> Convolution2d3x3DilationTestCommon(
int32_t qOffset;
switch (ArmnnType)
{
- case armnn::DataType::QuantisedAsymm8:
+ case armnn::DataType::QAsymmU8:
{
qScale = 0.1f;
qOffset = 128;
break;
}
- case armnn::DataType::QuantisedSymm16:
+ case armnn::DataType::QSymmS16:
{
qScale = 0.1f;
qOffset = 0;
@@ -2304,13 +2304,13 @@ LayerTestResult<T, 4> DepthwiseConvolution2d3x3DilationTestCommon(
int32_t qOffset;
switch (ArmnnType)
{
- case armnn::DataType::QuantisedAsymm8:
+ case armnn::DataType::QAsymmU8:
{
qScale = 0.1f;
qOffset = 128;
break;
}
- case armnn::DataType::QuantisedSymm16:
+ case armnn::DataType::QSymmS16:
{
qScale = 0.1f;
qOffset = 0;
@@ -2800,15 +2800,15 @@ Convolution2d3x3Dilation3x3Test<armnn::DataType::Float32, armnn::DataType::Float
bool,
armnn::DataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-Convolution2d3x3Dilation3x3Test<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+Convolution2d3x3Dilation3x3Test<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory&,
const armnn::IBackendInternal::IMemoryManagerSharedPtr&,
bool,
armnn::DataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-Convolution2d3x3Dilation3x3Test<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+Convolution2d3x3Dilation3x3Test<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory&,
const armnn::IBackendInternal::IMemoryManagerSharedPtr&,
bool,
@@ -2821,15 +2821,15 @@ Convolution2d2x3x3Dilation3x3Test<armnn::DataType::Float32, armnn::DataType::Flo
bool,
armnn::DataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-Convolution2d2x3x3Dilation3x3Test<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+Convolution2d2x3x3Dilation3x3Test<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory&,
const armnn::IBackendInternal::IMemoryManagerSharedPtr&,
bool,
armnn::DataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-Convolution2d2x3x3Dilation3x3Test<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+Convolution2d2x3x3Dilation3x3Test<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory&,
const armnn::IBackendInternal::IMemoryManagerSharedPtr&,
bool,
@@ -2842,15 +2842,15 @@ Convolution2d2x2Dilation2x2Padding2x2Stride3x3Test<armnn::DataType::Float32, arm
bool biasEnabled,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-Convolution2d2x2Dilation2x2Padding2x2Stride3x3Test<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+Convolution2d2x2Dilation2x2Padding2x2Stride3x3Test<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory &workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager,
bool biasEnabled,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-Convolution2d2x2Dilation2x2Padding2x2Stride3x3Test<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+Convolution2d2x2Dilation2x2Padding2x2Stride3x3Test<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory &workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr &memoryManager,
bool biasEnabled,
@@ -2863,15 +2863,15 @@ DepthwiseConvolution2d3x3Dilation3x3Test<armnn::DataType::Float32, armnn::DataTy
bool,
armnn::DataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-DepthwiseConvolution2d3x3Dilation3x3Test<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+DepthwiseConvolution2d3x3Dilation3x3Test<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory&,
const armnn::IBackendInternal::IMemoryManagerSharedPtr&,
bool,
armnn::DataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-DepthwiseConvolution2d3x3Dilation3x3Test<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+DepthwiseConvolution2d3x3Dilation3x3Test<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory&,
const armnn::IBackendInternal::IMemoryManagerSharedPtr&,
bool,
@@ -2884,15 +2884,15 @@ DepthwiseConvolution2d2x3x3Dilation3x3Test<armnn::DataType::Float32, armnn::Data
bool,
armnn::DataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-DepthwiseConvolution2d2x3x3Dilation3x3Test<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+DepthwiseConvolution2d2x3x3Dilation3x3Test<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory&,
const armnn::IBackendInternal::IMemoryManagerSharedPtr&,
bool,
armnn::DataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-DepthwiseConvolution2d2x3x3Dilation3x3Test<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+DepthwiseConvolution2d2x3x3Dilation3x3Test<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory&,
const armnn::IBackendInternal::IMemoryManagerSharedPtr&,
bool,
@@ -2932,7 +2932,7 @@ LayerTestResult<uint8_t, 4> SimpleConvolution2d3x5Uint8Test(
bool biasEnabled,
const armnn::DataLayout layout)
{
- return SimpleConvolution2d3x5TestCommon<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+ return SimpleConvolution2d3x5TestCommon<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.5f, 50, biasEnabled, layout);
}
@@ -2981,7 +2981,7 @@ LayerTestResult<uint8_t, 4> SimpleConvolution2d3x3Uint8Test(
bool biasEnabled,
const armnn::DataLayout layout)
{
- return SimpleConvolution2d3x3TestCommon<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+ return SimpleConvolution2d3x3TestCommon<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.5f, 50, biasEnabled, layout);
}
@@ -2991,7 +2991,7 @@ LayerTestResult<int16_t, 4> SimpleConvolution2d3x5QSymm16Test(
bool biasEnabled,
const armnn::DataLayout layout)
{
- return SimpleConvolution2d3x5TestCommon<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+ return SimpleConvolution2d3x5TestCommon<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.5f, 50, biasEnabled, layout);
}
@@ -3001,7 +3001,7 @@ LayerTestResult<int16_t, 4> SimpleConvolution2d3x3QSymm16Test(
bool biasEnabled,
const armnn::DataLayout layout)
{
- return SimpleConvolution2d3x3TestCommon<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+ return SimpleConvolution2d3x3TestCommon<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.5f, 50, biasEnabled, layout);
}
@@ -3038,7 +3038,7 @@ LayerTestResult<uint8_t, 4> Convolution1dUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled)
{
- return Convolution1dTestImpl<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+ return Convolution1dTestImpl<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.1f, 128, biasEnabled);
}
@@ -3049,7 +3049,7 @@ LayerTestResult<uint8_t, 4> Convolution2dPerAxisQuantTest(
{
using namespace armnn;
- const DataType inputType = DataType::QuantisedAsymm8;
+ const DataType inputType = DataType::QAsymmU8;
const DataType kernelType = DataType::QuantizedSymm8PerAxis;
const DataType biasType = DataType::Signed32;
@@ -3220,7 +3220,7 @@ LayerTestResult<uint8_t, 4> DepthwiseConvolution2dUint8Test(
bool biasEnabled,
const armnn::DataLayout layout)
{
- return DepthwiseConvolution2dTestImpl<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+ return DepthwiseConvolution2dTestImpl<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.5f, 50, biasEnabled, layout);
}
@@ -3230,7 +3230,7 @@ LayerTestResult<uint8_t, 4> DepthwiseConvolution2dDepthMul1Uint8Test(
bool biasEnabled,
const armnn::DataLayout layout)
{
- return DepthwiseConvolution2dDepthMul1TestImpl<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+ return DepthwiseConvolution2dDepthMul1TestImpl<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.5f, 50, biasEnabled, layout);
}
@@ -3252,7 +3252,7 @@ LayerTestResult<int16_t, 4> DepthwiseConvolution2dInt16Test(
bool biasEnabled,
const armnn::DataLayout layout)
{
- return DepthwiseConvolution2dTestImpl<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+ return DepthwiseConvolution2dTestImpl<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.5f, 50, biasEnabled, layout);
}
@@ -3262,7 +3262,7 @@ LayerTestResult<int16_t, 4> DepthwiseConvolution2dDepthMul1Int16Test(
bool biasEnabled,
const armnn::DataLayout layout)
{
- return DepthwiseConvolution2dDepthMul1TestImpl<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+ return DepthwiseConvolution2dDepthMul1TestImpl<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
workloadFactory, memoryManager, 0.5f, 50, biasEnabled, layout);
}
@@ -3273,7 +3273,7 @@ LayerTestResult<uint8_t, 4> DepthwiseConvolution2dPerAxisQuantTest(
{
using namespace armnn;
- const DataType inputType = DataType::QuantisedAsymm8;
+ const DataType inputType = DataType::QAsymmU8;
const DataType kernelType = DataType::QuantizedSymm8PerAxis;
const DataType biasType = DataType::Signed32;
@@ -3390,6 +3390,6 @@ LayerTestResult<uint8_t, 4> CompareDepthwiseConvolution2dUint8Test(
armnn::IWorkloadFactory& refWorkloadFactory,
const armnn::DataLayout layout)
{
- return CompareDepthwiseConvolution2dTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return CompareDepthwiseConvolution2dTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, refWorkloadFactory, layout);
}
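
Not part of the patch: with the explicit instantiations above, a caller selects the quantized convolution variant purely through the renamed template argument, while the bias type stays Signed32 in both cases. Sketch only, assuming a workloadFactory and memoryManager are in scope as in the surrounding tests.

// 8-bit asymmetric path (payload resolves to uint8_t).
LayerTestResult<uint8_t, 4> resultU8 =
    Convolution2d3x3Dilation3x3Test<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
        workloadFactory, memoryManager, /*biasEnabled=*/false, armnn::DataLayout::NCHW);

// 16-bit symmetric path (payload resolves to int16_t).
LayerTestResult<int16_t, 4> resultS16 =
    Convolution2d3x3Dilation3x3Test<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
        workloadFactory, memoryManager, /*biasEnabled=*/false, armnn::DataLayout::NCHW);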
diff --git a/src/backends/backendsCommon/test/layerTests/DebugTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/DebugTestImpl.cpp
index a4db5686b6..eef8372add 100644
--- a/src/backends/backendsCommon/test/layerTests/DebugTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/DebugTestImpl.cpp
@@ -314,54 +314,54 @@ LayerTestResult<uint8_t, 4> Debug4dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Debug4dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return Debug4dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 3> Debug3dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Debug3dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return Debug3dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 2> Debug2dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Debug2dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return Debug2dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 1> Debug1dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Debug1dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return Debug1dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> Debug4dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Debug4dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return Debug4dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 3> Debug3dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Debug3dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return Debug3dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 2> Debug2dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Debug2dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return Debug2dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 1> Debug1dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Debug1dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return Debug1dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
diff --git a/src/backends/backendsCommon/test/layerTests/DepthToSpaceTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/DepthToSpaceTestImpl.cpp
index 5e5cba349e..96fa24a0cb 100644
--- a/src/backends/backendsCommon/test/layerTests/DepthToSpaceTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/DepthToSpaceTestImpl.cpp
@@ -284,51 +284,51 @@ DepthToSpaceTest4<armnn::DataType::Float16>(
armnn::DataLayout dataLayout);
// QuantisedAsymm8
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-DepthToSpaceTest1<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+DepthToSpaceTest1<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-DepthToSpaceTest2<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+DepthToSpaceTest2<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-DepthToSpaceTest3<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+DepthToSpaceTest3<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-DepthToSpaceTest4<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+DepthToSpaceTest4<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
armnn::DataLayout dataLayout);
// QuantisedSymm16
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-DepthToSpaceTest1<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+DepthToSpaceTest1<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-DepthToSpaceTest2<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+DepthToSpaceTest2<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-DepthToSpaceTest3<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+DepthToSpaceTest3<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-DepthToSpaceTest4<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+DepthToSpaceTest4<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
armnn::DataLayout dataLayout);
diff --git a/src/backends/backendsCommon/test/layerTests/DequantizeTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/DequantizeTestImpl.cpp
index fb225aeb54..7a757d59ee 100644
--- a/src/backends/backendsCommon/test/layerTests/DequantizeTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/DequantizeTestImpl.cpp
@@ -134,14 +134,14 @@ LayerTestResult<float, 4> DequantizeSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return DequantizeSimpleTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return DequantizeSimpleTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> DequantizeOffsetUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return DequantizeOffsetTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return DequantizeOffsetTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> DequantizeSimpleInt8Test(
@@ -155,14 +155,14 @@ LayerTestResult<float, 4> DequantizeSimpleInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return DequantizeSimpleTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return DequantizeSimpleTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<armnn::Half, 4> DequantizeSimpleUint8ToFp16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return DequantizeSimpleTest<armnn::DataType::QuantisedAsymm8, armnn::DataType::Float16>(workloadFactory,
+ return DequantizeSimpleTest<armnn::DataType::QAsymmU8, armnn::DataType::Float16>(workloadFactory,
memoryManager);
}
@@ -177,6 +177,6 @@ LayerTestResult<armnn::Half, 4> DequantizeSimpleInt16ToFp16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return DequantizeSimpleTest<armnn::DataType::QuantisedSymm16, armnn::DataType::Float16>(workloadFactory,
+ return DequantizeSimpleTest<armnn::DataType::QSymmS16, armnn::DataType::Float16>(workloadFactory,
memoryManager);
}
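
Not part of the patch: the inverse mapping exercised by the Dequantize tests above. Plain standard affine dequantization, not an ArmNN call; it applies equally to QAsymmU8 (non-zero offset) and QSymmS16 (offset fixed at zero).

#include <cstdint>

// real = scale * (quantized - offset)
inline float Dequantize(int32_t quantized, float scale, int32_t offset)
{
    return scale * static_cast<float>(quantized - offset);
}

// Dequantize(130, 0.5f, 128) == 1.0f   // QAsymmU8-style, the offset acts as the zero point
// Dequantize(-4, 0.25f, 0)   == -1.0f  // QSymmS16-style, symmetric around zero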
diff --git a/src/backends/backendsCommon/test/layerTests/DivisionTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/DivisionTestImpl.cpp
index b908f96e9f..223beb49e8 100644
--- a/src/backends/backendsCommon/test/layerTests/DivisionTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/DivisionTestImpl.cpp
@@ -288,7 +288,7 @@ LayerTestResult<uint8_t, 4> DivisionUint8Test(
4, 4, 4, 4, 5, 5, 5, 5
};
- return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape,
@@ -314,7 +314,7 @@ LayerTestResult<uint8_t, 4> DivisionBroadcast1ElementUint8Test(
std::vector<uint8_t> output = { 1, 2, 3, 4, 5, 6, 7, 8};
- return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -348,7 +348,7 @@ LayerTestResult<uint8_t, 4> DivisionBroadcast1DVectorUint8Test(
13, 14, 15, 16, 17, 18
};
- return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -383,7 +383,7 @@ LayerTestResult<int16_t,4> DivisionInt16Test(
4, 4, 4, 4, 5, 5, 5, 5
};
- return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape,
@@ -409,7 +409,7 @@ LayerTestResult<int16_t, 4> DivisionBroadcast1ElementInt16Test(
std::vector<int16_t> output = { 1, 2, 3, 4, 5, 6, 7, 8};
- return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
@@ -443,7 +443,7 @@ LayerTestResult<int16_t, 4> DivisionBroadcast1DVectorInt16Test(
13, 14, 15, 16, 17, 18
};
- return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::DivisionQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
diff --git a/src/backends/backendsCommon/test/layerTests/FloorTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/FloorTestImpl.cpp
index a3d29dac71..ebad7fc91c 100644
--- a/src/backends/backendsCommon/test/layerTests/FloorTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/FloorTestImpl.cpp
@@ -71,7 +71,7 @@ SimpleFloorTest<armnn::DataType::Float16>(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-SimpleFloorTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+SimpleFloorTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
diff --git a/src/backends/backendsCommon/test/layerTests/FullyConnectedTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/FullyConnectedTestImpl.cpp
index a3fe8582c8..34bd9ec75e 100644
--- a/src/backends/backendsCommon/test/layerTests/FullyConnectedTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/FullyConnectedTestImpl.cpp
@@ -229,14 +229,14 @@ LayerTestResult<T, 2> FullyConnectedLargeTestCommon(
// Explicit template specializations
//
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 2>
-FullyConnectedTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 2>
+FullyConnectedTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 2>
-FullyConnectedTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 2>
+FullyConnectedTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled);
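
Not part of the patch: the explicit instantiations above rely on ResolveType to map the renamed enum values onto the same storage types as before, which is why the corresponding declarations keep returning LayerTestResult<uint8_t, 2> and LayerTestResult<int16_t, 2>. A compile-time sketch, assuming the ResolveType and armnn type headers these test files already include:

#include <cstdint>
#include <type_traits>

static_assert(std::is_same<armnn::ResolveType<armnn::DataType::QAsymmU8>, uint8_t>::value,
              "QAsymmU8 payloads are stored as uint8_t");
static_assert(std::is_same<armnn::ResolveType<armnn::DataType::QSymmS16>, int16_t>::value,
              "QSymmS16 payloads are stored as int16_t");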
diff --git a/src/backends/backendsCommon/test/layerTests/GatherTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/GatherTestImpl.cpp
index 1ccf51c7d2..c6f58057c5 100644
--- a/src/backends/backendsCommon/test/layerTests/GatherTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/GatherTestImpl.cpp
@@ -315,14 +315,14 @@ LayerTestResult<uint8_t, 1> Gather1dParamsUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return GatherTestHelper<armnn::DataType::QuantisedAsymm8>::Gather1dParamsTestImpl(workloadFactory, memoryManager);
+ return GatherTestHelper<armnn::DataType::QAsymmU8>::Gather1dParamsTestImpl(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 1> Gather1dParamsInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return GatherTestHelper<armnn::DataType::QuantisedSymm16>::Gather1dParamsTestImpl(workloadFactory, memoryManager);
+ return GatherTestHelper<armnn::DataType::QSymmS16>::Gather1dParamsTestImpl(workloadFactory, memoryManager);
}
LayerTestResult<float, 2> GatherMultiDimParamsFloat32Test(
@@ -343,7 +343,7 @@ LayerTestResult<uint8_t, 2> GatherMultiDimParamsUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return GatherTestHelper<armnn::DataType::QuantisedAsymm8>::GatherMultiDimParamsTestImpl(
+ return GatherTestHelper<armnn::DataType::QAsymmU8>::GatherMultiDimParamsTestImpl(
workloadFactory, memoryManager);
}
@@ -351,7 +351,7 @@ LayerTestResult<int16_t, 2> GatherMultiDimParamsInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return GatherTestHelper<armnn::DataType::QuantisedSymm16>::GatherMultiDimParamsTestImpl(
+ return GatherTestHelper<armnn::DataType::QSymmS16>::GatherMultiDimParamsTestImpl(
workloadFactory, memoryManager);
}
@@ -375,7 +375,7 @@ LayerTestResult<uint8_t, 4> GatherMultiDimParamsMultiDimIndicesUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return GatherTestHelper<armnn::DataType::QuantisedAsymm8>::GatherMultiDimParamsMultiDimIndicesTestImpl(
+ return GatherTestHelper<armnn::DataType::QAsymmU8>::GatherMultiDimParamsMultiDimIndicesTestImpl(
workloadFactory, memoryManager);
}
@@ -383,6 +383,6 @@ LayerTestResult<int16_t, 4> GatherMultiDimParamsMultiDimIndicesInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return GatherTestHelper<armnn::DataType::QuantisedSymm16>::GatherMultiDimParamsMultiDimIndicesTestImpl(
+ return GatherTestHelper<armnn::DataType::QSymmS16>::GatherMultiDimParamsMultiDimIndicesTestImpl(
workloadFactory, memoryManager);
}
diff --git a/src/backends/backendsCommon/test/layerTests/L2NormalizationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/L2NormalizationTestImpl.cpp
index 4b16921990..e500a126f6 100644
--- a/src/backends/backendsCommon/test/layerTests/L2NormalizationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/L2NormalizationTestImpl.cpp
@@ -599,7 +599,7 @@ LayerTestResult<int16_t, 4> L2Normalization1dInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout)
{
- return L2Normalization1dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return L2Normalization1dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
1.f,
@@ -614,7 +614,7 @@ LayerTestResult<uint8_t, 4> L2Normalization1dUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout)
{
- return L2Normalization1dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return L2Normalization1dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
1.f,
@@ -644,7 +644,7 @@ LayerTestResult<int16_t, 4> L2Normalization2dInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout)
{
- return L2Normalization1dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return L2Normalization1dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
1.f,
@@ -659,7 +659,7 @@ LayerTestResult<uint8_t, 4> L2Normalization2dUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout)
{
- return L2Normalization1dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return L2Normalization1dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
1.f,
@@ -748,7 +748,7 @@ LayerTestResult<int16_t, 4> L2Normalization3dInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout)
{
- return L2Normalization1dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return L2Normalization1dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
1.f,
@@ -763,7 +763,7 @@ LayerTestResult<uint8_t, 4> L2Normalization3dUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout)
{
- return L2Normalization1dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return L2Normalization1dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
1.f,
@@ -793,7 +793,7 @@ LayerTestResult<int16_t, 4> L2Normalization4dInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout)
{
- return L2Normalization1dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return L2Normalization1dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
1.f,
@@ -808,7 +808,7 @@ LayerTestResult<uint8_t, 4> L2Normalization4dUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout)
{
- return L2Normalization1dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return L2Normalization1dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
1.f,
diff --git a/src/backends/backendsCommon/test/layerTests/LstmTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/LstmTestImpl.cpp
index e755aa54cb..c61a0526a1 100644
--- a/src/backends/backendsCommon/test/layerTests/LstmTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/LstmTestImpl.cpp
@@ -1573,17 +1573,17 @@ LayerTestResult<uint8_t, 2> QuantizedLstmTestImpl(
// Input/Output tensor info
armnn::TensorInfo inputInfo({numBatches , inputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
inputOutputScale,
inputOutputOffset);
armnn::TensorInfo cellStateInfo({numBatches , outputSize},
- armnn::DataType::QuantisedSymm16,
+ armnn::DataType::QSymmS16,
cellStateScale,
cellStateOffset);
armnn::TensorInfo outputStateInfo({numBatches , outputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
inputOutputScale,
inputOutputOffset);
@@ -1635,12 +1635,12 @@ LayerTestResult<uint8_t, 2> QuantizedLstmTestImpl(
// Weights and bias tensor and quantization info
armnn::TensorInfo inputWeightsInfo({outputSize, inputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset);
armnn::TensorInfo recurrentWeightsInfo({outputSize, outputSize},
- armnn::DataType::QuantisedAsymm8,
+ armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset);
@@ -1965,8 +1965,8 @@ LayerTestResult<int16_t, 2> LstmLayerInt16NoCifgNoPeepholeNoProjectionTest(
const float qScale = 1.0f;
const int32_t qOffset = 0;
- const armnn::DataType datatype = armnn::DataType::QuantisedSymm16;
- const armnn::DataType constantDatatype = armnn::DataType::QuantisedAsymm8;
+ const armnn::DataType datatype = armnn::DataType::QSymmS16;
+ const armnn::DataType constantDatatype = armnn::DataType::QAsymmU8;
armnn::TensorInfo inputDesc({2, 2}, datatype);
boost::multi_array<int16_t , 2> input = MakeTensor<int16_t , 2>(
@@ -1995,8 +1995,8 @@ LayerTestResult<int16_t, 2> LstmLayerInt16WithCifgWithPeepholeNoProjectionTest(
const float qScale = 1.0f;
const int32_t qOffset = 0;
- const armnn::DataType datatype = armnn::DataType::QuantisedSymm16;
- const armnn::DataType constantDatatype = armnn::DataType::QuantisedAsymm8;
+ const armnn::DataType datatype = armnn::DataType::QSymmS16;
+ const armnn::DataType constantDatatype = armnn::DataType::QAsymmU8;
armnn::TensorInfo inputDesc({ 2, 2 }, datatype);
boost::multi_array<int16_t, 2> input =
@@ -2026,8 +2026,8 @@ LayerTestResult<int16_t, 2> LstmLayerInt16NoCifgWithPeepholeWithProjectionTest(
const float qScale = 2.0f;
const int32_t qOffset = 0;
- const armnn::DataType datatype = armnn::DataType::QuantisedSymm16;
- const armnn::DataType constantDatatype = armnn::DataType::QuantisedAsymm8;
+ const armnn::DataType datatype = armnn::DataType::QSymmS16;
+ const armnn::DataType constantDatatype = armnn::DataType::QAsymmU8;
armnn::TensorInfo inputDesc({ 2, 5 }, datatype);
boost::multi_array<int16_t, 2> input =
@@ -2068,7 +2068,7 @@ LayerTestResult<int16_t, 2> LstmLayerInt16NoCifgNoPeepholeNoProjectionInt16Const
const float qScale = 1.0f;
const int32_t qOffset = 0;
- const armnn::DataType datatype = armnn::DataType::QuantisedSymm16; // datatype & constants set to QSymm16
+ const armnn::DataType datatype = armnn::DataType::QSymmS16; // datatype & constants set to QSymm16
armnn::TensorInfo inputDesc({2, 2}, datatype);
boost::multi_array<int16_t , 2> input =
@@ -2098,11 +2098,11 @@ LayerTestResult<uint8_t, 2> QuantizedLstmTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- armnn::TensorInfo inputDesc({2, 2}, armnn::DataType::QuantisedAsymm8);
+ armnn::TensorInfo inputDesc({2, 2}, armnn::DataType::QAsymmU8);
boost::multi_array<uint8_t, 2> input = MakeTensor<uint8_t, 2>(inputDesc, std::vector<uint8_t>(
{166, 179, 50, 150}));
- armnn::TensorInfo outputDesc({2, 4}, armnn::DataType::QuantisedAsymm8);
+ armnn::TensorInfo outputDesc({2, 4}, armnn::DataType::QAsymmU8);
boost::multi_array<uint8_t, 2> expectedOutput = MakeTensor<uint8_t, 2>(outputDesc, std::vector<uint8_t>(
{140, 151, 146, 112, 136, 156, 142, 112 }));
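
Not part of the patch: QuantizedLstmTestImpl above is where both renamed types meet in a single workload, 8-bit asymmetric activations alongside the 16-bit symmetric cell state. A sketch using TensorInfo's (shape, type, scale, offset) constructor; the scale and offset values here are illustrative assumptions, not taken from the test.

const unsigned int numBatches = 2;
const unsigned int inputSize  = 2;
const unsigned int outputSize = 4;

// Activations and output state use the asymmetric 8-bit type.
armnn::TensorInfo inputInfo({numBatches, inputSize},
                            armnn::DataType::QAsymmU8,
                            /*scale*/ 0.0078125f, /*offset*/ 128);

// The cell state uses the symmetric 16-bit type, so its offset stays at zero.
armnn::TensorInfo cellStateInfo({numBatches, outputSize},
                                armnn::DataType::QSymmS16,
                                /*scale*/ 0.00048828125f, /*offset*/ 0);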
diff --git a/src/backends/backendsCommon/test/layerTests/MaximumTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/MaximumTestImpl.cpp
index 0218697038..5147cffddb 100644
--- a/src/backends/backendsCommon/test/layerTests/MaximumTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/MaximumTestImpl.cpp
@@ -238,7 +238,7 @@ LayerTestResult<uint8_t, 4> MaximumUint8Test(
4, 4, 4, 4, 5, 5, 5, 5
};
- return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape,
@@ -270,7 +270,7 @@ LayerTestResult<uint8_t, 4> MaximumBroadcast1ElementUint8Test(
7, 8, 9, 10, 11, 12
};
- return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -301,7 +301,7 @@ LayerTestResult<uint8_t, 4> MaximumBroadcast1DVectorUint8Test(
7, 10, 9, 10, 11, 12
};
- return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -327,7 +327,7 @@ LayerTestResult<int16_t, 4> MaximumInt16Test(
std::vector<int16_t> output({ 2, 2, 2, 2, 6, 6, 6, 6,
4, 4, 4, 4, 5, 5, 5, 5 });
- return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape,
@@ -359,7 +359,7 @@ LayerTestResult<int16_t, 4> MaximumBroadcast1ElementInt16Test(
7, 8, 9, 10, 11, 12
};
- return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
@@ -391,7 +391,7 @@ LayerTestResult<int16_t, 4> MaximumBroadcast1DVectorInt16Test(
7, 10, 9, 10, 11, 12
};
- return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MaximumQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
diff --git a/src/backends/backendsCommon/test/layerTests/MinimumTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/MinimumTestImpl.cpp
index ed12c7fa2c..a0a4029115 100644
--- a/src/backends/backendsCommon/test/layerTests/MinimumTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/MinimumTestImpl.cpp
@@ -86,7 +86,7 @@ LayerTestResult<uint8_t, 4> MinimumBroadcast1DVectorUint8Test(
1, 1, 2, 1, 2, 3
};
- return ElementwiseTestHelper<4, armnn::MinimumQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::MinimumQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -226,7 +226,7 @@ LayerTestResult<int16_t, 4> MinimumInt16Test(
3, 3, 3, 3, 4, 4, 4, 4
};
- return ElementwiseTestHelper<4, armnn::MinimumQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MinimumQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape,
@@ -258,7 +258,7 @@ LayerTestResult<int16_t, 4> MinimumBroadcast1ElementInt16Test(
2, 2, 2, 2, 2, 2
};
- return ElementwiseTestHelper<4, armnn::MinimumQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MinimumQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
@@ -290,7 +290,7 @@ LayerTestResult<int16_t, 4> MinimumBroadcast1DVectorInt16Test(
1, 8, 3, 1, 10, 3
};
- return ElementwiseTestHelper<4, armnn::MinimumQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MinimumQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
diff --git a/src/backends/backendsCommon/test/layerTests/MultiplicationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/MultiplicationTestImpl.cpp
index a39e6bd827..d32e0cf89b 100644
--- a/src/backends/backendsCommon/test/layerTests/MultiplicationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/MultiplicationTestImpl.cpp
@@ -216,7 +216,7 @@ LayerTestResult<uint8_t, 4> MultiplicationUint8Test(
};
// Scale/offset chosen to have output values out of range
- return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape,
@@ -254,7 +254,7 @@ LayerTestResult<uint8_t, 4> MultiplicationBroadcast1ElementUint8Test(
14, 16, 18, 20, 22, 24
};
- return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -286,7 +286,7 @@ LayerTestResult<uint8_t, 4> MultiplicationBroadcast1DVectorUint8Test(
7, 16, 27, 10, 22, 36
};
- return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -321,7 +321,7 @@ LayerTestResult<int16_t, 4> MultiplicationInt16Test(
84, 104, 126, 150, 176, 204
};
- return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape,
@@ -353,7 +353,7 @@ LayerTestResult<int16_t, 4> MultiplicationBroadcast1ElementInt16Test(
14, 16, 18, 20, 22, 24
};
- return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
@@ -385,7 +385,7 @@ LayerTestResult<int16_t, 4> MultiplicationBroadcast1DVectorInt16Test(
7, 16, 27, 10, 22, 36
};
- return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::MultiplicationQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
diff --git a/src/backends/backendsCommon/test/layerTests/PadTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/PadTestImpl.cpp
index 3a8d2b7bbf..9239c665eb 100644
--- a/src/backends/backendsCommon/test/layerTests/PadTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/PadTestImpl.cpp
@@ -416,23 +416,23 @@ LayerTestResult<T, 4> Pad4dTestCommon(
// Explicit template specializations
//
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 2>
-Pad2dTestCommon<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 2>
+Pad2dTestCommon<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
float qScale,
int32_t qOffset,
const float customPaddingValue);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 3>
-Pad3dTestCommon<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 3>
+Pad3dTestCommon<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
float qScale,
int32_t qOffset);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-Pad4dTestCommon<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+Pad4dTestCommon<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
float qScale,
@@ -446,28 +446,28 @@ LayerTestResult<uint8_t, 2> PadUint82dTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Pad2dTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 1.0f, 0);
+ return Pad2dTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 1.0f, 0);
}
LayerTestResult<uint8_t, 2> PadUint82dCustomPaddingTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Pad2dTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 1.0f, 0, 1.0f);
+ return Pad2dTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 1.0f, 0, 1.0f);
}
LayerTestResult<uint8_t, 3> PadUint83dTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Pad3dTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 1.0f, 0);
+ return Pad3dTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 1.0f, 0);
}
LayerTestResult<uint8_t, 4> PadUint84dTest(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Pad4dTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 1.0f, 0);
+ return Pad4dTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 1.0f, 0);
}
LayerTestResult<float, 2> PadFloat322dTest(
diff --git a/src/backends/backendsCommon/test/layerTests/Pooling2dTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/Pooling2dTestImpl.cpp
index 160e6582d5..b58e9826b8 100644
--- a/src/backends/backendsCommon/test/layerTests/Pooling2dTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/Pooling2dTestImpl.cpp
@@ -1366,7 +1366,7 @@ LayerTestResult<uint8_t, 4> SimpleMaxPooling2dSize2x2Stride2x2Uint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool forceNoPadding)
{
- return SimpleMaxPooling2dSize2x2Stride2x2TestCommon<armnn::DataType::QuantisedAsymm8>(
+ return SimpleMaxPooling2dSize2x2Stride2x2TestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, forceNoPadding, 3.0f, -5);
}
@@ -1375,7 +1375,7 @@ LayerTestResult<int16_t, 4> SimpleMaxPooling2dSize2x2Stride2x2Int16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool forceNoPadding)
{
- return SimpleMaxPooling2dSize2x2Stride2x2TestCommon<armnn::DataType::QuantisedSymm16>(
+ return SimpleMaxPooling2dSize2x2Stride2x2TestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager, forceNoPadding);
}
@@ -1393,7 +1393,7 @@ LayerTestResult<uint8_t, 4> SimpleMaxPooling2dSize3x3Stride2x4Uint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool forceNoPadding)
{
- return SimpleMaxPooling2dSize3x3Stride2x4TestCommon<armnn::DataType::QuantisedAsymm8>(
+ return SimpleMaxPooling2dSize3x3Stride2x4TestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, forceNoPadding, 0.1f, 128);
}
@@ -1402,7 +1402,7 @@ LayerTestResult<int16_t, 4> SimpleMaxPooling2dSize3x3Stride2x4Int16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool forceNoPadding)
{
- return SimpleMaxPooling2dSize3x3Stride2x4TestCommon<armnn::DataType::QuantisedSymm16>(
+ return SimpleMaxPooling2dSize3x3Stride2x4TestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager, forceNoPadding);
}
@@ -1419,7 +1419,7 @@ LayerTestResult<uint8_t, 4> SimpleMaxPooling2dUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout)
{
- return SimpleMaxPooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, dataLayout);
+ return SimpleMaxPooling2dTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, dataLayout);
}
LayerTestResult<int16_t, 4> SimpleMaxPooling2dInt16Test(
@@ -1427,7 +1427,7 @@ LayerTestResult<int16_t, 4> SimpleMaxPooling2dInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout)
{
- return SimpleMaxPooling2dTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, dataLayout);
+ return SimpleMaxPooling2dTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, dataLayout);
}
LayerTestResult<float, 4> IgnorePaddingSimpleMaxPooling2dTest(
armnn::IWorkloadFactory& workloadFactory,
@@ -1440,7 +1440,7 @@ LayerTestResult<uint8_t, 4> IgnorePaddingSimpleMaxPooling2dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingSimpleMaxPooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return IgnorePaddingSimpleMaxPooling2dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, 1.0f, -5);
}
@@ -1448,7 +1448,7 @@ LayerTestResult<int16_t, 4> IgnorePaddingSimpleMaxPooling2dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingSimpleMaxPooling2dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return IgnorePaddingSimpleMaxPooling2dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager);
}
@@ -1463,7 +1463,7 @@ LayerTestResult<uint8_t, 4> IgnorePaddingMaxPooling2dSize3Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingMaxPooling2dSize3TestCommon<armnn::DataType::QuantisedAsymm8>(
+ return IgnorePaddingMaxPooling2dSize3TestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, 1.0f, -5);
}
@@ -1471,7 +1471,7 @@ LayerTestResult<int16_t, 4> IgnorePaddingMaxPooling2dSize3Int16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingMaxPooling2dSize3TestCommon<armnn::DataType::QuantisedSymm16>(
+ return IgnorePaddingMaxPooling2dSize3TestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager);
}
@@ -1488,7 +1488,7 @@ LayerTestResult<uint8_t, 4> SimpleAveragePooling2dUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout)
{
- return SimpleAveragePooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return SimpleAveragePooling2dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, dataLayout, 0.5, -1);
}
@@ -1497,7 +1497,7 @@ LayerTestResult<int16_t, 4> SimpleAveragePooling2dInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout)
{
- return SimpleAveragePooling2dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return SimpleAveragePooling2dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager, dataLayout);
}
@@ -1521,7 +1521,7 @@ LayerTestResult<uint8_t, 4> LargeTensorsAveragePooling2dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return LargeTensorsAveragePooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return LargeTensorsAveragePooling2dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, 0.5, -1);
}
@@ -1529,7 +1529,7 @@ LayerTestResult<int16_t, 4> LargeTensorsAveragePooling2dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return LargeTensorsAveragePooling2dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return LargeTensorsAveragePooling2dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager);
}
LayerTestResult<float, 4> IgnorePaddingSimpleAveragePooling2dTest(
@@ -1543,7 +1543,7 @@ LayerTestResult<uint8_t, 4> IgnorePaddingSimpleAveragePooling2dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingSimpleAveragePooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return IgnorePaddingSimpleAveragePooling2dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager);
}
@@ -1551,7 +1551,7 @@ LayerTestResult<int16_t, 4> IgnorePaddingSimpleAveragePooling2dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingSimpleAveragePooling2dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return IgnorePaddingSimpleAveragePooling2dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager);
}
@@ -1567,7 +1567,7 @@ LayerTestResult<uint8_t, 4> IgnorePaddingSimpleAveragePooling2dNoPaddingUint8Tes
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingSimpleAveragePooling2dNoPaddingTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return IgnorePaddingSimpleAveragePooling2dNoPaddingTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager);
}
@@ -1575,7 +1575,7 @@ LayerTestResult<int16_t, 4> IgnorePaddingSimpleAveragePooling2dNoPaddingInt16Tes
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingSimpleAveragePooling2dNoPaddingTestCommon<armnn::DataType::QuantisedSymm16>(
+ return IgnorePaddingSimpleAveragePooling2dNoPaddingTestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager);
}
@@ -1590,7 +1590,7 @@ LayerTestResult<uint8_t, 4> IgnorePaddingAveragePooling2dSize3Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingAveragePooling2dSize3TestCommon<armnn::DataType::QuantisedAsymm8>(
+ return IgnorePaddingAveragePooling2dSize3TestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager);
}
@@ -1598,7 +1598,7 @@ LayerTestResult<int16_t, 4> IgnorePaddingAveragePooling2dSize3Int16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingAveragePooling2dSize3TestCommon<armnn::DataType::QuantisedSymm16>(
+ return IgnorePaddingAveragePooling2dSize3TestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager);
}
@@ -1615,7 +1615,7 @@ LayerTestResult<uint8_t, 4> SimpleL2Pooling2dUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout)
{
- return SimpleL2Pooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, dataLayout);
+ return SimpleL2Pooling2dTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, dataLayout);
}
LayerTestResult<int16_t, 4> SimpleL2Pooling2dInt16Test(
@@ -1623,7 +1623,7 @@ LayerTestResult<int16_t, 4> SimpleL2Pooling2dInt16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout)
{
- return SimpleL2Pooling2dTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, dataLayout);
+ return SimpleL2Pooling2dTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, dataLayout);
}
LayerTestResult<float, 4> L2Pooling2dSize3Stride1Test(
@@ -1637,14 +1637,14 @@ LayerTestResult<uint8_t, 4> L2Pooling2dSize3Stride1Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize3Stride1TestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return L2Pooling2dSize3Stride1TestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> L2Pooling2dSize3Stride1Int16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize3Stride1TestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return L2Pooling2dSize3Stride1TestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> L2Pooling2dSize3Stride3Test(
@@ -1658,14 +1658,14 @@ LayerTestResult<uint8_t, 4> L2Pooling2dSize3Stride3Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize3Stride3TestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return L2Pooling2dSize3Stride3TestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> L2Pooling2dSize3Stride3Int16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize3Stride3TestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return L2Pooling2dSize3Stride3TestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> L2Pooling2dSize3Stride4Test(
armnn::IWorkloadFactory& workloadFactory,
@@ -1678,14 +1678,14 @@ LayerTestResult<uint8_t, 4> L2Pooling2dSize3Stride4Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize3Stride4TestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return L2Pooling2dSize3Stride4TestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> L2Pooling2dSize3Stride4Int16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize3Stride4TestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return L2Pooling2dSize3Stride4TestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> L2Pooling2dSize7Test(
@@ -1699,14 +1699,14 @@ LayerTestResult<uint8_t, 4> L2Pooling2dSize7Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize7TestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return L2Pooling2dSize7TestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> L2Pooling2dSize7Int16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize7TestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return L2Pooling2dSize7TestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> L2Pooling2dSize9Test(
@@ -1720,14 +1720,14 @@ LayerTestResult<uint8_t, 4> L2Pooling2dSize9Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize9TestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return L2Pooling2dSize9TestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> L2Pooling2dSize9Int16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return L2Pooling2dSize9TestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return L2Pooling2dSize9TestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> IgnorePaddingSimpleL2Pooling2dTest(
armnn::IWorkloadFactory& workloadFactory,
@@ -1740,14 +1740,14 @@ LayerTestResult<uint8_t, 4> IgnorePaddingSimpleL2Pooling2dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingSimpleL2Pooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return IgnorePaddingSimpleL2Pooling2dTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> IgnorePaddingSimpleL2Pooling2dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingSimpleL2Pooling2dTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return IgnorePaddingSimpleL2Pooling2dTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> IgnorePaddingL2Pooling2dSize3Test(
@@ -1761,14 +1761,14 @@ LayerTestResult<uint8_t, 4> IgnorePaddingL2Pooling2dSize3Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingL2Pooling2dSize3TestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return IgnorePaddingL2Pooling2dSize3TestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> IgnorePaddingL2Pooling2dSize3Int16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return IgnorePaddingL2Pooling2dSize3TestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return IgnorePaddingL2Pooling2dSize3TestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> AsymmetricNonSquarePooling2dTest(
@@ -1782,14 +1782,14 @@ LayerTestResult<uint8_t, 4> AsymmetricNonSquarePooling2dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return AsymmetricNonSquarePooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return AsymmetricNonSquarePooling2dTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> AsymmetricNonSquarePooling2dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return AsymmetricNonSquarePooling2dTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return AsymmetricNonSquarePooling2dTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> ComparePooling2dTest(
@@ -1808,7 +1808,7 @@ LayerTestResult<uint8_t, 4> ComparePooling2dUint8Test(
armnn::IWorkloadFactory& refWorkloadFactory,
armnn::PoolingAlgorithm poolingType)
{
- return ComparePooling2dTestCommon<armnn::DataType::QuantisedAsymm8>(
+ return ComparePooling2dTestCommon<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, refWorkloadFactory, poolingType, 0.1f, 128);
}
@@ -1818,6 +1818,6 @@ LayerTestResult<int16_t, 4> ComparePooling2dInt16Test(
armnn::IWorkloadFactory& refWorkloadFactory,
armnn::PoolingAlgorithm poolingType)
{
- return ComparePooling2dTestCommon<armnn::DataType::QuantisedSymm16>(
+ return ComparePooling2dTestCommon<armnn::DataType::QSymmS16>(
workloadFactory, memoryManager, refWorkloadFactory, poolingType);
}
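
The pooling, quantize, reshape and resize hunks in this section all follow one mechanical pattern: each test helper is a template over armnn::DataType, and armnn::ResolveType maps the chosen enum value to the element type of the tensors, so the rename only changes the spelling at the instantiation and call sites. Below is a minimal, self-contained sketch of that dispatch idiom using local stand-ins; the DataType values and ResolveType trait here are simplified mirrors for illustration, not armnn's actual definitions.

// Stand-in enum and ResolveType-style trait mirroring how the layer tests
// above pick a storage type from the renamed enum values.
#include <cstdint>
#include <iostream>

namespace sketch {

enum class DataType { Float32, QAsymmU8, QSymmS16 };   // hypothetical mirror of the armnn enum

template <DataType DT> struct ResolveTypeImpl;
template <> struct ResolveTypeImpl<DataType::Float32>  { using Type = float;   };
template <> struct ResolveTypeImpl<DataType::QAsymmU8> { using Type = uint8_t; };
template <> struct ResolveTypeImpl<DataType::QSymmS16> { using Type = int16_t; };

template <DataType DT>
using ResolveType = typename ResolveTypeImpl<DT>::Type;

// Toy "layer test" in the shape of SimpleL2Pooling2dTestCommon<...> and
// friends: the enum parameter chooses the element type of the buffers.
template <DataType DT>
ResolveType<DT> DummyLayerTest()
{
    return static_cast<ResolveType<DT>>(42);   // placeholder result; real tests fill and compare tensors
}

} // namespace sketch

int main()
{
    // After the rename, call sites spell the types QAsymmU8 / QSymmS16.
    std::cout << +sketch::DummyLayerTest<sketch::DataType::QAsymmU8>() << "\n";
    std::cout <<  sketch::DummyLayerTest<sketch::DataType::QSymmS16>() << "\n";
    return 0;
}
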
diff --git a/src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp
index e23f92a5a9..ab6a35b16f 100644
--- a/src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/QuantizeTestImpl.cpp
@@ -130,14 +130,14 @@ LayerTestResult<uint8_t, 4> QuantizeSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return QuantizeSimpleTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return QuantizeSimpleTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> QuantizeClampUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return QuantizeClampTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return QuantizeClampTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int8_t, 4> QuantizeClampInt8Test(
@@ -151,5 +151,5 @@ LayerTestResult<int16_t, 4> QuantizeClampInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return QuantizeClampTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return QuantizeClampTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
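
The scale/offset pairs threaded through the quantised helpers above (for example 0.5f and -1 for the average pooling tests, or 0.1f and 128 for ComparePooling2dUint8Test) are affine quantisation parameters. A small sketch of the arithmetic they imply, under the conventional reading real = scale * (quantised - offset); this is illustrative only, not armnn's quantize implementation, and the QuantizeU8/DequantizeU8 names are placeholders.

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <iostream>

// Asymmetric uint8 quantisation as used by QAsymmU8 tensors; QSymmS16 is the
// symmetric variant with the offset fixed at 0 and int16_t storage.
uint8_t QuantizeU8(float value, float scale, int32_t offset)
{
    int32_t q = static_cast<int32_t>(std::round(value / scale)) + offset;
    return static_cast<uint8_t>(std::min(255, std::max(0, q)));
}

float DequantizeU8(uint8_t q, float scale, int32_t offset)
{
    return scale * (static_cast<int32_t>(q) - offset);
}

int main()
{
    const float   scale  = 0.1f;   // parameters in the style of the
    const int32_t offset = 128;    // ComparePooling2dUint8Test call above
    uint8_t q = QuantizeU8(1.5f, scale, offset);
    std::cout << int(q) << " -> " << DequantizeU8(q, scale, offset) << "\n";   // 143 -> 1.5
    return 0;
}
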
diff --git a/src/backends/backendsCommon/test/layerTests/ReshapeTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ReshapeTestImpl.cpp
index 485e7eab80..894ece65a5 100644
--- a/src/backends/backendsCommon/test/layerTests/ReshapeTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ReshapeTestImpl.cpp
@@ -176,13 +176,13 @@ SimpleReshapeTest<armnn::DataType::Float32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-SimpleReshapeTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+SimpleReshapeTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-SimpleReshapeTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+SimpleReshapeTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
@@ -191,12 +191,12 @@ Reshape5dTest<armnn::DataType::Float32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 5>
-Reshape5dTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 5>
+Reshape5dTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 5>
-Reshape5dTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 5>
+Reshape5dTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
diff --git a/src/backends/backendsCommon/test/layerTests/ResizeTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ResizeTestImpl.cpp
index 080155eebf..0389e82c7a 100644
--- a/src/backends/backendsCommon/test/layerTests/ResizeTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ResizeTestImpl.cpp
@@ -664,62 +664,62 @@ ResizeNearestNeighborMagTest<armnn::DataType::Float16>(
int32_t outQuantOffset);
// QAsymm8
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeBilinearNopTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+ResizeBilinearNopTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-SimpleResizeBilinearTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+SimpleResizeBilinearTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeBilinearSqMinTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+ResizeBilinearSqMinTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeBilinearMinTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+ResizeBilinearMinTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeBilinearMagTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+ResizeBilinearMagTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeNearestNeighborNopTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+ResizeNearestNeighborNopTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-SimpleResizeNearestNeighborTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+SimpleResizeNearestNeighborTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeNearestNeighborSqMinTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+ResizeNearestNeighborSqMinTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeNearestNeighborMinTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+ResizeNearestNeighborMinTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-ResizeNearestNeighborMagTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+ResizeNearestNeighborMagTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout,
@@ -729,62 +729,62 @@ ResizeNearestNeighborMagTest<armnn::DataType::QuantisedAsymm8>(
int32_t outQuantOffset);
// QSymm16
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-ResizeBilinearNopTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+ResizeBilinearNopTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-SimpleResizeBilinearTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+SimpleResizeBilinearTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-ResizeBilinearSqMinTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+ResizeBilinearSqMinTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-ResizeBilinearMinTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+ResizeBilinearMinTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-ResizeBilinearMagTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+ResizeBilinearMagTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-ResizeNearestNeighborNopTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+ResizeNearestNeighborNopTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-SimpleResizeNearestNeighborTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+SimpleResizeNearestNeighborTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-ResizeNearestNeighborSqMinTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+ResizeNearestNeighborSqMinTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-ResizeNearestNeighborMinTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+ResizeNearestNeighborMinTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-ResizeNearestNeighborMagTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+ResizeNearestNeighborMagTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout dataLayout,
diff --git a/src/backends/backendsCommon/test/layerTests/RsqrtTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/RsqrtTestImpl.cpp
index 24a3b21e96..4107e13eb8 100644
--- a/src/backends/backendsCommon/test/layerTests/RsqrtTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/RsqrtTestImpl.cpp
@@ -227,13 +227,13 @@ Rsqrt2dTest<armnn::DataType::Float16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 2>
-Rsqrt2dTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 2>
+Rsqrt2dTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 2>
-Rsqrt2dTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 2>
+Rsqrt2dTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
@@ -247,13 +247,13 @@ Rsqrt3dTest<armnn::DataType::Float16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 3>
-Rsqrt3dTest<armnn::DataType::QuantisedAsymm8>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 3>
+Rsqrt3dTest<armnn::DataType::QAsymmU8>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 3>
-Rsqrt3dTest<armnn::DataType::QuantisedSymm16>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 3>
+Rsqrt3dTest<armnn::DataType::QSymmS16>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
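
The Reshape, Resize and Rsqrt changes above all land on explicit instantiation declarations: the templated test body is defined once in the .cpp, and one "template LayerTestResult<...> Foo<DataType>(...)" line per data type forces the compiler to emit that specialisation for other translation units to link against, which is why the rename has to touch every such line. A stripped-down sketch of the same idiom; MakeRamp is a made-up placeholder, not an armnn symbol.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Template whose definition lives in this .cpp; callers only ever need a
// fixed set of element types, so the explicit instantiations below suffice.
template <typename T>
std::vector<T> MakeRamp(std::size_t n)
{
    std::vector<T> v(n);
    for (std::size_t i = 0; i < n; ++i) { v[i] = static_cast<T>(i); }
    return v;
}

// One explicit instantiation per supported element type, in the same spirit
// as the template LayerTestResult<...> SimpleReshapeTest<...> lines above.
template std::vector<float>   MakeRamp<float>(std::size_t);
template std::vector<uint8_t> MakeRamp<uint8_t>(std::size_t);
template std::vector<int16_t> MakeRamp<int16_t>(std::size_t);

int main()
{
    std::cout << int(MakeRamp<uint8_t>(4).back()) << "\n";   // 3
    return 0;
}
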
diff --git a/src/backends/backendsCommon/test/layerTests/SliceTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/SliceTestImpl.cpp
index 65b17164f3..fc78074a43 100644
--- a/src/backends/backendsCommon/test/layerTests/SliceTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/SliceTestImpl.cpp
@@ -246,48 +246,48 @@ LayerTestResult<float, 1> Slice1dFloat32Test(armnn::IWorkloadFactory& workloadFa
LayerTestResult<uint8_t, 4> Slice4dUint8Test(armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Slice4dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return Slice4dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 3> Slice3dUint8Test(armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Slice3dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return Slice3dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 2> Slice2dUint8Test(armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Slice2dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return Slice2dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 1> Slice1dUint8Test(armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Slice1dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return Slice1dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
// Int16 tests
LayerTestResult<int16_t, 4> Slice4dInt16Test(armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Slice4dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return Slice4dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 3> Slice3dInt16Test(armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Slice3dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return Slice3dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 2> Slice2dInt16Test(armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Slice2dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return Slice2dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 1> Slice1dInt16Test(armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return Slice1dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return Slice1dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
diff --git a/src/backends/backendsCommon/test/layerTests/SoftmaxTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/SoftmaxTestImpl.cpp
index 2a1aa76fce..4147cc8516 100644
--- a/src/backends/backendsCommon/test/layerTests/SoftmaxTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/SoftmaxTestImpl.cpp
@@ -602,7 +602,7 @@ LayerTestResult<uint8_t,2> SimpleSoftmaxUint8Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
float beta)
{
- return SimpleSoftmaxTestImpl<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, beta);
+ return SimpleSoftmaxTestImpl<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, beta);
}
LayerTestResult<uint8_t,3> Simple3dSoftmaxUint8Test(
@@ -611,7 +611,7 @@ LayerTestResult<uint8_t,3> Simple3dSoftmaxUint8Test(
float beta)
{
Simple3dSoftmaxOutputData data;
- return Simple3dSoftmaxTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return Simple3dSoftmaxTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
beta,
@@ -627,7 +627,7 @@ LayerTestResult<uint8_t,4> Simple4dSoftmaxUint8Test(
{
Simple4dSoftmaxData data;
- return Simple4dSoftmaxTestImpl<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, beta,
+ return Simple4dSoftmaxTestImpl<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, beta,
data.inputShape, data.outputData, data.inputData);
}
@@ -664,7 +664,7 @@ LayerTestResult<int16_t,2> SimpleSoftmaxUint16Test(
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
float beta)
{
- return SimpleSoftmaxTestImpl<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, beta);
+ return SimpleSoftmaxTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, beta);
}
LayerTestResult<int16_t,3> Simple3dSoftmaxUint16Test(
@@ -673,7 +673,7 @@ LayerTestResult<int16_t,3> Simple3dSoftmaxUint16Test(
float beta)
{
Simple3dSoftmaxOutputData data;
- return Simple3dSoftmaxTestImpl<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, beta,
+ return Simple3dSoftmaxTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, beta,
data.inputShape, data.outputData, data.inputData);
}
@@ -684,7 +684,7 @@ LayerTestResult<int16_t,4> Simple4dSoftmaxUint16Test(
{
Simple4dSoftmaxData data;
- return Simple4dSoftmaxTestImpl<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, beta,
+ return Simple4dSoftmaxTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, beta,
data.inputShape, data.outputData, data.inputData);
}
@@ -704,6 +704,6 @@ LayerTestResult<uint8_t,2> CompareSoftmaxUint8Test(
armnn::IWorkloadFactory& refWorkloadFactory,
float beta)
{
- return CompareSoftmaxTestImpl<armnn::DataType::QuantisedAsymm8>(
+ return CompareSoftmaxTestImpl<armnn::DataType::QAsymmU8>(
workloadFactory, memoryManager, refWorkloadFactory, beta);
}
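
The softmax hunks carry an extra float beta argument alongside the data type. Under the conventional definition that parameter scales the exponent, y_i = exp(beta * x_i) / sum_j exp(beta * x_j); the float reference below is illustrative only, and the quantised variants above run the same function through the tensors' scale and offset.

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

// Reference softmax with a beta parameter, shifted by the max for stability.
std::vector<float> Softmax(const std::vector<float>& x, float beta)
{
    float maxVal = *std::max_element(x.begin(), x.end());

    std::vector<float> y(x.size());
    float sum = 0.0f;
    for (std::size_t i = 0; i < x.size(); ++i)
    {
        y[i] = std::exp(beta * (x[i] - maxVal));
        sum += y[i];
    }
    for (float& v : y) { v /= sum; }
    return y;
}

int main()
{
    for (float v : Softmax({1.0f, 2.0f, 3.0f}, 1.0f)) { std::cout << v << ' '; }
    std::cout << "\n";   // approximately 0.090 0.245 0.665
    return 0;
}
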
diff --git a/src/backends/backendsCommon/test/layerTests/SpaceToBatchNdTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/SpaceToBatchNdTestImpl.cpp
index 2793875c5b..afb4796703 100644
--- a/src/backends/backendsCommon/test/layerTests/SpaceToBatchNdTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/SpaceToBatchNdTestImpl.cpp
@@ -337,28 +337,28 @@ LayerTestResult<uint8_t, 4> SpaceToBatchNdSimpleUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdSimpleTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return SpaceToBatchNdSimpleTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> SpaceToBatchNdMultiChannelsUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdMultiChannelsTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return SpaceToBatchNdMultiChannelsTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> SpaceToBatchNdMultiBlockUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdMultiBlockTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return SpaceToBatchNdMultiBlockTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> SpaceToBatchNdPaddingUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdPaddingTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return SpaceToBatchNdPaddingTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<float, 4> SpaceToBatchNdSimpleNhwcFloat32Test(
@@ -421,82 +421,82 @@ LayerTestResult<uint8_t, 4> SpaceToBatchNdSimpleNhwcUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdSimpleNhwcTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return SpaceToBatchNdSimpleNhwcTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> SpaceToBatchNdMultiChannelsNhwcUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdMultiChannelsNhwcTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return SpaceToBatchNdMultiChannelsNhwcTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> SpaceToBatchNdMultiBlockNhwcUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdMultiBlockNhwcTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return SpaceToBatchNdMultiBlockNhwcTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> SpaceToBatchNdPaddingNhwcUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdPaddingNhwcTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return SpaceToBatchNdPaddingNhwcTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> SpaceToBatchNdSimpleUint16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdSimpleTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return SpaceToBatchNdSimpleTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> SpaceToBatchNdMultiChannelsUint16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdMultiChannelsTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return SpaceToBatchNdMultiChannelsTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> SpaceToBatchNdMultiBlockUint16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdMultiBlockTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return SpaceToBatchNdMultiBlockTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> SpaceToBatchNdPaddingUint16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdPaddingTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return SpaceToBatchNdPaddingTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> SpaceToBatchNdSimpleNhwcUint16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdSimpleNhwcTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return SpaceToBatchNdSimpleNhwcTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> SpaceToBatchNdMultiChannelsNhwcUint16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdMultiChannelsNhwcTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return SpaceToBatchNdMultiChannelsNhwcTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> SpaceToBatchNdMultiBlockNhwcUint16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdMultiBlockNhwcTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return SpaceToBatchNdMultiBlockNhwcTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> SpaceToBatchNdPaddingNhwcUint16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToBatchNdPaddingNhwcTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return SpaceToBatchNdPaddingNhwcTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
diff --git a/src/backends/backendsCommon/test/layerTests/SpaceToDepthTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/SpaceToDepthTestImpl.cpp
index b6bf530da3..59e1481ad1 100644
--- a/src/backends/backendsCommon/test/layerTests/SpaceToDepthTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/SpaceToDepthTestImpl.cpp
@@ -159,7 +159,7 @@ LayerTestResult<uint8_t, 4> SpaceToDepthNhwcAsymmQ8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToDepthSimpleTest1<armnn::DataType::QuantisedAsymm8>(
+ return SpaceToDepthSimpleTest1<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager);
}
@@ -168,7 +168,7 @@ LayerTestResult<uint8_t, 4> SpaceToDepthNchwAsymmQ8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToDepthSimpleTest1<armnn::DataType::QuantisedAsymm8>(
+ return SpaceToDepthSimpleTest1<armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
armnn::DataLayout::NCHW);
@@ -235,7 +235,7 @@ LayerTestResult<int16_t, 4> SpaceToDepthNhwcQSymm16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToDepthSimpleTest2<armnn::DataType::QuantisedSymm16>(
+ return SpaceToDepthSimpleTest2<armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager);
}
@@ -244,7 +244,7 @@ LayerTestResult<int16_t, 4> SpaceToDepthNchwQSymm16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SpaceToDepthSimpleTest2<armnn::DataType::QuantisedSymm16>(
+ return SpaceToDepthSimpleTest2<armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
armnn::DataLayout::NCHW);
diff --git a/src/backends/backendsCommon/test/layerTests/SplitterTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/SplitterTestImpl.cpp
index c8c2f9c7d1..ef81a1dd1d 100644
--- a/src/backends/backendsCommon/test/layerTests/SplitterTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/SplitterTestImpl.cpp
@@ -341,14 +341,14 @@ std::vector<LayerTestResult<uint8_t,3>> SplitterUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SplitterTestCommon<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 1.0f, 0);
+ return SplitterTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 1.0f, 0);
}
std::vector<LayerTestResult<int16_t,3>> SplitterInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return SplitterTestCommon<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 1.0f, 0);
+ return SplitterTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 1.0f, 0);
}
LayerTestResult<float, 3> CopyViaSplitterFloat32Test(
@@ -369,12 +369,12 @@ LayerTestResult<uint8_t, 3> CopyViaSplitterUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return CopyViaSplitterTestImpl<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager, 1.0f, 0);
+ return CopyViaSplitterTestImpl<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 1.0f, 0);
}
LayerTestResult<int16_t, 3> CopyViaSplitterInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return CopyViaSplitterTestImpl<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager, 1.0f, 0);
+ return CopyViaSplitterTestImpl<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 1.0f, 0);
}
diff --git a/src/backends/backendsCommon/test/layerTests/StridedSliceTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/StridedSliceTestImpl.cpp
index 23f5df0df9..c6c330e875 100644
--- a/src/backends/backendsCommon/test/layerTests/StridedSliceTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/StridedSliceTestImpl.cpp
@@ -959,42 +959,42 @@ LayerTestResult<uint8_t, 4> StridedSlice4dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice4dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSlice4dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> StridedSlice4dReverseUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice4dReverseTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSlice4dReverseTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> StridedSliceSimpleStrideUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceSimpleStrideTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSliceSimpleStrideTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 4> StridedSliceSimpleRangeMaskUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceSimpleRangeMaskTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSliceSimpleRangeMaskTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 2> StridedSliceShrinkAxisMaskUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSliceShrinkAxisMaskTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 2> StridedSliceShrinkAxisMaskBitPosition0Dim3Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition0Dim3Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition0Dim3Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1002,7 +1002,7 @@ LayerTestResult<uint8_t, 3> StridedSliceShrinkAxisMaskBitPosition0Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition0Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition0Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1010,7 +1010,7 @@ LayerTestResult<uint8_t, 3> StridedSliceShrinkAxisMaskBitPosition1Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition1Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition1Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1018,7 +1018,7 @@ LayerTestResult<uint8_t, 3> StridedSliceShrinkAxisMaskBitPosition2Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition2Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition2Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1026,7 +1026,7 @@ LayerTestResult<uint8_t, 3> StridedSliceShrinkAxisMaskBitPosition3Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition3Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition3Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1034,7 +1034,7 @@ LayerTestResult<uint8_t, 2> StridedSliceShrinkAxisMaskBitPosition0And1Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition0And1Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition0And1Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1042,7 +1042,7 @@ LayerTestResult<uint8_t, 2> StridedSliceShrinkAxisMaskBitPosition0And2Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition0And2Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition0And2Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1050,7 +1050,7 @@ LayerTestResult<uint8_t, 2> StridedSliceShrinkAxisMaskBitPosition0And3Uint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition0And3Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition0And3Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1058,7 +1058,7 @@ LayerTestResult<uint8_t, 1> StridedSliceShrinkAxisMaskBitPosition0And1And3Uint8T
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskBitPosition0And1And3Test<armnn::DataType::QuantisedAsymm8>(workloadFactory,
+ return StridedSliceShrinkAxisMaskBitPosition0And1And3Test<armnn::DataType::QAsymmU8>(workloadFactory,
memoryManager);
}
@@ -1066,89 +1066,89 @@ LayerTestResult<uint8_t, 3> StridedSlice3dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice3dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSlice3dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 3> StridedSlice3dReverseUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice3dReverseTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSlice3dReverseTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 2> StridedSlice2dUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice2dTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSlice2dTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<uint8_t, 2> StridedSlice2dReverseUint8Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice2dReverseTest<armnn::DataType::QuantisedAsymm8>(workloadFactory, memoryManager);
+ return StridedSlice2dReverseTest<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> StridedSlice4dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice4dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSlice4dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> StridedSlice4dReverseInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice4dReverseTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSlice4dReverseTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> StridedSliceSimpleStrideInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceSimpleStrideTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSliceSimpleStrideTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 4> StridedSliceSimpleRangeMaskInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceSimpleRangeMaskTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSliceSimpleRangeMaskTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 2> StridedSliceShrinkAxisMaskInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSliceShrinkAxisMaskTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSliceShrinkAxisMaskTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 3> StridedSlice3dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice3dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSlice3dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 3> StridedSlice3dReverseInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice3dReverseTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSlice3dReverseTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 2> StridedSlice2dInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice2dTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSlice2dTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
LayerTestResult<int16_t, 2> StridedSlice2dReverseInt16Test(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
{
- return StridedSlice2dReverseTest<armnn::DataType::QuantisedSymm16>(workloadFactory, memoryManager);
+ return StridedSlice2dReverseTest<armnn::DataType::QSymmS16>(workloadFactory, memoryManager);
}
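
The long run of ShrinkAxisMaskBitPosition tests above reflects strided slice's shrink-axis mask: under the usual reading, bit i set means dimension i is sliced down to one element and dropped from the output, so each extra bit lowers the output rank by one. That is why, for the 4-d inputs, the return types fall from LayerTestResult<uint8_t, 3> with one bit set to LayerTestResult<uint8_t, 1> with three bits set. A toy rank calculation under that reading; OutputRank is a placeholder, not armnn's shape inference.

#include <cstdint>
#include <iostream>

// Count the bits of the shrink-axis mask that fall inside the input rank and
// remove one dimension per set bit.
unsigned OutputRank(unsigned inputRank, uint32_t shrinkAxisMask)
{
    unsigned removed = 0;
    for (unsigned i = 0; i < inputRank; ++i)
    {
        if (shrinkAxisMask & (1u << i)) { ++removed; }
    }
    return inputRank - removed;
}

int main()
{
    // Bits 0, 1 and 3 set on a 4-d input leave a 1-d output, matching the
    // LayerTestResult<uint8_t, 1> ...BitPosition0And1And3... signature above.
    std::cout << OutputRank(4, 0b1011) << "\n";   // 1
    return 0;
}
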
diff --git a/src/backends/backendsCommon/test/layerTests/SubtractionTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/SubtractionTestImpl.cpp
index d180021639..525fb46d56 100644
--- a/src/backends/backendsCommon/test/layerTests/SubtractionTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/SubtractionTestImpl.cpp
@@ -27,7 +27,7 @@ LayerTestResult<uint8_t, 4> SubtractionUint8Test(
std::vector<uint8_t> input1 = { 1, 2, 1, 2 };
std::vector<uint8_t> output = { 3, 3, 5, 5 };
- return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -57,7 +57,7 @@ LayerTestResult<uint8_t, 4> SubtractionBroadcast1ElementUint8Test(
std::vector<uint8_t> output = { 5, 6, 7, 8 };
- return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -87,7 +87,7 @@ LayerTestResult<uint8_t, 4> SubtractionBroadcastUint8Test(
std::vector<uint8_t> output = { 8, 11, 12, 15 };
- return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QuantisedAsymm8>(
+ return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QAsymmU8>(
workloadFactory,
memoryManager,
shape0,
@@ -254,7 +254,7 @@ LayerTestResult<int16_t, 4> SubtractionInt16Test(
std::vector<int16_t> input1 = { 1, 2, 1, 2 };
std::vector<int16_t> output = { 3, 3, 5, 5 };
- return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape,
@@ -284,7 +284,7 @@ LayerTestResult<int16_t, 4> SubtractionBroadcast1ElementInt16Test(
std::vector<int16_t> output = { 3, 4, 5, 6 };
- return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
@@ -314,7 +314,7 @@ LayerTestResult<int16_t, 4> SubtractionBroadcastInt16Test(
std::vector<int16_t> output = { 8, 11, 12, 15 };
- return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QuantisedSymm16>(
+ return ElementwiseTestHelper<4, armnn::SubtractionQueueDescriptor, armnn::DataType::QSymmS16>(
workloadFactory,
memoryManager,
shape0,
diff --git a/src/backends/backendsCommon/test/layerTests/TransposeConvolution2dTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/TransposeConvolution2dTestImpl.cpp
index 4b4894f4d2..3ac25f0534 100644
--- a/src/backends/backendsCommon/test/layerTests/TransposeConvolution2dTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/TransposeConvolution2dTestImpl.cpp
@@ -566,7 +566,7 @@ LayerTestResult<uint8_t, 4> TransposeConvolution2dPerAxisQuantTest(
{
using namespace armnn;
- const DataType inputType = DataType::QuantisedAsymm8;
+ const DataType inputType = DataType::QAsymmU8;
const DataType kernelType = DataType::QuantizedSymm8PerAxis;
const DataType biasType = DataType::Signed32;
@@ -672,15 +672,15 @@ SimpleTransposeConvolution2dTest<armnn::DataType::Float32, armnn::DataType::Floa
bool biasEnabled,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-SimpleTransposeConvolution2dTest<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+SimpleTransposeConvolution2dTest<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-SimpleTransposeConvolution2dTest<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+SimpleTransposeConvolution2dTest<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled,
@@ -693,15 +693,15 @@ PaddedTransposeConvolution2dTest<armnn::DataType::Float32, armnn::DataType::Floa
bool biasEnabled,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-PaddedTransposeConvolution2dTest<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+PaddedTransposeConvolution2dTest<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-PaddedTransposeConvolution2dTest<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+PaddedTransposeConvolution2dTest<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled,
@@ -714,15 +714,15 @@ StridedTransposeConvolution2dTest<armnn::DataType::Float32, armnn::DataType::Flo
bool biasEnabled,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-StridedTransposeConvolution2dTest<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+StridedTransposeConvolution2dTest<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-StridedTransposeConvolution2dTest<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+StridedTransposeConvolution2dTest<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
bool biasEnabled,
@@ -734,14 +734,14 @@ MultiChannelTransposeConvolution2dTest<armnn::DataType::Float32, armnn::DataType
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedAsymm8>, 4>
-MultiChannelTransposeConvolution2dTest<armnn::DataType::QuantisedAsymm8, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 4>
+MultiChannelTransposeConvolution2dTest<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout);
-template LayerTestResult<armnn::ResolveType<armnn::DataType::QuantisedSymm16>, 4>
-MultiChannelTransposeConvolution2dTest<armnn::DataType::QuantisedSymm16, armnn::DataType::Signed32>(
+template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 4>
+MultiChannelTransposeConvolution2dTest<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
armnn::IWorkloadFactory& workloadFactory,
const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
const armnn::DataLayout layout);