author     Finn Williams <Finn.Williams@arm.com>    2021-02-22 15:13:12 +0000
committer  Jim Flynn <jim.flynn@arm.com>            2021-03-03 17:06:43 +0000
commit     f806c4d075814a9dc9d206a4db123d3060ad7ebd (patch)
tree       a110c106598a6830d0862526742314f5048b8acb /tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
parent     82c59d75ef0f191887fae1cc2864bbf4b37ac0c5 (diff)
IVGCVSW-5612 Fix tiny_wav2letter_relu_fixed_int8 delegate output (experimental/abi-tests)

* Fix delegate per-channel quantization
* Change delegate to check reshape options before inputs
* Add int8 "qsymms8" option to ExecuteNetwork
* Add option to run ExecuteNetwork on tflite without the delegate

!referencetests:301301

Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Change-Id: If3e12599b17aff1199d7ab0a55e1c901e480083d
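For reference, the sketch below shows the pattern the ExecuteNetwork side of this change follows: a new std::vector<int8_t> alternative in the tensor-data variant, a parser for whitespace-separated int8 values, and printing of the parsed values. It is a standalone, simplified illustration rather than the ArmNN code itself: std::variant stands in for mapbox::util::variant, a plain std::stoi/static_cast stands in for armnn::numeric_cast, and the free function ParseQSymmS8 is a hypothetical stand-in for the ParseDataArray<armnn::DataType::QSymmS8> specialization added in the diff.

// Standalone sketch (not the ArmNN sources). Mirrors the pattern the patch adds,
// using std::variant instead of mapbox::util::variant so it compiles on its own (C++17).
#include <cstdint>
#include <cstdio>
#include <istream>
#include <sstream>
#include <string>
#include <variant>
#include <vector>

// Tensor-data container with the new int8 alternative, analogous to the
// extended TContainer in the patch.
using TContainer = std::variant<std::vector<float>,
                                std::vector<int>,
                                std::vector<unsigned char>,
                                std::vector<int8_t>>;

// Parse whitespace-separated values from a stream into a vector<int8_t>,
// analogous to the new ParseDataArray<armnn::DataType::QSymmS8> specialization.
std::vector<int8_t> ParseQSymmS8(std::istream& stream)
{
    std::vector<int8_t> result;
    std::string token;
    while (stream >> token)
    {
        result.push_back(static_cast<int8_t>(std::stoi(token)));
    }
    return result;
}

int main()
{
    std::istringstream input("-128 -1 0 1 127");
    TContainer tensorData = ParseQSymmS8(input); // the "qsymms8" branch of PopulateTensorWithData

    // Print the int8 values, analogous to the new TensorPrinter overload.
    for (int8_t value : std::get<std::vector<int8_t>>(tensorData))
    {
        printf("%d ", value);
    }
    printf("\n");
    return 0;
}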
Diffstat (limited to 'tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp')
-rw-r--r--   tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp   28
1 file changed, 27 insertions(+), 1 deletion(-)
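As background for the "per-channel quantization" item in the commit message, the snippet below sketches symmetric per-channel int8 quantization, which is what the QSymmS8 data type denotes: each output channel has its own scale and the zero point is fixed at 0. It is an illustrative, self-contained example under an assumed [channels][elementsPerChannel] row-major layout; the function name QuantizePerChannel and its signature are inventions for this sketch, not the delegate's API.

// Illustrative only: standalone symmetric per-channel int8 quantization.
// Each channel c uses its own scales[c]; the zero point is always 0.
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <vector>

std::vector<int8_t> QuantizePerChannel(const std::vector<float>& values,
                                       const std::vector<float>& scales,
                                       size_t channels)
{
    // Assumes a [channels][elementsPerChannel] row-major layout.
    const size_t elementsPerChannel = values.size() / channels;
    std::vector<int8_t> quantized(values.size());
    for (size_t c = 0; c < channels; ++c)
    {
        for (size_t i = 0; i < elementsPerChannel; ++i)
        {
            const size_t index = c * elementsPerChannel + i;
            const float scaled = std::round(values[index] / scales[c]);
            quantized[index] =
                static_cast<int8_t>(std::min(127.0f, std::max(-128.0f, scaled)));
        }
    }
    return quantized;
}

int main()
{
    // Two channels, three values each; channel 1 uses a larger scale.
    const std::vector<float> values = {0.1f, 0.2f, 0.3f, 1.0f, 2.0f, 3.0f};
    const std::vector<float> scales = {0.1f, 1.0f};
    for (int8_t v : QuantizePerChannel(values, scales, 2))
    {
        printf("%d ", v); // expected: 1 2 3 1 2 3
    }
    printf("\n");
    return 0;
}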
diff --git a/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp b/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
index 2afd941636..d902d23d86 100644
--- a/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
+++ b/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
@@ -52,6 +52,16 @@ auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream)
[](const std::string& s) { return armnn::numeric_cast<uint8_t>(std::stoi(s)); });
}
+
+template<>
+auto ParseDataArray<armnn::DataType::QSymmS8>(std::istream& stream)
+{
+ return ParseArrayImpl<int8_t>(stream,
+ [](const std::string& s) { return armnn::numeric_cast<int8_t>(std::stoi(s)); });
+}
+
+
+
template<>
auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream,
const float& quantizationScale,
@@ -130,6 +140,15 @@ void TensorPrinter::operator()(const std::vector<uint8_t>& values)
}
}
+void TensorPrinter::operator()(const std::vector<int8_t>& values)
+{
+ ForEachValue(values, [](int8_t value)
+ {
+ printf("%d ", value);
+ });
+ WriteToFile(values);
+}
+
void TensorPrinter::operator()(const std::vector<int>& values)
{
ForEachValue(values, [](int value)
@@ -170,7 +189,8 @@ void TensorPrinter::WriteToFile(const std::vector<T>& values)
}
}
-using TContainer = mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
+using TContainer =
+ mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>, std::vector<int8_t>>;
using QuantizationParams = std::pair<float, int32_t>;
void PopulateTensorWithData(TContainer& tensorData,
@@ -212,6 +232,12 @@ void PopulateTensorWithData(TContainer& tensorData,
ParseDataArray<armnn::DataType::Signed32>(inputTensorFile) :
GenerateDummyTensorData<armnn::DataType::Signed32>(numElements);
}
+ else if (dataTypeStr.compare("qsymms8") == 0)
+ {
+ tensorData = readFromFile ?
+ ParseDataArray<armnn::DataType::QSymmS8>(inputTensorFile) :
+ GenerateDummyTensorData<armnn::DataType::QSymmS8>(numElements);
+ }
else if (dataTypeStr.compare("qasymm8") == 0)
{
tensorData = readFromFile ?