From 4f55a25217f679205bd39587a26f2a2d1866cb67 Mon Sep 17 00:00:00 2001
From: Finn Williams
Date: Fri, 20 Nov 2020 13:57:53 +0000
Subject: IVGCVSW-5559 Add int8_t to tflite delegate on ExecuteNetwork

Signed-off-by: Finn Williams
Signed-off-by: Kevin May
Change-Id: I56afc73d48848bc40842692831c05316484757a4
---
 tests/ExecuteNetwork/ExecuteNetwork.cpp | 98 ++++++++++++----------
 .../NetworkExecutionUtils.cpp           | 30 -------
 .../NetworkExecutionUtils.hpp           | 52 +++++++++++-
 3 files changed, 106 insertions(+), 74 deletions(-)

diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index ba7ce29cd7..be341b670a 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -88,57 +88,50 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
         if (params.m_InputTypes[inputIndex].compare("float") == 0)
         {
             auto inputData = tfLiteInterpreter->typed_tensor<float>(input);
-            TContainer tensorData;
-            PopulateTensorWithData(tensorData,
-                                   params.m_InputTensorShapes[inputIndex]->GetNumElements(),
-                                   params.m_InputTypes[inputIndex],
-                                   armnn::EmptyOptional(),
-                                   dataFile);
-
-            mapbox::util::apply_visitor([&](auto&& value)
-            {
-                for (unsigned int i = 0; i < inputSize; ++i)
-                {
-                    inputData[i] = value.data()[i];
-                }
-            },
-            tensorData);
+            std::vector<float> tensorData;
+            PopulateTensorWithDataGeneric(tensorData,
+                                          params.m_InputTensorShapes[inputIndex]->GetNumElements(),
+                                          dataFile,
+                                          [](const std::string& s)
+                                          { return std::stof(s); });
+
+            std::copy(tensorData.begin(), tensorData.end(), inputData);
+        }
+        else if (params.m_InputTypes[inputIndex].compare("int8") == 0)
+        {
+            auto inputData = tfLiteInterpreter->typed_tensor<int8_t>(input);
+            std::vector<int8_t> tensorData;
+            PopulateTensorWithDataGeneric(tensorData,
+                                          params.m_InputTensorShapes[inputIndex]->GetNumElements(),
+                                          dataFile,
+                                          [](const std::string& s)
+                                          { return armnn::numeric_cast<int8_t>(std::stoi(s)); });
+
+            std::copy(tensorData.begin(), tensorData.end(), inputData);
         }
         else if (params.m_InputTypes[inputIndex].compare("int") == 0)
         {
             auto inputData = tfLiteInterpreter->typed_tensor<int32_t>(input);
-            TContainer tensorData;
-            PopulateTensorWithData(tensorData,
-                                   params.m_InputTensorShapes[inputIndex]->GetNumElements(),
-                                   params.m_InputTypes[inputIndex],
-                                   armnn::EmptyOptional(),
-                                   dataFile);
-            mapbox::util::apply_visitor([&](auto&& value)
-            {
-                for (unsigned int i = 0; i < inputSize; ++i)
-                {
-                    inputData[i] = value.data()[i];
-                }
-            },
-            tensorData);
+            std::vector<int32_t> tensorData;
+            PopulateTensorWithDataGeneric(tensorData,
+                                          params.m_InputTensorShapes[inputIndex]->GetNumElements(),
+                                          dataFile,
+                                          [](const std::string& s)
+                                          { return std::stoi(s); });
+
+            std::copy(tensorData.begin(), tensorData.end(), inputData);
         }
         else if (params.m_InputTypes[inputIndex].compare("qasymm8") == 0)
         {
             auto inputData = tfLiteInterpreter->typed_tensor<uint8_t>(input);
-            TContainer tensorData;
-            PopulateTensorWithData(tensorData,
-                                   params.m_InputTensorShapes[inputIndex]->GetNumElements(),
-                                   params.m_InputTypes[inputIndex],
-                                   armnn::EmptyOptional(),
-                                   dataFile);
-            mapbox::util::apply_visitor([&](auto&& value)
-            {
-                for (unsigned int i = 0; i < inputSize; ++i)
-                {
-                    inputData[i] = value.data()[i];
-                }
-            },
-            tensorData);
+            std::vector<uint8_t> tensorData;
+            PopulateTensorWithDataGeneric(tensorData,
+                                          params.m_InputTensorShapes[inputIndex]->GetNumElements(),
+                                          dataFile,
+                                          [](const std::string& s)
+                                          { return armnn::numeric_cast<uint8_t>(std::stoi(s)); });
+
+            std::copy(tensorData.begin(), tensorData.end(), inputData);
         }
         else
         {
@@ -203,6 +196,25 @@ int TfLiteDelegateMainImpl(const ExecuteNetworkParams& params,
                 }
             }
         }
+        else if (params.m_OutputTypes[outputIndex].compare("int8") == 0)
+        {
+            auto tfLiteDelageOutputData = tfLiteInterpreter->typed_tensor<int8_t>(tfLiteDelegateOutputId);
+            if(tfLiteDelageOutputData == NULL)
+            {
+                ARMNN_LOG(fatal) << "Output tensor is null, output type: "
+                                    "\"" << params.m_OutputTypes[outputIndex] << "\" may be incorrect.";
+                return EXIT_FAILURE;
+            }
+
+            for (int i = 0; i < outputSize; ++i)
+            {
+                std::cout << signed(tfLiteDelageOutputData[i]) << ", ";
+                if (i % 60 == 0)
+                {
+                    std::cout << std::endl;
+                }
+            }
+        }
         else if (params.m_OutputTypes[outputIndex].compare("qasymm8") == 0)
         {
             auto tfLiteDelageOutputData = tfLiteInterpreter->typed_tensor<uint8_t>(tfLiteDelegateOutputId);
diff --git a/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp b/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
index 3e7c87d653..2afd941636 100644
--- a/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
+++ b/tests/NetworkExecutionUtils/NetworkExecutionUtils.cpp
@@ -25,36 +25,6 @@
 #include "armnnOnnxParser/IOnnxParser.hpp"
 #endif
 
-
-template <typename T, typename TParseElementFunc>
-std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char* chars = "\t ,:")
-{
-    std::vector<T> result;
-    // Processes line-by-line.
-    std::string line;
-    while (std::getline(stream, line))
-    {
-        std::vector<std::string> tokens = armnn::stringUtils::StringTokenizer(line, chars);
-        for (const std::string& token : tokens)
-        {
-            if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
-            {
-                try
-                {
-                    result.push_back(parseElementFunc(token));
-                }
-                catch (const std::exception&)
-                {
-                    ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
-                }
-            }
-        }
-    }
-
-    return result;
-}
-
-
 template<armnn::DataType NonQuantizedType>
 auto ParseDataArray(std::istream& stream);
 
diff --git a/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp b/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
index 9d9e616e98..742f968a7a 100644
--- a/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
+++ b/tests/NetworkExecutionUtils/NetworkExecutionUtils.hpp
@@ -7,10 +7,13 @@
 #include 
 #include 
+#include 
+#include 
 
 #include 
 #include 
+#include 
 
 
 std::vector<unsigned int> ParseArray(std::istream& stream);
 
@@ -68,4 +71,51 @@ bool ValidatePath(const std::string& file, const bool expectFile);
  * @param expectFile bool - If true, checks for a regular file.
  * @return bool - True if all given strings are valid paths., false otherwise.
  * */
-bool ValidatePaths(const std::vector<std::string>& fileVec, const bool expectFile);
\ No newline at end of file
+bool ValidatePaths(const std::vector<std::string>& fileVec, const bool expectFile);
+
+template <typename T, typename TParseElementFunc>
+std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char* chars = "\t ,:")
+{
+    std::vector<T> result;
+    // Processes line-by-line.
+    std::string line;
+    while (std::getline(stream, line))
+    {
+        std::vector<std::string> tokens = armnn::stringUtils::StringTokenizer(line, chars);
+        for (const std::string& token : tokens)
+        {
+            if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
+            {
+                try
+                {
+                    result.push_back(parseElementFunc(token));
+                }
+                catch (const std::exception&)
+                {
+                    ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
It has been ignored."; + } + } + } + } + + return result; +} + +template +void PopulateTensorWithDataGeneric(std::vector& tensorData, + unsigned int numElements, + const armnn::Optional& dataFile, + TParseElementFunc parseFunction) +{ + const bool readFromFile = dataFile.has_value() && !dataFile.value().empty(); + + std::ifstream inputTensorFile; + if (readFromFile) + { + inputTensorFile = std::ifstream(dataFile.value()); + } + + tensorData = readFromFile ? + ParseArrayImpl(inputTensorFile, parseFunction) : + std::vector(numElements, static_cast(0)); +} -- cgit v1.2.1