From bee4bc944aa50782ff22cb4a31fbc611212a5e89 Mon Sep 17 00:00:00 2001
From: Francis Murtagh
Date: Tue, 18 Jun 2019 12:30:37 +0100
Subject: IVGCVSW-3299 Add Uint8 Support to Model Accuracy Checker

* Separate ExecuteNetwork main function into standalone application
* Include NetworkExecutionUtils header and remove duplicate functions
* Add uint8 and int32 support to ModelAccuracyChecker

Change-Id: I5fb4bc147232f8388f37eea7db5130b04fd215d1
Signed-off-by: Francis Murtagh
---
 .../ModelAccuracyTool-Armnn.cpp | 108 ++++++++-------
 1 file changed, 43 insertions(+), 65 deletions(-)

(limited to 'tests/ModelAccuracyTool-Armnn')

diff --git a/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp b/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
index 3abfb3c2ec..aec4d70271 100644
--- a/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
+++ b/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
@@ -4,9 +4,9 @@
 //
 
 #include "ModelAccuracyChecker.hpp"
-#include "../InferenceTest.hpp"
 #include "../ImagePreprocessor.hpp"
 #include "armnnDeserializer/IDeserializer.hpp"
+#include "../NetworkExecutionUtils/NetworkExecutionUtils.hpp"
 
 #include
 #include
@@ -14,70 +14,8 @@
 
 using namespace armnn::test;
 
-namespace po = boost::program_options;
-
-bool CheckOption(const po::variables_map& vm,
-                 const char* option)
-{
-    // Check that the given option is valid.
-    if (option == nullptr)
-    {
-        return false;
-    }
-
-    // Check whether 'option' is provided.
-    return vm.find(option) != vm.end();
-}
-
-template
-std::vector ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char * chars = "\t ,:")
-{
-    std::vector result;
-    // Processes line-by-line.
-    std::string line;
-    while (std::getline(stream, line))
-    {
-        std::vector tokens;
-        try
-        {
-            // Coverity fix: boost::split() may throw an exception of type boost::bad_function_call.
-            boost::split(tokens, line, boost::algorithm::is_any_of(chars), boost::token_compress_on);
-        }
-        catch (const std::exception& e)
-        {
-            BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
-            continue;
-        }
-        for (const std::string& token : tokens)
-        {
-            if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
-            {
-                try
-                {
-                    result.push_back(parseElementFunc(token));
-                }
-                catch (const std::exception&)
-                {
-                    BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored.";
-                }
-            }
-        }
-    }
-
-    return result;
-}
-
 map LoadValidationLabels(const string & validationLabelPath);
 
-template
-auto ParseDataArray(std::istream & stream);
-
-template<>
-auto ParseDataArray(std::istream & stream)
-{
-    return ParseArrayImpl(stream, [](const std::string& s) { return std::stof(s); });
-}
-
 int main(int argc, char* argv[])
 {
     try
@@ -94,6 +32,7 @@ int main(int argc, char* argv[])
         std::vector defaultBackends = {armnn::Compute::CpuAcc, armnn::Compute::CpuRef};
         std::string modelPath;
         std::string dataDir;
+        std::string inputType = "float";
         std::string inputName;
         std::string outputName;
         std::string validationLabelPath;
@@ -112,6 +51,9 @@
                  backendsMessage.c_str())
                 ("data-dir,d", po::value(&dataDir)->required(),
                  "Path to directory containing the ImageNet test data")
+                ("input-type,y", po::value(&inputType), "The data type of the input tensors."
+                 "If unset, defaults to \"float\" for all defined inputs. "
+                 "Accepted values (float, int or qasymm8)")
                 ("input-name,i", po::value(&inputName)->required(),
                  "Identifier of the input tensors in the network separated by comma.")
                 ("output-name,o", po::value(&outputName)->required(),
@@ -217,14 +159,50 @@
         if(ValidateDirectory(dataDir))
         {
+            InferenceModel::Params params;
+            params.m_ModelPath = modelPath;
+            params.m_IsModelBinary = true;
+            params.m_ComputeDevices = computeDevice;
+            params.m_InputBindings.push_back(inputName);
+            params.m_OutputBindings.push_back(outputName);
+
+            using TParser = armnnDeserializer::IDeserializer;
+            InferenceModel model(params, false);
             for (auto & imageEntry : boost::make_iterator_range(directory_iterator(pathToDataDir), {}))
             {
                 cout << "Processing image: " << imageEntry << "\n";
                 std::ifstream inputTensorFile(imageEntry.path().string());
                 vector inputDataContainers;
-                inputDataContainers.push_back(ParseDataArray(inputTensorFile));
-                vector outputDataContainers = {vector(1001)};
+                vector outputDataContainers;
+
+                if (inputType.compare("float") == 0)
+                {
+                    inputDataContainers.push_back(
+                        ParseDataArray(inputTensorFile));
+                    outputDataContainers = {vector(1001)};
+                }
+                else if (inputType.compare("int") == 0)
+                {
+                    inputDataContainers.push_back(
+                        ParseDataArray(inputTensorFile));
+                    outputDataContainers = {vector(1001)};
+                }
+                else if (inputType.compare("qasymm8") == 0)
+                {
+                    auto inputBinding = model.GetInputBindingInfo();
+                    inputDataContainers.push_back(
+                        ParseDataArray(
+                            inputTensorFile,
+                            inputBinding.second.GetQuantizationScale(),
+                            inputBinding.second.GetQuantizationOffset()));
+                    outputDataContainers = {vector(1001)};
+                }
+                else
+                {
+                    BOOST_LOG_TRIVIAL(fatal) << "Unsupported tensor data type \"" << inputType << "\". ";
+                    return EXIT_FAILURE;
+                }
 
                 status = runtime->EnqueueWorkload(networkId,
                                                   armnnUtils::MakeInputTensors(inputBindings, inputDataContainers),
-- 
cgit v1.2.1