From e7ba17ec386ac5e6ddca02f49e9b8e06faa95d4e Mon Sep 17 00:00:00 2001
From: Matthew Sloyan
Date: Tue, 6 Oct 2020 10:03:21 +0100
Subject: IVGCVSW-5286 Switch tests/ModelAccuracyTool-Armnn over to cxxopts

 * Fixed two code errors which were causing compiler issues.
 * Added support for multiple input and output tensors.

Signed-off-by: Matthew Sloyan
Change-Id: I298e4bf52a0b41349adab60295e0f22da3cc057b
---
 .../ModelAccuracyTool-Armnn.cpp | 197 ++++++++++++---------
 1 file changed, 114 insertions(+), 83 deletions(-)

diff --git a/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp b/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
index dccf5ffdcd..f8337a5286 100644
--- a/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
+++ b/tests/ModelAccuracyTool-Armnn/ModelAccuracyTool-Armnn.cpp
@@ -9,7 +9,7 @@
 #include "armnnDeserializer/IDeserializer.hpp"
 
 #include <armnnUtils/Filesystem.hpp>
-#include <boost/program_options.hpp>
+#include <cxxopts/cxxopts.hpp>
 #include <map>
 
 using namespace armnn::test;
@@ -51,55 +51,88 @@ int main(int argc, char* argv[])
         armnn::LogSeverity level = armnn::LogSeverity::Debug;
         armnn::ConfigureLogging(true, true, level);
 
-        // Set-up program Options
-        namespace po = boost::program_options;
-
-        std::vector<armnn::BackendId> computeDevice;
-        std::vector<armnn::BackendId> defaultBackends = {armnn::Compute::CpuAcc, armnn::Compute::CpuRef};
         std::string modelPath;
         std::string modelFormat;
+        std::vector<std::string> inputNames;
+        std::vector<std::string> outputNames;
         std::string dataDir;
-        std::string inputName;
-        std::string inputLayout;
-        std::string outputName;
         std::string modelOutputLabelsPath;
         std::string validationLabelPath;
+        std::string inputLayout;
+        std::vector<armnn::BackendId> computeDevice;
         std::string validationRange;
         std::string blacklistPath;
 
         const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                             + armnn::BackendRegistryInstance().GetBackendIdsAsString();
 
-        po::options_description desc("Options");
         try
         {
-            // Adds generic options needed to run Accuracy Tool.
-            desc.add_options()
-                ("help,h", "Display help messages")
-                ("model-path,m", po::value<std::string>(&modelPath)->required(), "Path to armnn format model file")
-                ("model-format,f", po::value<std::string>(&modelFormat)->required(),
-                 "The model format. Supported values: caffe, tensorflow, tflite")
-                ("input-name,i", po::value<std::string>(&inputName)->required(),
-                 "Identifier of the input tensors in the network separated by comma.")
-                ("output-name,o", po::value<std::string>(&outputName)->required(),
-                 "Identifier of the output tensors in the network separated by comma.")
-                ("data-dir,d", po::value<std::string>(&dataDir)->required(),
-                 "Path to directory containing the ImageNet test data")
-                ("model-output-labels,p", po::value<std::string>(&modelOutputLabelsPath)->required(),
-                 "Path to model output labels file.")
-                ("validation-labels-path,v", po::value<std::string>(&validationLabelPath)->required(),
-                 "Path to ImageNet Validation Label file")
-                ("data-layout,l", po::value<std::string>(&inputLayout)->default_value("NHWC"),
-                 "Data layout. Supported value: NHWC, NCHW. Default: NHWC")
-                ("compute,c", po::value<std::vector<armnn::BackendId>>(&computeDevice)->default_value(defaultBackends),
-                 backendsMessage.c_str())
-                ("validation-range,r", po::value<std::string>(&validationRange)->default_value("1:0"),
-                 "The range of the images to be evaluated. Specified in the form <begin index>:<end index>."
-                 "The index starts at 1 and the range is inclusive."
- "By default the evaluation will be performed on all images.") - ("blacklist-path,b", po::value(&blacklistPath)->default_value(""), - "Path to a blacklist file where each line denotes the index of an image to be " - "excluded from evaluation."); + cxxopts::Options options("ModeAccuracyTool-Armnn","Options"); + + options.add_options() + ("h,help", "Display help messages") + ("m,model-path", + "Path to armnn format model file", + cxxopts::value(modelPath)) + ("f,model-format", + "The model format. Supported values: caffe, tensorflow, tflite", + cxxopts::value(modelFormat)) + ("i,input-name", + "Identifier of the input tensors in the network separated by comma with no space.", + cxxopts::value>(inputNames)) + ("o,output-name", + "Identifier of the output tensors in the network separated by comma with no space.", + cxxopts::value>(outputNames)) + ("d,data-dir", + "Path to directory containing the ImageNet test data", + cxxopts::value(dataDir)) + ("p,model-output-labels", + "Path to model output labels file.", + cxxopts::value(modelOutputLabelsPath)) + ("v,validation-labels-path", + "Path to ImageNet Validation Label file", + cxxopts::value(validationLabelPath)) + ("l,data-layout", + "Data layout. Supported value: NHWC, NCHW. Default: NHWC", + cxxopts::value(inputLayout)->default_value("NHWC")) + ("c,compute", + backendsMessage.c_str(), + cxxopts::value>(computeDevice)->default_value("CpuAcc,CpuRef")) + ("r,validation-range", + "The range of the images to be evaluated. Specified in the form :." + "The index starts at 1 and the range is inclusive." + "By default the evaluation will be performed on all images.", + cxxopts::value(validationRange)->default_value("1:0")) + ("b,blacklist-path", + "Path to a blacklist file where each line denotes the index of an image to be " + "excluded from evaluation.", + cxxopts::value(blacklistPath)->default_value("")); + + auto result = options.parse(argc, argv); + + if (result.count("help") > 0) + { + std::cout << options.help() << std::endl; + return EXIT_FAILURE; + } + + // Check for mandatory single options. + std::string mandatorySingleParameters[] = { "model-path", "model-format", "input-name", "output-name", + "data-dir", "model-output-labels", "validation-labels-path" }; + for (auto param : mandatorySingleParameters) + { + if (result.count(param) != 1) + { + std::cerr << "Parameter \'--" << param << "\' is required but missing." << std::endl; + return EXIT_FAILURE; + } + } + } + catch (const cxxopts::OptionException& e) + { + std::cerr << e.what() << std::endl << std::endl; + return EXIT_FAILURE; } catch (const std::exception& e) { @@ -108,26 +141,7 @@ int main(int argc, char* argv[]) // They really won't in any of these cases. ARMNN_ASSERT_MSG(false, "Caught unexpected exception"); std::cerr << "Fatal internal error: " << e.what() << std::endl; - return 1; - } - - po::variables_map vm; - try - { - po::store(po::parse_command_line(argc, argv, desc), vm); - - if (vm.count("help")) - { - std::cout << desc << std::endl; - return 1; - } - po::notify(vm); - } - catch (po::error& e) - { - std::cerr << e.what() << std::endl << std::endl; - std::cerr << desc << std::endl; - return 1; + return EXIT_FAILURE; } // Check if the requested backend are all valid @@ -163,7 +177,7 @@ int main(int argc, char* argv[]) std::stringstream message; message << "armnn::Exception (" << e.what() << ") caught from optimize."; ARMNN_LOG(fatal) << message.str(); - return 1; + return EXIT_FAILURE; } // Loads the network into the runtime. 
@@ -172,25 +186,34 @@
        if (status == armnn::Status::Failure)
        {
            ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
-            return 1;
+            return EXIT_FAILURE;
        }
 
        // Set up Network
        using BindingPointInfo = InferenceModelInternal::BindingPointInfo;
 
-        const armnnDeserializer::BindingPointInfo&
-            inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, inputName);
+        // Handle inputNames and outputNames, there can be multiple.
+        std::vector<BindingPointInfo> inputBindings;
+        for(auto& input: inputNames)
+        {
+            const armnnDeserializer::BindingPointInfo&
+                inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, input);
 
-        std::pair<armnn::LayerBindingId, armnn::TensorInfo>
-            m_InputBindingInfo(inputBindingInfo.m_BindingId, inputBindingInfo.m_TensorInfo);
-        std::vector<BindingPointInfo> inputBindings = { m_InputBindingInfo };
+            std::pair<armnn::LayerBindingId, armnn::TensorInfo>
+                m_InputBindingInfo(inputBindingInfo.m_BindingId, inputBindingInfo.m_TensorInfo);
+            inputBindings.push_back(m_InputBindingInfo);
+        }
 
-        const armnnDeserializer::BindingPointInfo&
-            outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, outputName);
+        std::vector<BindingPointInfo> outputBindings;
+        for(auto& output: outputNames)
+        {
+            const armnnDeserializer::BindingPointInfo&
+                outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, output);
 
-        std::pair<armnn::LayerBindingId, armnn::TensorInfo>
-            m_OutputBindingInfo(outputBindingInfo.m_BindingId, outputBindingInfo.m_TensorInfo);
-        std::vector<BindingPointInfo> outputBindings = { m_OutputBindingInfo };
+            std::pair<armnn::LayerBindingId, armnn::TensorInfo>
+                m_OutputBindingInfo(outputBindingInfo.m_BindingId, outputBindingInfo.m_TensorInfo);
+            outputBindings.push_back(m_OutputBindingInfo);
+        }
 
        // Load model output labels
        if (modelOutputLabelsPath.empty() || !fs::exists(modelOutputLabelsPath) ||
@@ -208,7 +231,7 @@
        if (imageIndexStrs.size() != 2)
        {
            ARMNN_LOG(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
-            return 1;
+            return EXIT_FAILURE;
        }
        try
        {
@@ -218,7 +241,7 @@
        catch (const std::exception& e)
        {
            ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
-            return 1;
+            return EXIT_FAILURE;
        }
 
        // Validate blacklist file if it's specified
@@ -226,10 +249,10 @@
            !(fs::exists(blacklistPath) && fs::is_regular_file(blacklistPath)))
        {
            ARMNN_LOG(fatal) << "Invalid path to blacklist file at " << blacklistPath;
-            return 1;
+            return EXIT_FAILURE;
        }
 
-        path pathToDataDir(dataDir);
+        fs::path pathToDataDir(dataDir);
        const map<std::string, std::string> imageNameToLabel = LoadValidationImageFilenamesAndLabels(
            validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, blacklistPath);
        armnnUtils::ModelAccuracyChecker checker(imageNameToLabel, modelOutputLabels);
@@ -238,14 +261,22 @@
 
        if (ValidateDirectory(dataDir))
        {
            InferenceModel<armnnDeserializer::IDeserializer, float>::Params params;
+            params.m_ModelPath = modelPath;
            params.m_IsModelBinary = true;
            params.m_ComputeDevices = computeDevice;
-            params.m_InputBindings.push_back(inputName);
-            params.m_OutputBindings.push_back(outputName);
+            // Insert inputNames and outputNames into params vector
+            params.m_InputBindings.insert(std::end(params.m_InputBindings),
+                                          std::begin(inputNames),
+                                          std::end(inputNames));
+            params.m_OutputBindings.insert(std::end(params.m_OutputBindings),
+                                           std::begin(outputNames),
+                                           std::end(outputNames));
 
            using TParser = armnnDeserializer::IDeserializer;
+            // If dynamicBackends is empty it will be disabled by default.
+            InferenceModel<TParser, float> model(params, false, "");
+
+            // Get input tensor information
            const armnn::TensorInfo& inputTensorInfo = model.GetInputBindingInfo().second;
            const armnn::TensorShape& inputTensorShape = inputTensorInfo.GetShape();
@@ -262,7 +293,7 @@
            else
            {
                ARMNN_LOG(fatal) << "Invalid Data layout: " << inputLayout;
-                return 1;
+                return EXIT_FAILURE;
            }
            const unsigned int inputTensorWidth =
                inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[3] : inputTensorShape[2];
@@ -275,7 +306,7 @@
            {
                ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
                                 << " , mismatches the number of output labels: " << modelOutputLabels.size();
-                return 1;
+                return EXIT_FAILURE;
            }
 
            const unsigned int batchSize = 1;
@@ -296,7 +327,7 @@
            else
            {
                ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
-                return 1;
+                return EXIT_FAILURE;
            }
            const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
            for (const auto& imageEntry : imageNameToLabel)
@@ -354,7 +385,7 @@
        }
        else
        {
-            return 1;
+            return EXIT_SUCCESS;
        }
 
        for(unsigned int i = 1; i <= 5; ++i)
@@ -363,7 +394,7 @@
        }
 
        ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
-        return 0;
+        return EXIT_SUCCESS;
    }
    catch (const armnn::Exception& e)
    {
@@ -371,14 +402,14 @@
        // exception of type std::length_error.
        // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
        std::cerr << "Armnn Error: " << e.what() << std::endl;
-        return 1;
+        return EXIT_FAILURE;
    }
    catch (const std::exception& e)
    {
        // Coverity fix: various boost exceptions can be thrown by methods called by this test.
        std::cerr << "WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the "
                     "Accuracy Tool: " << e.what() << std::endl;
-        return 1;
+        return EXIT_FAILURE;
    }
 }
-- 
cgit v1.2.1
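
Note on the cxxopts idioms used above. The sketch below is a minimal, self-contained example rather than ArmNN code: the program and option names are illustrative, and it assumes the cxxopts 2.x API and the <cxxopts/cxxopts.hpp> header path used by ArmNN's vendored copy (the standalone library installs <cxxopts.hpp>, and cxxopts 3.x renames OptionException). Two behaviours matter for the patch: an option bound to std::vector<std::string> is split on commas, which is what lets "--input-name input1,input2" produce multiple tensor bindings, and cxxopts has no boost-style ->required() modifier, so mandatory options are checked by hand with result.count() after parsing.

// cxxopts_vector_sketch.cpp - minimal illustration, assuming cxxopts 2.x.
#include <cxxopts/cxxopts.hpp> // <cxxopts.hpp> when using the standalone library

#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>

int main(int argc, char* argv[])
{
    cxxopts::Options options("cxxopts_vector_sketch", "Comma-separated values bound to std::vector");

    std::vector<std::string> inputNames;
    std::vector<std::string> outputNames;

    options.add_options()
        ("h,help", "Display help messages")
        // A std::vector-typed option value is split on ',' by cxxopts, so
        // "--input-name input1,input2" yields two entries; repeating the option appends more.
        ("i,input-name", "Comma-separated input tensor names",
         cxxopts::value<std::vector<std::string>>(inputNames))
        ("o,output-name", "Comma-separated output tensor names",
         cxxopts::value<std::vector<std::string>>(outputNames));

    try
    {
        auto result = options.parse(argc, argv);

        if (result.count("help") > 0)
        {
            std::cout << options.help() << std::endl;
            return EXIT_SUCCESS;
        }

        // cxxopts has no ->required() modifier, so mandatory options are
        // verified by hand after parsing, as the patched tool does.
        const std::vector<std::string> mandatory = { "input-name", "output-name" };
        for (const auto& param : mandatory)
        {
            if (result.count(param) == 0)
            {
                std::cerr << "Parameter '--" << param << "' is required but missing." << std::endl;
                return EXIT_FAILURE;
            }
        }
    }
    catch (const cxxopts::OptionException& e)
    {
        std::cerr << e.what() << std::endl;
        return EXIT_FAILURE;
    }

    for (const auto& name : inputNames)
    {
        std::cout << "input:  " << name << std::endl;
    }
    for (const auto& name : outputNames)
    {
        std::cout << "output: " << name << std::endl;
    }
    return EXIT_SUCCESS;
}

Built against cxxopts, running the sketch as ./cxxopts_vector_sketch -i input1,input2 -o prob prints one line per parsed name, mirroring how the tool now accepts several input and output tensor identifiers in a single option.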