// NOTE(review): this file is a garbled code-browser extraction of ArmNN's
// ModelAccuracyTool main.cpp. The integers fused into each line (6, 7, 11, ...)
// are the original file's line numbers captured by the extraction; they are not
// part of the code. Gaps in that numbering mean whole source lines are missing.
//
// Includes: local inference-test helpers plus Boost (string algorithms,
// filesystem, program_options, iterator_range) — all used later in this file.
6 #include "../ImageTensorGenerator/ImageTensorGenerator.hpp" 7 #include "../InferenceTest.hpp" 11 #include <boost/algorithm/string.hpp> 12 #include <boost/filesystem.hpp> 13 #include <boost/program_options/variables_map.hpp> 14 #include <boost/range/iterator_range.hpp> 35 const string& imageDirectoryPath,
// ^ fragment of the forward declaration of the image-name -> validation-label
//   loader (defined near the bottom of this file); its return type and name are
//   on missing lines. The blacklist path parameter defaults to "" (no blacklist).
38 const string& blacklistPath =
"");
// Forward declaration: reads the model-output-labels file and returns one
// LabelCategoryNames entry per output neuron (definition at end of file).
47 std::vector<armnnUtils::LabelCategoryNames>
LoadModelOutputLabels(
const std::string& modelOutputLabelsPath);
// Entry point of the Model Accuracy Tool: parses command-line options, loads
// and optimizes a network, runs inference over an ImageNet validation subset,
// and prints Top-1..Top-5 accuracy. NOTE(review): many original lines are
// missing from this extraction (the fused line numbers jump), so several
// statements below appear without their enclosing try/if/switch context.
49 int main(
int argc,
char* argv[])
53 using namespace boost::filesystem;
58 namespace po = boost::program_options;
// --- Option storage: one variable per command-line option declared below. ---
60 std::vector<armnn::BackendId> computeDevice;
62 std::string modelPath;
63 std::string modelFormat;
65 std::string inputName;
66 std::string inputLayout;
67 std::string outputName;
68 std::string modelOutputLabelsPath;
69 std::string validationLabelPath;
70 std::string validationRange;
71 std::string blacklistPath;
73 const std::string backendsMessage =
"Which device to run layers on by default. Possible choices: " 76 po::options_description desc(
"Options");
// --- boost::program_options option table (desc.add_options() call is on a
//     missing line; each "(...)" group below is one option definition). ---
81 (
"help,h",
"Display help messages")
82 (
"model-path,m", po::value<std::string>(&modelPath)->required(),
"Path to armnn format model file")
83 (
"model-format,f", po::value<std::string>(&modelFormat)->required(),
84 "The model format. Supported values: caffe, tensorflow, tflite")
85 (
"input-name,i", po::value<std::string>(&inputName)->required(),
86 "Identifier of the input tensors in the network separated by comma.")
87 (
"output-name,o", po::value<std::string>(&outputName)->required(),
88 "Identifier of the output tensors in the network separated by comma.")
89 (
// NOTE(review): &dataDir has no visible declaration above — presumably declared
// on one of the missing lines. TODO confirm against the full source.
"data-dir,d", po::value<std::string>(&dataDir)->required(),
90 "Path to directory containing the ImageNet test data")
91 (
"model-output-labels,p", po::value<std::string>(&modelOutputLabelsPath)->required(),
92 "Path to model output labels file.")
93 (
"validation-labels-path,v", po::value<std::string>(&validationLabelPath)->required(),
94 "Path to ImageNet Validation Label file")
95 (
"data-layout,l", po::value<std::string>(&inputLayout)->default_value(
"NHWC"),
96 "Data layout. Supported value: NHWC, NCHW. Default: NHWC")
97 (
"compute,c", po::value<std::vector<armnn::BackendId>>(&computeDevice)->default_value(defaultBackends),
98 backendsMessage.c_str())
99 (
// Default "1:0": begin=1 and end=0; end==0 is later normalized to "all images".
"validation-range,r", po::value<std::string>(&validationRange)->default_value(
"1:0"),
100 "The range of the images to be evaluated. Specified in the form <begin index>:<end index>." 101 "The index starts at 1 and the range is inclusive." 102 "By default the evaluation will be performed on all images.")
103 (
"blacklist-path,b", po::value<std::string>(&blacklistPath)->default_value(
""),
104 "Path to a blacklist file where each line denotes the index of an image to be " 105 "excluded from evaluation.");
// Catch-all around option-table construction: reaching here is a programmer
// error (hence the always-false assert), then the error is reported to stderr.
107 catch (
const std::exception& e)
112 BOOST_ASSERT_MSG(
false,
"Caught unexpected exception")
113 std::cerr <<
"Fatal internal error: " << e.what() << std::endl;
// --- Parse the command line into vm; print help and exit if requested. ---
117 po::variables_map vm;
120 po::store(po::parse_command_line(argc, argv, desc), vm);
122 if (vm.count(
"help"))
124 std::cout << desc << std::endl;
// (missing lines: presumably po::notify + catch for parse errors) — on parse
// failure print the error followed by usage.
131 std::cerr << e.what() << std::endl << std::endl;
132 std::cerr << desc << std::endl;
// --- Validate requested backends against the backend registry. ---
137 std::string invalidBackends;
140 ARMNN_LOG(fatal) <<
"The list of preferred devices contains invalid backend IDs: " 149 std::ifstream file(modelPath);
// --- Create parser, parse the model, optimize it for the chosen backends. ---
153 auto armnnparser(IParser::Create());
162 optimizedNet =
armnn::Optimize(*network, computeDevice, runtime->GetDeviceSpec());
// armnn::Exception from Optimize is wrapped into a message (handler context on
// missing lines).
166 std::stringstream message;
167 message <<
"armnn::Exception (" << e.
what() <<
") caught from optimize.";
// --- Load the optimized network into the runtime. ---
174 status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
177 ARMNN_LOG(fatal) <<
"armnn::IRuntime: Failed to load network";
// --- Resolve input/output tensor binding points by name (single subgraph 0). ---
185 inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, inputName);
187 std::pair<armnn::LayerBindingId, armnn::TensorInfo>
189 std::vector<BindingPointInfo> inputBindings = { m_InputBindingInfo };
192 outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, outputName);
194 std::pair<armnn::LayerBindingId, armnn::TensorInfo>
196 std::vector<BindingPointInfo> outputBindings = { m_OutputBindingInfo };
// --- Load and sanity-check the model output labels file. ---
199 if (modelOutputLabelsPath.empty() || !boost::filesystem::exists(modelOutputLabelsPath) ||
200 !boost::filesystem::is_regular_file(modelOutputLabelsPath))
202 ARMNN_LOG(fatal) <<
"Invalid model output labels path at " << modelOutputLabelsPath;
204 const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
// --- Parse the "<begin>:<end>" validation range (split result in
//     imageIndexStrs, produced on a missing line). ---
209 size_t imageBegIndex;
210 size_t imageEndIndex;
211 if (imageIndexStrs.size() != 2)
213 ARMNN_LOG(fatal) <<
"Invalid validation range specification: Invalid format " << validationRange;
// stoul throws on non-numeric input; caught just below.
218 imageBegIndex = std::stoul(imageIndexStrs[0]);
219 imageEndIndex = std::stoul(imageIndexStrs[1]);
221 catch (
const std::exception& e)
223 ARMNN_LOG(fatal) <<
"Invalid validation range specification: " << validationRange;
// --- Optional blacklist file must exist and be a regular file if given. ---
228 if (!blacklistPath.empty() &&
229 !(boost::filesystem::exists(blacklistPath) && boost::filesystem::is_regular_file(blacklistPath)))
231 ARMNN_LOG(fatal) <<
"Invalid path to blacklist file at " << blacklistPath;
// --- Build the image-name -> ground-truth-label map for the selected range. ---
235 path pathToDataDir(dataDir);
237 validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, blacklistPath);
239 using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<uint8_t>>;
// --- Map the requested layout string to a DataLayout (assignments on missing
//     lines); anything other than NCHW/NHWC is fatal. ---
257 if (inputLayout ==
"NCHW")
261 else if (inputLayout ==
"NHWC")
267 ARMNN_LOG(fatal) <<
"Invalid Data layout: " << inputLayout;
// --- Derive tensor geometry and expected output size from the model. ---
270 const unsigned int inputTensorWidth =
272 const unsigned int inputTensorHeight =
275 const unsigned int outputNumElements = model.
GetOutputSize();
277 if (modelOutputLabels.size() != outputNumElements)
279 ARMNN_LOG(fatal) <<
"Number of output elements: " << outputNumElements
280 <<
" , mismatches the number of output labels: " << modelOutputLabels.size();
284 const unsigned int batchSize = 1;
// --- Select per-frontend normalization (bodies on missing lines). ---
287 if (modelFormat ==
"caffe")
291 else if (modelFormat ==
"tensorflow")
295 else if (modelFormat ==
"tflite")
301 ARMNN_LOG(fatal) <<
"Unsupported frontend: " << modelFormat;
// --- Main evaluation loop: one inference per validation image. ---
305 for (
const auto& imageEntry : imageNameToLabel)
307 const std::string imageName = imageEntry.first;
308 std::cout <<
"Processing image: " << imageName <<
"\n";
310 vector<TContainer> inputDataContainers;
311 vector<TContainer> outputDataContainers;
313 auto imagePath = pathToDataDir / boost::filesystem::path(imageName);
// Prepare the input tensor in the model's data type; the three cases below
// are the int / uint8 / float branches (case labels on missing lines), each
// also sizing the matching output container.
314 switch (inputTensorDataType)
317 inputDataContainers.push_back(
319 inputTensorWidth, inputTensorHeight,
322 inputTensorDataLayout));
323 outputDataContainers = { vector<int>(outputNumElements) };
326 inputDataContainers.push_back(
328 inputTensorWidth, inputTensorHeight,
331 inputTensorDataLayout));
332 outputDataContainers = { vector<uint8_t>(outputNumElements) };
336 inputDataContainers.push_back(
338 inputTensorWidth, inputTensorHeight,
341 inputTensorDataLayout));
342 outputDataContainers = { vector<float>(outputNumElements) };
// Run inference; a failed enqueue for any image is fatal.
346 status = runtime->EnqueueWorkload(networkId,
352 ARMNN_LOG(fatal) <<
"armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
// Feed this image's raw outputs into the accuracy checker.
355 checker.AddImageResult<
TContainer>(imageName, outputDataContainers);
// --- Report Top-1 through Top-5 accuracy percentages. ---
363 for(
unsigned int i = 1; i <= 5; ++i)
365 std::cout <<
"Top " << i <<
" Accuracy: " << checker.GetAccuracy(i) <<
"%" <<
"\n";
368 ARMNN_LOG(info) <<
"Accuracy Tool ran successfully!";
// --- Top-level error handlers: ArmNN errors, then any std::exception. ---
376 std::cerr <<
"Armnn Error: " << e.
what() << std::endl;
379 catch (
const std::exception & e)
382 std::cerr <<
"WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the " 383 "Accuracy Tool: " << e.what() << std::endl;
// Loads the mapping from validation-image filename to its ground-truth label.
// NOTE(review): the function's return type and name sit on missing extracted
// lines (original 388 and nearby); only its parameter tail is visible here.
// Visible behavior: scan imageDirectoryPath for JPEG/PNG files, sort the
// filenames, validate the [begIndex, endIndex] range, read an optional
// blacklist of image indices, then pair the i-th sorted filename with the i-th
// line of the validation label file, skipping blacklisted indices.
389 const string& imageDirectoryPath,
392 const string& blacklistPath)
// Collect every regular JPEG/PNG file in the directory (extension compared
// case-insensitively via upper-casing).
395 std::vector<std::string> imageFilenames;
396 for (
const auto& imageEntry :
397 boost::make_iterator_range(boost::filesystem::directory_iterator(boost::filesystem::path(imageDirectoryPath))))
399 boost::filesystem::path imagePath = imageEntry.path();
400 std::string imageExtension = boost::to_upper_copy<std::string>(imagePath.extension().string());
401 if (boost::filesystem::is_regular_file(imagePath) && (imageExtension ==
".JPEG" || imageExtension ==
".PNG"))
403 imageFilenames.push_back(imagePath.filename().string());
406 if (imageFilenames.empty())
408 throw armnn::Exception(
"No image file (JPEG, PNG) found at " + imageDirectoryPath);
// Sort so that index N consistently refers to the N-th image alphabetically,
// matching the line order of the validation label file.
412 std::sort(imageFilenames.begin(), imageFilenames.end());
414 std::cout << imageFilenames.size() <<
" images found at " << imageDirectoryPath << std::endl;
// Range validation (error handling on missing lines); endIndex == 0 is the
// sentinel meaning "through the last image".
417 if (begIndex < 1 || endIndex > imageFilenames.size())
421 endIndex = endIndex == 0 ? imageFilenames.size() : endIndex;
422 if (begIndex > endIndex)
// Read blacklisted image indices (one integer per whitespace-separated token).
// NOTE(review): the skip logic below assumes the blacklist is sorted ascending
// — TODO confirm against the full source/caller contract.
428 std::vector<unsigned int> blacklist;
429 if (!blacklistPath.empty())
431 std::ifstream blacklistFile(blacklistPath);
433 while (blacklistFile >> index)
435 blacklist.push_back(index);
// Walk the validation label file line by line; imageIndex tracks which image
// the current label line belongs to (1-based, hence imageIndex - 1 below).
440 std::string classification;
441 map<std::string, std::string> imageNameToLabel;
442 ifstream infile(validationLabelPath);
443 size_t imageIndex = begIndex;
444 size_t blacklistIndexCount = 0;
445 while (std::getline(infile, classification))
447 if (imageIndex > endIndex)
// Skip (and count) images whose index appears next in the blacklist.
452 if (blacklistIndexCount < blacklist.size() && imageIndex == blacklist[blacklistIndexCount])
455 ++blacklistIndexCount;
458 imageNameToLabel.insert(std::pair<std::string, std::string>(imageFilenames[imageIndex - 1], classification));
461 std::cout << blacklistIndexCount <<
" images blacklisted" << std::endl;
462 std::cout << imageIndex - begIndex - blacklistIndexCount <<
" images to be loaded" << std::endl;
463 return imageNameToLabel;
// LoadModelOutputLabels: reads the model-output-labels file line by line and
// returns one LabelCategoryNames entry per line. NOTE(review): the signature
// and the per-line tokenization (originals 466-467, 470, 472-476) are on
// missing extracted lines; predictionCategoryNames is presumably the split of
// the current line — TODO confirm against the full source.
468 std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels;
469 ifstream modelOutputLablesFile(modelOutputLabelsPath);
471 while (std::getline(modelOutputLablesFile, line))
// In-place transform of each category-name token (the unary op is on a
// missing line) before appending the entry for this output index.
475 std::transform(predictionCategoryNames.begin(), predictionCategoryNames.end(), predictionCategoryNames.begin(),
477 modelOutputLabels.push_back(predictionCategoryNames);
479 return modelOutputLabels;
static IRuntimePtr Create(const CreationOptions &options)
const TensorShape & GetShape() const
CPU Execution: Reference C++ kernels.
void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)
Configures the logging behaviour of the ARMNN library.
NormalizationParameters GetNormalizationParameters(const SupportedFrontend &modelFormat, const armnn::DataType &outputType)
Get normalization parameters.
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
virtual const char * what() const noexcept override
#define ARMNN_LOG(severity)
BackendRegistry & BackendRegistryInstance()
const armnn::BindingPointInfo & GetInputBindingInfo(unsigned int inputIndex=0u) const
std::vector< uint8_t > PrepareImageTensor< uint8_t >(const std::string &imagePath, unsigned int newWidth, unsigned int newHeight, const NormalizationParameters &normParams, unsigned int batchSize, const armnn::DataLayout &outputLayout)
armnn::BindingPointInfo BindingPointInfo
std::string GetBackendIdsAsString() const
unsigned int GetOutputSize(unsigned int outputIndex=0u) const
boost::variant< std::vector< float >, std::vector< int >, std::vector< unsigned char > > TContainer
std::vector< std::string > m_InputBindings
std::string Strip(const std::string &originalString, const std::string &characterSet)
Remove any preceding and trailing character specified in the characterSet.
armnn::InputTensors MakeInputTensors(const std::vector< armnn::BindingPointInfo > &inputBindings, const std::vector< TContainer > &inputDataContainers)
std::vector< std::string > SplitBy(const std::string &originalString, const std::string &delimiter, bool includeEmptyToken)
Split a string into tokens by a delimiter.
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
std::vector< std::string > m_OutputBindings
std::vector< armnn::BackendId > m_ComputeDevices
DataType GetDataType() const
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
std::vector< int > PrepareImageTensor< int >(const std::string &imagePath, unsigned int newWidth, unsigned int newHeight, const NormalizationParameters &normParams, unsigned int batchSize, const armnn::DataLayout &outputLayout)
armnn::OutputTensors MakeOutputTensors(const std::vector< armnn::BindingPointInfo > &outputBindings, std::vector< TContainer > &outputDataContainers)
std::pair< armnn::LayerBindingId, armnn::TensorInfo > BindingPointInfo
std::vector< armnn::BackendId > defaultBackends
armnn::TensorInfo m_TensorInfo
Base class for all ArmNN exceptions so that users can filter to just those.
CPU Execution: NEON: ArmCompute.
bool ValidateDirectory(std::string &dir)
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
armnn::LayerBindingId m_BindingId
armnn::Runtime::CreationOptions::ExternalProfilingOptions options
std::vector< std::string > LabelCategoryNames
std::vector< float > PrepareImageTensor< float >(const std::string &imagePath, unsigned int newWidth, unsigned int newHeight, const NormalizationParameters &normParams, unsigned int batchSize, const armnn::DataLayout &outputLayout)