ArmNN
 22.02
ModelAccuracyTool-Armnn.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "../ImageTensorGenerator/ImageTensorGenerator.hpp"
#include "../InferenceTest.hpp"
#include "ModelAccuracyChecker.hpp"
#include "armnnDeserializer/IDeserializer.hpp"

#include <armnnUtils/Filesystem.hpp>
#include <armnnUtils/TContainer.hpp>

#include <cxxopts/cxxopts.hpp>
#include <map>

using namespace armnn::test;

/** Load image names and ground-truth labels from the image directory and the ground-truth label file
 *
 * @pre \p validationLabelPath exists and is a valid regular file
 * @pre \p imageDirectoryPath exists and is a valid directory
 * @pre labels in the validation file correspond to the images sorted lexicographically by image name
 * @pre image indices start at 1
 * @pre \p begIndex and \p endIndex are end-inclusive
 *
 * @param[in] validationLabelPath Path to the validation label file
 * @param[in] imageDirectoryPath  Path to the directory containing the validation images
 * @param[in] begIndex            Begin index of the images to be loaded. Inclusive
 * @param[in] endIndex            End index of the images to be loaded. Inclusive
 * @param[in] excludelistPath     Path to the excludelist file
 * @return A map from image file names to their corresponding ground-truth labels
 */
map<std::string, std::string> LoadValidationImageFilenamesAndLabels(const string& validationLabelPath,
                                                                    const string& imageDirectoryPath,
                                                                    size_t begIndex = 0,
                                                                    size_t endIndex = 0,
                                                                    const string& excludelistPath = "");
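
// Example usage (hypothetical file and directory names): load ground-truth labels for the first 50
// validation images, skipping any indices listed in an optional excludelist file. The range is 1-based
// and end-inclusive, and label line k pairs with the k-th image in lexicographical order.
//
//     const map<std::string, std::string> imageNameToLabel =
//         LoadValidationImageFilenamesAndLabels("val_labels.txt",    // one ground-truth label per line
//                                               "validation_images", // directory of .JPEG/.PNG files
//                                               1, 50,               // inclusive begin:end indices
//                                               "excludelist.txt");  // optional; pass "" to disable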

/** Load model output labels from file
 *
 * @pre \p modelOutputLabelsPath exists and is a regular file
 *
 * @param[in] modelOutputLabelsPath Path to the model output labels file
 * @return A vector of labels, each of which is a list of category names
 */
std::vector<armnnUtils::LabelCategoryNames> LoadModelOutputLabels(const std::string& modelOutputLabelsPath);
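
// The parser defined at the bottom of this file splits each line on ':', keeps the final token, then
// splits that token on ',' and strips surrounding whitespace to obtain the category names for one
// output index. Illustrative lines of the form
//
//     0:background
//     1:tench, Tinca tinca
//
// would therefore yield {"background"} for output 0 and {"tench", "Tinca tinca"} for output 1.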

int main(int argc, char* argv[])
{
    try
    {
        armnn::LogSeverity level = armnn::LogSeverity::Debug;
        armnn::ConfigureLogging(true, true, level);

        std::string modelPath;
        std::string modelFormat;
        std::vector<std::string> inputNames;
        std::vector<std::string> outputNames;
        std::string dataDir;
        std::string modelOutputLabelsPath;
        std::string validationLabelPath;
        std::string inputLayout;
        std::vector<armnn::BackendId> computeDevice;
        std::string validationRange;
        std::string excludelistPath;

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        try
        {
            cxxopts::Options options("ModelAccuracyTool-Armnn", "Options");

            options.add_options()
                ("h,help", "Display help messages")
                ("m,model-path",
                 "Path to armnn format model file",
                 cxxopts::value<std::string>(modelPath))
                ("f,model-format",
                 "The model format. Supported values: tflite",
                 cxxopts::value<std::string>(modelFormat))
                ("i,input-name",
                 "Identifiers of the input tensors in the network, separated by commas with no spaces.",
                 cxxopts::value<std::vector<std::string>>(inputNames))
                ("o,output-name",
                 "Identifiers of the output tensors in the network, separated by commas with no spaces.",
                 cxxopts::value<std::vector<std::string>>(outputNames))
                ("d,data-dir",
                 "Path to directory containing the ImageNet test data",
                 cxxopts::value<std::string>(dataDir))
                ("p,model-output-labels",
                 "Path to model output labels file.",
                 cxxopts::value<std::string>(modelOutputLabelsPath))
                ("v,validation-labels-path",
                 "Path to ImageNet validation label file",
                 cxxopts::value<std::string>(validationLabelPath))
                ("l,data-layout",
                 "Data layout. Supported values: NHWC, NCHW. Default: NHWC",
                 cxxopts::value<std::string>(inputLayout)->default_value("NHWC"))
                ("c,compute",
                 backendsMessage.c_str(),
                 cxxopts::value<std::vector<armnn::BackendId>>(computeDevice)->default_value("CpuAcc,CpuRef"))
                ("r,validation-range",
                 "The range of the images to be evaluated. Specified in the form <begin index>:<end index>. "
                 "The index starts at 1 and the range is inclusive. "
                 "By default the evaluation will be performed on all images.",
                 cxxopts::value<std::string>(validationRange)->default_value("1:0"))
                ("e,excludelist-path",
                 "Path to an excludelist file where each line denotes the index of an image to be "
                 "excluded from evaluation.",
                 cxxopts::value<std::string>(excludelistPath)->default_value(""));
            ARMNN_DEPRECATED_MSG_REMOVAL_DATE("This b,blacklist-path command is deprecated", "22.08")
                ("b,blacklist-path",
                 "Path to a blacklist file where each line denotes the index of an image to be "
                 "excluded from evaluation. This command will be deprecated in favor of: --excludelist-path ",
                 cxxopts::value<std::string>(excludelistPath)->default_value(""));
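
            // Example invocation (hypothetical paths; the available backends depend on how ArmNN was built):
            //
            //     ModelAccuracyTool-Armnn -m mobilenet_v1.armnn -f tflite -i input -o output
            //                             -d /data/ImageNet/validation/ -p model_output_labels.txt
            //                             -v val_labels.txt -c CpuAcc -r 1:100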

            auto result = options.parse(argc, argv);

            if (result.count("help") > 0)
            {
                std::cout << options.help() << std::endl;
                return EXIT_FAILURE;
            }

            // Check for mandatory single options.
            std::string mandatorySingleParameters[] = { "model-path", "model-format", "input-name", "output-name",
                                                        "data-dir", "model-output-labels", "validation-labels-path" };
            for (auto param : mandatorySingleParameters)
            {
                if (result.count(param) != 1)
                {
                    std::cerr << "Parameter \'--" << param << "\' is required but missing." << std::endl;
                    return EXIT_FAILURE;
                }
            }
        }
        catch (const cxxopts::OptionException& e)
        {
            std::cerr << e.what() << std::endl << std::endl;
            return EXIT_FAILURE;
        }
        catch (const std::exception& e)
        {
            ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
            std::cerr << "Fatal internal error: " << e.what() << std::endl;
            return EXIT_FAILURE;
        }

        // Check if the requested backends are all valid
        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
        {
            ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
                             << invalidBackends;
            return EXIT_FAILURE;
        }
        armnn::Status status;

        // Create runtime
        armnn::IRuntime::CreationOptions options;
        armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
        std::ifstream file(modelPath);

        // Create Parser
        using IParser = armnnDeserializer::IDeserializer;
        auto armnnparser(IParser::Create());

        // Create a network
        armnn::INetworkPtr network = armnnparser->CreateNetworkFromBinary(file);

        // Optimizes the network.
        armnn::IOptimizedNetworkPtr optimizedNet(nullptr, nullptr);
        try
        {
            optimizedNet = armnn::Optimize(*network, computeDevice, runtime->GetDeviceSpec());
        }
        catch (const armnn::Exception& e)
        {
            std::stringstream message;
            message << "armnn::Exception (" << e.what() << ") caught from optimize.";
            ARMNN_LOG(fatal) << message.str();
            return EXIT_FAILURE;
        }

        // Loads the network into the runtime.
        armnn::NetworkId networkId;
        status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
        if (status == armnn::Status::Failure)
        {
            ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
            return EXIT_FAILURE;
        }

        // Set up Network
        using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

        // Handle inputNames and outputNames, there can be multiple.
        std::vector<BindingPointInfo> inputBindings;
        for (auto& input : inputNames)
        {
            armnnDeserializer::BindingPointInfo inputBindingInfo;
            inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, input);

            std::pair<armnn::LayerBindingId, armnn::TensorInfo>
                m_InputBindingInfo(inputBindingInfo.m_BindingId, inputBindingInfo.m_TensorInfo);
            inputBindings.push_back(m_InputBindingInfo);
        }

        std::vector<BindingPointInfo> outputBindings;
        for (auto& output : outputNames)
        {
            armnnDeserializer::BindingPointInfo outputBindingInfo;
            outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, output);

            std::pair<armnn::LayerBindingId, armnn::TensorInfo>
                m_OutputBindingInfo(outputBindingInfo.m_BindingId, outputBindingInfo.m_TensorInfo);
            outputBindings.push_back(m_OutputBindingInfo);
        }

        // Load model output labels
        if (modelOutputLabelsPath.empty() || !fs::exists(modelOutputLabelsPath) ||
            !fs::is_regular_file(modelOutputLabelsPath))
        {
            ARMNN_LOG(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
        }
        const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
            LoadModelOutputLabels(modelOutputLabelsPath);

        // Parse begin and end image indices
        std::vector<std::string> imageIndexStrs = armnnUtils::SplitBy(validationRange, ":");
        size_t imageBegIndex;
        size_t imageEndIndex;
        if (imageIndexStrs.size() != 2)
        {
            ARMNN_LOG(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
            return EXIT_FAILURE;
        }
        try
        {
            imageBegIndex = std::stoul(imageIndexStrs[0]);
            imageEndIndex = std::stoul(imageIndexStrs[1]);
        }
        catch (const std::exception& e)
        {
            ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
            return EXIT_FAILURE;
        }
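
        // Note: the default range "1:0" parses to imageBegIndex = 1 and imageEndIndex = 0; an end index
        // of 0 is expanded to the total number of images inside LoadValidationImageFilenamesAndLabels,
        // so the default evaluates every image. A range such as "-r 21:40" evaluates images 21 to 40
        // inclusive.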

        // Validate excludelist file if it's specified
        if (!excludelistPath.empty() &&
            !(fs::exists(excludelistPath) && fs::is_regular_file(excludelistPath)))
        {
            ARMNN_LOG(fatal) << "Invalid path to excludelist file at " << excludelistPath;
            return EXIT_FAILURE;
        }

        fs::path pathToDataDir(dataDir);
        const map<std::string, std::string> imageNameToLabel = LoadValidationImageFilenamesAndLabels(
            validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, excludelistPath);
        armnnUtils::ModelAccuracyChecker checker(imageNameToLabel, modelOutputLabels);

        if (ValidateDirectory(dataDir))
        {
            InferenceModel<armnnDeserializer::IDeserializer, float>::Params params;

            params.m_ModelPath      = modelPath;
            params.m_IsModelBinary  = true;
            params.m_ComputeDevices = computeDevice;
            // Insert inputNames and outputNames into params vector
            params.m_InputBindings.insert(std::end(params.m_InputBindings),
                                          std::begin(inputNames),
                                          std::end(inputNames));
            params.m_OutputBindings.insert(std::end(params.m_OutputBindings),
                                           std::begin(outputNames),
                                           std::end(outputNames));

            using TParser = armnnDeserializer::IDeserializer;
            // If dynamicBackends is empty it will be disabled by default.
            InferenceModel<TParser, float> model(params, false, "");

            // Get input tensor information
            const armnn::TensorInfo& inputTensorInfo   = model.GetInputBindingInfo().second;
            const armnn::TensorShape& inputTensorShape = inputTensorInfo.GetShape();
            const armnn::DataType& inputTensorDataType = inputTensorInfo.GetDataType();
            armnn::DataLayout inputTensorDataLayout;
            if (inputLayout == "NCHW")
            {
                inputTensorDataLayout = armnn::DataLayout::NCHW;
            }
            else if (inputLayout == "NHWC")
            {
                inputTensorDataLayout = armnn::DataLayout::NHWC;
            }
            else
            {
                ARMNN_LOG(fatal) << "Invalid Data layout: " << inputLayout;
                return EXIT_FAILURE;
            }
            const unsigned int inputTensorWidth =
                inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[3] : inputTensorShape[2];
            const unsigned int inputTensorHeight =
                inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[2] : inputTensorShape[1];
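            // For example, an NHWC input of shape {1, 224, 224, 3} takes its height from index 1 and its
            // width from index 2, while the equivalent NCHW shape {1, 3, 224, 224} takes height from
            // index 2 and width from index 3. The 224x224 shape is illustrative only.
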
            // Get output tensor info
            const unsigned int outputNumElements = model.GetOutputSize();
            // Check output tensor shape is valid
            if (modelOutputLabels.size() != outputNumElements)
            {
                ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
                                 << ", mismatches the number of output labels: " << modelOutputLabels.size();
                return EXIT_FAILURE;
            }

            const unsigned int batchSize = 1;
            // Get normalisation parameters
            SupportedFrontend modelFrontend;
            if (modelFormat == "tflite")
            {
                modelFrontend = SupportedFrontend::TFLite;
            }
            else
            {
                ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
                return EXIT_FAILURE;
            }
            const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
            for (const auto& imageEntry : imageNameToLabel)
            {
                const std::string imageName = imageEntry.first;
                std::cout << "Processing image: " << imageName << "\n";

                vector<armnnUtils::TContainer> inputDataContainers;
                vector<armnnUtils::TContainer> outputDataContainers;

                auto imagePath = pathToDataDir / fs::path(imageName);
                switch (inputTensorDataType)
                {
                    case armnn::DataType::Signed32:
                        inputDataContainers.push_back(
                            PrepareImageTensor<int>(imagePath.string(),
                                                    inputTensorWidth, inputTensorHeight,
                                                    normParams,
                                                    batchSize,
                                                    inputTensorDataLayout));
                        outputDataContainers = { vector<int>(outputNumElements) };
                        break;
                    case armnn::DataType::QAsymmU8:
                        inputDataContainers.push_back(
                            PrepareImageTensor<uint8_t>(imagePath.string(),
                                                        inputTensorWidth, inputTensorHeight,
                                                        normParams,
                                                        batchSize,
                                                        inputTensorDataLayout));
                        outputDataContainers = { vector<uint8_t>(outputNumElements) };
                        break;
                    case armnn::DataType::Float32:
                    default:
                        inputDataContainers.push_back(
                            PrepareImageTensor<float>(imagePath.string(),
                                                      inputTensorWidth, inputTensorHeight,
                                                      normParams,
                                                      batchSize,
                                                      inputTensorDataLayout));
                        outputDataContainers = { vector<float>(outputNumElements) };
                        break;
                }

                status = runtime->EnqueueWorkload(networkId,
                                                  armnnUtils::MakeInputTensors(inputBindings, inputDataContainers),
                                                  armnnUtils::MakeOutputTensors(outputBindings, outputDataContainers));

                if (status == armnn::Status::Failure)
                {
                    ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
                }

                checker.AddImageResult<armnnUtils::TContainer>(imageName, outputDataContainers);
            }
        }
        else
        {
            return EXIT_SUCCESS;
        }

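        // GetAccuracy(k) is expected to report top-k accuracy as a percentage: the share of processed
        // images whose ground-truth label appears among the k highest-scoring predictions, so Top-1
        // counts exact matches and Top-5 is the usual ImageNet metric.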
        for (unsigned int i = 1; i <= 5; ++i)
        {
            std::cout << "Top " << i << " Accuracy: " << checker.GetAccuracy(i) << "%" << "\n";
        }

        ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
        return EXIT_SUCCESS;
    }
    catch (const armnn::Exception& e)
    {
        // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
        // exception of type std::length_error.
        // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
        std::cerr << "Armnn Error: " << e.what() << std::endl;
        return EXIT_FAILURE;
    }
    catch (const std::exception& e)
    {
        // Coverity fix: various boost exceptions can be thrown by methods called by this test.
        std::cerr << "WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the "
                     "Accuracy Tool: " << e.what() << std::endl;
        return EXIT_FAILURE;
    }
}

map<std::string, std::string> LoadValidationImageFilenamesAndLabels(const string& validationLabelPath,
                                                                    const string& imageDirectoryPath,
                                                                    size_t begIndex,
                                                                    size_t endIndex,
                                                                    const string& excludelistPath)
{
    // Populate imageFilenames with names of all .JPEG, .PNG images
    std::vector<std::string> imageFilenames;
    for (const auto& imageEntry : fs::directory_iterator(fs::path(imageDirectoryPath)))
    {
        fs::path imagePath = imageEntry.path();

        // Get extension and convert to uppercase
        std::string imageExtension = imagePath.extension().string();
        std::transform(imageExtension.begin(), imageExtension.end(), imageExtension.begin(), ::toupper);

        if (fs::is_regular_file(imagePath) && (imageExtension == ".JPEG" || imageExtension == ".PNG"))
        {
            imageFilenames.push_back(imagePath.filename().string());
        }
    }
    if (imageFilenames.empty())
    {
        throw armnn::Exception("No image file (JPEG, PNG) found at " + imageDirectoryPath);
    }

    // Sort the image filenames lexicographically
    std::sort(imageFilenames.begin(), imageFilenames.end());

    std::cout << imageFilenames.size() << " images found at " << imageDirectoryPath << std::endl;

    // Get default end index
    if (begIndex < 1 || endIndex > imageFilenames.size())
    {
        throw armnn::Exception("Invalid image index range");
    }
    endIndex = endIndex == 0 ? imageFilenames.size() : endIndex;
    if (begIndex > endIndex)
    {
        throw armnn::Exception("Invalid image index range");
    }

    // Load excludelist if there is one
    std::vector<unsigned int> excludelist;
    if (!excludelistPath.empty())
    {
        std::ifstream excludelistFile(excludelistPath);
        unsigned int index;
        while (excludelistFile >> index)
        {
            excludelist.push_back(index);
        }
    }
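
    // The excludelist is read as whitespace-separated 1-based image indices, e.g. a file containing
    // "5 12 40" (or one index per line). The skip logic below walks it in order, so it is assumed to
    // be sorted in ascending order, matching the lexicographically sorted image list.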

    // Load ground truth labels and pair them with corresponding image names
    std::string classification;
    map<std::string, std::string> imageNameToLabel;
    ifstream infile(validationLabelPath);
    size_t imageIndex = begIndex;
    size_t excludelistIndexCount = 0;
    while (std::getline(infile, classification))
    {
        if (imageIndex > endIndex)
        {
            break;
        }
        // If current imageIndex is included in excludelist, skip the current image
        if (excludelistIndexCount < excludelist.size() && imageIndex == excludelist[excludelistIndexCount])
        {
            ++imageIndex;
            ++excludelistIndexCount;
            continue;
        }
        imageNameToLabel.insert(std::pair<std::string, std::string>(imageFilenames[imageIndex - 1], classification));
        ++imageIndex;
    }
    std::cout << excludelistIndexCount << " images in excludelist" << std::endl;
    std::cout << imageIndex - begIndex - excludelistIndexCount << " images to be loaded" << std::endl;
    return imageNameToLabel;
}

std::vector<armnnUtils::LabelCategoryNames> LoadModelOutputLabels(const std::string& modelOutputLabelsPath)
{
    std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels;
    ifstream modelOutputLabelsFile(modelOutputLabelsPath);
    std::string line;
    while (std::getline(modelOutputLabelsFile, line))
    {
        armnnUtils::LabelCategoryNames tokens                  = armnnUtils::SplitBy(line, ":");
        armnnUtils::LabelCategoryNames predictionCategoryNames = armnnUtils::SplitBy(tokens.back(), ",");
        std::transform(predictionCategoryNames.begin(), predictionCategoryNames.end(),
                       predictionCategoryNames.begin(),
                       [](const std::string& category) { return armnnUtils::Strip(category); });
        modelOutputLabels.push_back(predictionCategoryNames);
    }
    return modelOutputLabels;
}