ArmNN 20.08
ModelAccuracyTool-Armnn.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "../ImageTensorGenerator/ImageTensorGenerator.hpp"
#include "../InferenceTest.hpp"
#include "ModelAccuracyChecker.hpp"
#include "armnnDeserializer/IDeserializer.hpp"
#include <Filesystem.hpp>

#include <boost/program_options/variables_map.hpp>
#include <boost/range/iterator_range.hpp>
#include <map>

using namespace armnn::test;
/** Load image names and ground-truth labels from the image directory and the ground-truth label file
 *
 * @pre \p validationLabelPath exists and is a valid regular file
 * @pre \p imageDirectoryPath exists and is a valid directory
 * @pre the labels in the validation file correspond to the images sorted in lexicographical order of their file names
 * @pre image indices start at 1
 * @pre \p begIndex and \p endIndex are end-inclusive
 *
 * @param[in] validationLabelPath Path to the validation label file
 * @param[in] imageDirectoryPath  Path to the directory containing the validation images
 * @param[in] begIndex            Begin index of the images to be loaded. Inclusive
 * @param[in] endIndex            End index of the images to be loaded. Inclusive
 * @param[in] blacklistPath       Path to the blacklist file
 * @return A map from image file names to their corresponding ground-truth labels
 */
map<std::string, std::string> LoadValidationImageFilenamesAndLabels(const string& validationLabelPath,
                                                                    const string& imageDirectoryPath,
                                                                    size_t begIndex = 0,
                                                                    size_t endIndex = 0,
                                                                    const string& blacklistPath = "");
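
// Illustrative usage (all paths are hypothetical): load the ground-truth labels for validation
// images 1 to 100 inclusive, skipping any indices listed in the blacklist file:
//
//     const auto imageNameToLabel = LoadValidationImageFilenamesAndLabels(
//         "val_ground_truth.txt", "validation_images/", 1, 100, "blacklist.txt");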

/** Load model output labels from file
 *
 * @pre \p modelOutputLabelsPath exists and is a regular file
 *
 * @param[in] modelOutputLabelsPath Path to the model output labels file
 * @return A vector of labels, where each label is described by a list of category names
 */
std::vector<armnnUtils::LabelCategoryNames> LoadModelOutputLabels(const std::string& modelOutputLabelsPath);
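
// The parser below expects one label per line, with an optional identifier before a ':' and the
// category names separated by commas. Illustrative file content and call (file name is hypothetical):
//
//     0:tench, Tinca tinca
//     1:goldfish, Carassius auratus
//
//     const auto modelOutputLabels = LoadModelOutputLabels("model_output_labels.txt");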

int main(int argc, char* argv[])
{
    try
    {
        armnn::LogSeverity level = armnn::LogSeverity::Debug;
        armnn::ConfigureLogging(true, true, level);

        // Set up program options
        namespace po = boost::program_options;
57 
58  std::vector<armnn::BackendId> computeDevice;
59  std::vector<armnn::BackendId> defaultBackends = {armnn::Compute::CpuAcc, armnn::Compute::CpuRef};
60  std::string modelPath;
61  std::string modelFormat;
62  std::string dataDir;
63  std::string inputName;
64  std::string inputLayout;
65  std::string outputName;
66  std::string modelOutputLabelsPath;
67  std::string validationLabelPath;
68  std::string validationRange;
69  std::string blacklistPath;
70 
71  const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
73 
        po::options_description desc("Options");
        try
        {
            // Adds the generic options needed to run the Accuracy Tool.
            desc.add_options()
                ("help,h", "Display help messages")
                ("model-path,m", po::value<std::string>(&modelPath)->required(), "Path to armnn format model file")
                ("model-format,f", po::value<std::string>(&modelFormat)->required(),
                 "The model format. Supported values: caffe, tensorflow, tflite")
                ("input-name,i", po::value<std::string>(&inputName)->required(),
                 "Identifier of the input tensors in the network, separated by comma.")
                ("output-name,o", po::value<std::string>(&outputName)->required(),
                 "Identifier of the output tensors in the network, separated by comma.")
                ("data-dir,d", po::value<std::string>(&dataDir)->required(),
                 "Path to directory containing the ImageNet test data")
                ("model-output-labels,p", po::value<std::string>(&modelOutputLabelsPath)->required(),
                 "Path to model output labels file.")
                ("validation-labels-path,v", po::value<std::string>(&validationLabelPath)->required(),
                 "Path to ImageNet validation label file")
                ("data-layout,l", po::value<std::string>(&inputLayout)->default_value("NHWC"),
                 "Data layout. Supported values: NHWC, NCHW. Default: NHWC")
                ("compute,c", po::value<std::vector<armnn::BackendId>>(&computeDevice)->default_value(defaultBackends),
                 backendsMessage.c_str())
                ("validation-range,r", po::value<std::string>(&validationRange)->default_value("1:0"),
                 "The range of the images to be evaluated, specified in the form <begin index>:<end index>. "
                 "The index starts at 1 and the range is inclusive. "
                 "By default the evaluation will be performed on all images.")
                ("blacklist-path,b", po::value<std::string>(&blacklistPath)->default_value(""),
                 "Path to a blacklist file where each line denotes the index of an image to be "
                 "excluded from evaluation.");
        }
        catch (const std::exception& e)
        {
            // Coverity points out that default_value(...) can throw a bad_lexical_cast,
            // and that desc.add_options() can throw boost::io::too_few_args.
            // They really won't in any of these cases.
            ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
            std::cerr << "Fatal internal error: " << e.what() << std::endl;
            return 1;
        }
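
        // Illustrative invocation (binary name, model, input/output names and paths are hypothetical):
        //
        //     ModelAccuracyTool-Armnn -m mobilenet_v1.armnn -f tflite -i input -o output \
        //         -d /data/imagenet/validation -p model_output_labels.txt \
        //         -v val_ground_truth.txt -c CpuAcc -r 1:100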

        po::variables_map vm;
        try
        {
            po::store(po::parse_command_line(argc, argv, desc), vm);

            if (vm.count("help"))
            {
                std::cout << desc << std::endl;
                return 1;
            }
            po::notify(vm);
        }
        catch (po::error& e)
        {
            std::cerr << e.what() << std::endl << std::endl;
            std::cerr << desc << std::endl;
            return 1;
        }

        // Check that the requested backends are all valid
        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
        {
            ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
                             << invalidBackends;
            return EXIT_FAILURE;
        }
        armnn::Status status;

        // Create runtime
        armnn::IRuntime::CreationOptions options;
        armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
        std::ifstream file(modelPath);

        // Create Parser
        using IParser = armnnDeserializer::IDeserializer;
        auto armnnparser(IParser::Create());

        // Create a network
        armnn::INetworkPtr network = armnnparser->CreateNetworkFromBinary(file);

        // Optimizes the network.
        armnn::IOptimizedNetworkPtr optimizedNet(nullptr, nullptr);
        try
        {
            optimizedNet = armnn::Optimize(*network, computeDevice, runtime->GetDeviceSpec());
        }
        catch (const armnn::Exception& e)
        {
            std::stringstream message;
            message << "armnn::Exception (" << e.what() << ") caught from optimize.";
            ARMNN_LOG(fatal) << message.str();
            return 1;
        }

        // Loads the network into the runtime.
        armnn::NetworkId networkId;
        status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
        if (status == armnn::Status::Failure)
        {
            ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
            return 1;
        }

        // Set up Network
        using BindingPointInfo = armnn::BindingPointInfo;

        const armnnDeserializer::BindingPointInfo
            inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, inputName);

        std::pair<armnn::LayerBindingId, armnn::TensorInfo>
            m_InputBindingInfo(inputBindingInfo.m_BindingId, inputBindingInfo.m_TensorInfo);
        std::vector<BindingPointInfo> inputBindings = { m_InputBindingInfo };

        const armnnDeserializer::BindingPointInfo
            outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, outputName);

        std::pair<armnn::LayerBindingId, armnn::TensorInfo>
            m_OutputBindingInfo(outputBindingInfo.m_BindingId, outputBindingInfo.m_TensorInfo);
        std::vector<BindingPointInfo> outputBindings = { m_OutputBindingInfo };

        // Load model output labels
        if (modelOutputLabelsPath.empty() || !fs::exists(modelOutputLabelsPath) ||
            !fs::is_regular_file(modelOutputLabelsPath))
        {
            ARMNN_LOG(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
        }
        const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
            LoadModelOutputLabels(modelOutputLabelsPath);

        // Parse begin and end image indices
        std::vector<std::string> imageIndexStrs = armnnUtils::SplitBy(validationRange, ":");
        size_t imageBegIndex;
        size_t imageEndIndex;
        if (imageIndexStrs.size() != 2)
        {
            ARMNN_LOG(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
            return 1;
        }
        try
        {
            imageBegIndex = std::stoul(imageIndexStrs[0]);
            imageEndIndex = std::stoul(imageIndexStrs[1]);
        }
        catch (const std::exception& e)
        {
            ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
            return 1;
        }

        // Validate blacklist file if it's specified
        if (!blacklistPath.empty() &&
            !(fs::exists(blacklistPath) && fs::is_regular_file(blacklistPath)))
        {
            ARMNN_LOG(fatal) << "Invalid path to blacklist file at " << blacklistPath;
            return 1;
        }

        path pathToDataDir(dataDir);
        const map<std::string, std::string> imageNameToLabel = LoadValidationImageFilenamesAndLabels(
            validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, blacklistPath);
        armnnUtils::ModelAccuracyChecker checker(imageNameToLabel, modelOutputLabels);
        using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<uint8_t>>;

        if (ValidateDirectory(dataDir))
        {
            InferenceModel<armnnDeserializer::IDeserializer, float>::Params params;
            params.m_ModelPath = modelPath;
            params.m_IsModelBinary = true;
            params.m_ComputeDevices = computeDevice;
            params.m_InputBindings.push_back(inputName);
            params.m_OutputBindings.push_back(outputName);

            using TParser = armnnDeserializer::IDeserializer;
            InferenceModel<TParser, float> model(params, false);
            // Get input tensor information
            const armnn::TensorInfo& inputTensorInfo = model.GetInputBindingInfo().second;
            const armnn::TensorShape& inputTensorShape = inputTensorInfo.GetShape();
            const armnn::DataType& inputTensorDataType = inputTensorInfo.GetDataType();
            armnn::DataLayout inputTensorDataLayout;
            if (inputLayout == "NCHW")
            {
                inputTensorDataLayout = armnn::DataLayout::NCHW;
            }
            else if (inputLayout == "NHWC")
            {
                inputTensorDataLayout = armnn::DataLayout::NHWC;
            }
            else
            {
                ARMNN_LOG(fatal) << "Invalid data layout: " << inputLayout;
                return 1;
            }
            const unsigned int inputTensorWidth =
                inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[3] : inputTensorShape[2];
            const unsigned int inputTensorHeight =
                inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[2] : inputTensorShape[1];
            // Get output tensor info
            const unsigned int outputNumElements = model.GetOutputSize();
            // Check that the output tensor shape is valid
            if (modelOutputLabels.size() != outputNumElements)
            {
                ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
                                 << " does not match the number of output labels: " << modelOutputLabels.size();
                return 1;
            }

            const unsigned int batchSize = 1;
            // Get normalisation parameters
            SupportedFrontend modelFrontend;
            if (modelFormat == "caffe")
            {
                modelFrontend = SupportedFrontend::Caffe;
            }
            else if (modelFormat == "tensorflow")
            {
                modelFrontend = SupportedFrontend::TensorFlow;
            }
            else if (modelFormat == "tflite")
            {
                modelFrontend = SupportedFrontend::TFLite;
            }
            else
            {
                ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
                return 1;
            }
            const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
            for (const auto& imageEntry : imageNameToLabel)
            {
                const std::string imageName = imageEntry.first;
                std::cout << "Processing image: " << imageName << "\n";

                vector<TContainer> inputDataContainers;
                vector<TContainer> outputDataContainers;

                auto imagePath = pathToDataDir / fs::path(imageName);
                switch (inputTensorDataType)
                {
                    case armnn::DataType::Signed32:
                        inputDataContainers.push_back(
                            PrepareImageTensor<int>(imagePath.string(),
                                                    inputTensorWidth, inputTensorHeight,
                                                    normParams,
                                                    batchSize,
                                                    inputTensorDataLayout));
                        outputDataContainers = { vector<int>(outputNumElements) };
                        break;
                    case armnn::DataType::QAsymmU8:
                        inputDataContainers.push_back(
                            PrepareImageTensor<uint8_t>(imagePath.string(),
                                                        inputTensorWidth, inputTensorHeight,
                                                        normParams,
                                                        batchSize,
                                                        inputTensorDataLayout));
                        outputDataContainers = { vector<uint8_t>(outputNumElements) };
                        break;
                    case armnn::DataType::Float32:
                    default:
                        inputDataContainers.push_back(
                            PrepareImageTensor<float>(imagePath.string(),
                                                      inputTensorWidth, inputTensorHeight,
                                                      normParams,
                                                      batchSize,
                                                      inputTensorDataLayout));
                        outputDataContainers = { vector<float>(outputNumElements) };
                        break;
                }

                status = runtime->EnqueueWorkload(networkId,
                                                  armnnUtils::MakeInputTensors(inputBindings, inputDataContainers),
                                                  armnnUtils::MakeOutputTensors(outputBindings, outputDataContainers));

                if (status == armnn::Status::Failure)
                {
                    ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
                }

                checker.AddImageResult<TContainer>(imageName, outputDataContainers);
            }
        }
        else
        {
            return 1;
        }

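        // GetAccuracy(N) reports the Top-N accuracy: the percentage of evaluated images whose
        // ground-truth label appears among the N highest-scoring predictions accumulated above.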
        for (unsigned int i = 1; i <= 5; ++i)
        {
            std::cout << "Top " << i << " Accuracy: " << checker.GetAccuracy(i) << "%" << "\n";
        }

        ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
        return 0;
    }
    catch (const armnn::Exception& e)
    {
        // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
        // exception of type std::length_error.
        // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
        std::cerr << "Armnn Error: " << e.what() << std::endl;
        return 1;
    }
    catch (const std::exception& e)
    {
        // Coverity fix: various boost exceptions can be thrown by methods called by this test.
        std::cerr << "WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the "
                     "Accuracy Tool: " << e.what() << std::endl;
        return 1;
    }
}

map<std::string, std::string> LoadValidationImageFilenamesAndLabels(const string& validationLabelPath,
                                                                    const string& imageDirectoryPath,
                                                                    size_t begIndex,
                                                                    size_t endIndex,
                                                                    const string& blacklistPath)
{
    // Populate imageFilenames with the names of all .JPEG and .PNG images
    std::vector<std::string> imageFilenames;
    for (const auto& imageEntry :
         boost::make_iterator_range(fs::directory_iterator(fs::path(imageDirectoryPath))))
    {
        fs::path imagePath = imageEntry.path();
        std::string imageExtension = boost::to_upper_copy<std::string>(imagePath.extension().string());
        if (fs::is_regular_file(imagePath) && (imageExtension == ".JPEG" || imageExtension == ".PNG"))
        {
            imageFilenames.push_back(imagePath.filename().string());
        }
    }
    if (imageFilenames.empty())
    {
        throw armnn::Exception("No image file (JPEG, PNG) found at " + imageDirectoryPath);
    }

    // Sort the image filenames lexicographically
    std::sort(imageFilenames.begin(), imageFilenames.end());

    std::cout << imageFilenames.size() << " images found at " << imageDirectoryPath << std::endl;

    // Validate the requested index range and apply the default end index
    if (begIndex < 1 || endIndex > imageFilenames.size())
    {
        throw armnn::Exception("Invalid image index range");
    }
    endIndex = endIndex == 0 ? imageFilenames.size() : endIndex;
    if (begIndex > endIndex)
    {
        throw armnn::Exception("Invalid image index range");
    }

    // Load the blacklist if there is one
    std::vector<unsigned int> blacklist;
    if (!blacklistPath.empty())
    {
        std::ifstream blacklistFile(blacklistPath);
        unsigned int index;
        while (blacklistFile >> index)
        {
            blacklist.push_back(index);
        }
    }

    // Load the ground-truth labels and pair them with the corresponding image names
    std::string classification;
    map<std::string, std::string> imageNameToLabel;
    ifstream infile(validationLabelPath);
    size_t imageIndex = begIndex;
    size_t blacklistIndexCount = 0;
    while (std::getline(infile, classification))
    {
        if (imageIndex > endIndex)
        {
            break;
        }
        // If the current imageIndex is included in the blacklist, skip the current image
        if (blacklistIndexCount < blacklist.size() && imageIndex == blacklist[blacklistIndexCount])
        {
            ++imageIndex;
            ++blacklistIndexCount;
            continue;
        }
        imageNameToLabel.insert(std::pair<std::string, std::string>(imageFilenames[imageIndex - 1], classification));
        ++imageIndex;
    }
    std::cout << blacklistIndexCount << " images blacklisted" << std::endl;
    std::cout << imageIndex - begIndex - blacklistIndexCount << " images to be loaded" << std::endl;
    return imageNameToLabel;
}

std::vector<armnnUtils::LabelCategoryNames> LoadModelOutputLabels(const std::string& modelOutputLabelsPath)
{
    std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels;
    ifstream modelOutputLabelsFile(modelOutputLabelsPath);
    std::string line;
    while (std::getline(modelOutputLabelsFile, line))
    {
        std::vector<std::string> tokens = armnnUtils::SplitBy(line, ":");
        armnnUtils::LabelCategoryNames predictionCategoryNames = armnnUtils::SplitBy(tokens.back(), ",");
        std::transform(predictionCategoryNames.begin(), predictionCategoryNames.end(),
                       predictionCategoryNames.begin(),
                       [](const std::string& category) { return armnnUtils::Strip(category); });
        modelOutputLabels.push_back(predictionCategoryNames);
    }
    return modelOutputLabels;
}