ArmNN 20.05
ModelAccuracyTool-Armnn.cpp File Reference
#include "../ImageTensorGenerator/ImageTensorGenerator.hpp"
#include "../InferenceTest.hpp"
#include "ModelAccuracyChecker.hpp"
#include "armnnDeserializer/IDeserializer.hpp"
#include <boost/filesystem.hpp>
#include <boost/program_options/variables_map.hpp>
#include <boost/range/iterator_range.hpp>
#include <map>


Functions

map< std::string, std::string > LoadValidationImageFilenamesAndLabels (const string &validationLabelPath, const string &imageDirectoryPath, size_t begIndex=0, size_t endIndex=0, const string &blacklistPath="")
 Load image names and ground-truth labels from the image directory and the ground truth label file. More...
 
std::vector< armnnUtils::LabelCategoryNames > LoadModelOutputLabels (const std::string &modelOutputLabelsPath)
 Load model output labels from file. More...
 
int main (int argc, char *argv[])
 

Function Documentation

◆ LoadModelOutputLabels()

std::vector< armnnUtils::LabelCategoryNames > LoadModelOutputLabels ( const std::string &  modelOutputLabelsPath)

Load model output labels from file.

Precondition
modelOutputLabelsPath exists and is a regular file
Parameters
[in]  modelOutputLabelsPath  Path to model output labels file
Returns
A vector of labels, each of which is in turn described by a list of category names

Definition at line 465 of file ModelAccuracyTool-Armnn.cpp.

References armnnUtils::SplitBy(), and armnnUtils::Strip().

Referenced by main().

466 {
467  std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels;
468  ifstream modelOutputLablesFile(modelOutputLabelsPath);
469  std::string line;
470  while (std::getline(modelOutputLablesFile, line))
471  {
472  armnnUtils::LabelCategoryNames tokens = armnnUtils::SplitBy(line, ":");
473  armnnUtils::LabelCategoryNames predictionCategoryNames = armnnUtils::SplitBy(tokens.back(), ",");
474  std::transform(predictionCategoryNames.begin(), predictionCategoryNames.end(), predictionCategoryNames.begin(),
475  [](const std::string& category) { return armnnUtils::Strip(category); });
476  modelOutputLabels.push_back(predictionCategoryNames);
477  }
478  return modelOutputLabels;
479 }
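
As a usage sketch: each line of the labels file is split on ':', only the last token is kept, and that token is split on ',' into stripped category names. The file name and the example line below are illustrative assumptions, not part of this documentation.

    // Hypothetical labels file where a line reads "0:tench, Tinca tinca";
    // that line yields the category names {"tench", "Tinca tinca"}.
    const std::vector<armnnUtils::LabelCategoryNames> outputLabels =
        LoadModelOutputLabels("model_output_labels.txt");
    std::cout << outputLabels.size() << " model output labels loaded" << std::endl;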

◆ LoadValidationImageFilenamesAndLabels()

map< std::string, std::string > LoadValidationImageFilenamesAndLabels ( const string &  validationLabelPath,
const string &  imageDirectoryPath,
size_t  begIndex = 0,
size_t  endIndex = 0,
const string &  blacklistPath = "" 
)

Load image names and ground-truth labels from the image directory and the ground truth label file.

Precondition
validationLabelPath exists and is a valid regular file
imageDirectoryPath exists and is a valid directory
labels in the validation file correspond to the images in the directory when sorted lexicographically by image name
image indices start at 1
begIndex and endIndex are inclusive
Parameters
[in]  validationLabelPath  Path to validation label file
[in]  imageDirectoryPath  Path to directory containing validation images
[in]  begIndex  Begin index of images to be loaded. Inclusive
[in]  endIndex  End index of images to be loaded. Inclusive
[in]  blacklistPath  Path to blacklist file
Returns
A map mapping image file names to their corresponding ground-truth labels

Definition at line 387 of file ModelAccuracyTool-Armnn.cpp.

Referenced by main().

392 {
393  // Populate imageFilenames with names of all .JPEG, .PNG images
394  std::vector<std::string> imageFilenames;
395  for (const auto& imageEntry :
396  boost::make_iterator_range(boost::filesystem::directory_iterator(boost::filesystem::path(imageDirectoryPath))))
397  {
398  boost::filesystem::path imagePath = imageEntry.path();
399  std::string imageExtension = boost::to_upper_copy<std::string>(imagePath.extension().string());
400  if (boost::filesystem::is_regular_file(imagePath) && (imageExtension == ".JPEG" || imageExtension == ".PNG"))
401  {
402  imageFilenames.push_back(imagePath.filename().string());
403  }
404  }
405  if (imageFilenames.empty())
406  {
407  throw armnn::Exception("No image file (JPEG, PNG) found at " + imageDirectoryPath);
408  }
409 
410  // Sort the image filenames lexicographically
411  std::sort(imageFilenames.begin(), imageFilenames.end());
412 
413  std::cout << imageFilenames.size() << " images found at " << imageDirectoryPath << std::endl;
414 
415  // Get default end index
416  if (begIndex < 1 || endIndex > imageFilenames.size())
417  {
418  throw armnn::Exception("Invalid image index range");
419  }
420  endIndex = endIndex == 0 ? imageFilenames.size() : endIndex;
421  if (begIndex > endIndex)
422  {
423  throw armnn::Exception("Invalid image index range");
424  }
425 
426  // Load blacklist if there is one
427  std::vector<unsigned int> blacklist;
428  if (!blacklistPath.empty())
429  {
430  std::ifstream blacklistFile(blacklistPath);
431  unsigned int index;
432  while (blacklistFile >> index)
433  {
434  blacklist.push_back(index);
435  }
436  }
437 
438  // Load ground truth labels and pair them with corresponding image names
439  std::string classification;
440  map<std::string, std::string> imageNameToLabel;
441  ifstream infile(validationLabelPath);
442  size_t imageIndex = begIndex;
443  size_t blacklistIndexCount = 0;
444  while (std::getline(infile, classification))
445  {
446  if (imageIndex > endIndex)
447  {
448  break;
449  }
450  // If current imageIndex is included in blacklist, skip the current image
451  if (blacklistIndexCount < blacklist.size() && imageIndex == blacklist[blacklistIndexCount])
452  {
453  ++imageIndex;
454  ++blacklistIndexCount;
455  continue;
456  }
457  imageNameToLabel.insert(std::pair<std::string, std::string>(imageFilenames[imageIndex - 1], classification));
458  ++imageIndex;
459  }
460  std::cout << blacklistIndexCount << " images blacklisted" << std::endl;
461  std::cout << imageIndex - begIndex - blacklistIndexCount << " images to be loaded" << std::endl;
462  return imageNameToLabel;
463 }
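
As a usage sketch, the call below would load ground-truth labels for validation images 1 to 100 and skip any indices listed in a blacklist file; all file and directory names here are placeholder assumptions.

    // Indices are 1-based and the range is inclusive (see the preconditions above).
    const map<std::string, std::string> imageNameToLabel =
        LoadValidationImageFilenamesAndLabels("val_labels.txt",     // one ground-truth label per line
                                              "validation_images/", // directory of .JPEG/.PNG images
                                              1, 100,               // begIndex, endIndex
                                              "blacklist.txt");     // optional: one image index per line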

◆ main()

int main ( int  argc,
char *  argv[] 
)

Definition at line 48 of file ModelAccuracyTool-Armnn.cpp.

References ARMNN_ASSERT_MSG, ARMNN_LOG, armnn::BackendRegistryInstance(), Caffe, armnn::ConfigureLogging(), armnn::CpuAcc, armnn::CpuRef, IRuntime::Create(), armnn::Debug, defaultBackends, armnn::error, armnn::Failure, armnn::Float32, BackendRegistry::GetBackendIdsAsString(), TensorInfo::GetDataType(), InferenceModel< IParser, TDataType >::GetInputBindingInfo(), GetNormalizationParameters(), InferenceModel< IParser, TDataType >::GetOutputSize(), TensorInfo::GetShape(), LoadModelOutputLabels(), LoadValidationImageFilenamesAndLabels(), BindingPointInfo::m_BindingId, Params::m_ComputeDevices, Params::m_InputBindings, Params::m_IsModelBinary, Params::m_ModelPath, Params::m_OutputBindings, BindingPointInfo::m_TensorInfo, armnnUtils::MakeInputTensors(), armnnUtils::MakeOutputTensors(), armnn::NCHW, armnn::NHWC, armnn::Optimize(), options, PrepareImageTensor< float >(), PrepareImageTensor< int >(), PrepareImageTensor< uint8_t >(), armnn::QAsymmU8, armnn::Signed32, armnnUtils::SplitBy(), TensorFlow, TFLite, armnn::test::ValidateDirectory(), and Exception::what().

49 {
50  try
51  {
52  using namespace boost::filesystem;
53  armnn::LogSeverity level = armnn::LogSeverity::Debug;
54  armnn::ConfigureLogging(true, true, level);
55 
56  // Set-up program Options
57  namespace po = boost::program_options;
58 
59  std::vector<armnn::BackendId> computeDevice;
60  std::vector<armnn::BackendId> defaultBackends = {armnn::Compute::CpuAcc, armnn::Compute::CpuRef};
61  std::string modelPath;
62  std::string modelFormat;
63  std::string dataDir;
64  std::string inputName;
65  std::string inputLayout;
66  std::string outputName;
67  std::string modelOutputLabelsPath;
68  std::string validationLabelPath;
69  std::string validationRange;
70  std::string blacklistPath;
71 
72  const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
73  + armnn::BackendRegistryInstance().GetBackendIdsAsString();
74 
75  po::options_description desc("Options");
76  try
77  {
78  // Adds generic options needed to run Accuracy Tool.
79  desc.add_options()
80  ("help,h", "Display help messages")
81  ("model-path,m", po::value<std::string>(&modelPath)->required(), "Path to armnn format model file")
82  ("model-format,f", po::value<std::string>(&modelFormat)->required(),
83  "The model format. Supported values: caffe, tensorflow, tflite")
84  ("input-name,i", po::value<std::string>(&inputName)->required(),
85  "Identifier of the input tensors in the network separated by comma.")
86  ("output-name,o", po::value<std::string>(&outputName)->required(),
87  "Identifier of the output tensors in the network separated by comma.")
88  ("data-dir,d", po::value<std::string>(&dataDir)->required(),
89  "Path to directory containing the ImageNet test data")
90  ("model-output-labels,p", po::value<std::string>(&modelOutputLabelsPath)->required(),
91  "Path to model output labels file.")
92  ("validation-labels-path,v", po::value<std::string>(&validationLabelPath)->required(),
93  "Path to ImageNet Validation Label file")
94  ("data-layout,l", po::value<std::string>(&inputLayout)->default_value("NHWC"),
95  "Data layout. Supported value: NHWC, NCHW. Default: NHWC")
96  ("compute,c", po::value<std::vector<armnn::BackendId>>(&computeDevice)->default_value(defaultBackends),
97  backendsMessage.c_str())
98  ("validation-range,r", po::value<std::string>(&validationRange)->default_value("1:0"),
99  "The range of the images to be evaluated. Specified in the form <begin index>:<end index>."
100  "The index starts at 1 and the range is inclusive."
101  "By default the evaluation will be performed on all images.")
102  ("blacklist-path,b", po::value<std::string>(&blacklistPath)->default_value(""),
103  "Path to a blacklist file where each line denotes the index of an image to be "
104  "excluded from evaluation.");
105  }
106  catch (const std::exception& e)
107  {
108  // Coverity points out that default_value(...) can throw a bad_lexical_cast,
109  // and that desc.add_options() can throw boost::io::too_few_args.
110  // They really won't in any of these cases.
111  ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
112  std::cerr << "Fatal internal error: " << e.what() << std::endl;
113  return 1;
114  }
115 
116  po::variables_map vm;
117  try
118  {
119  po::store(po::parse_command_line(argc, argv, desc), vm);
120 
121  if (vm.count("help"))
122  {
123  std::cout << desc << std::endl;
124  return 1;
125  }
126  po::notify(vm);
127  }
128  catch (po::error& e)
129  {
130  std::cerr << e.what() << std::endl << std::endl;
131  std::cerr << desc << std::endl;
132  return 1;
133  }
134 
135  // Check if the requested backend are all valid
136  std::string invalidBackends;
137  if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
138  {
139  ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
140  << invalidBackends;
141  return EXIT_FAILURE;
142  }
143  armnn::Status status;
144 
145  // Create runtime
146  armnn::IRuntime::CreationOptions options;
147  armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
148  std::ifstream file(modelPath);
149 
150  // Create Parser
151  using IParser = armnnDeserializer::IDeserializer;
152  auto armnnparser(IParser::Create());
153 
154  // Create a network
155  armnn::INetworkPtr network = armnnparser->CreateNetworkFromBinary(file);
156 
157  // Optimizes the network.
158  armnn::IOptimizedNetworkPtr optimizedNet(nullptr, nullptr);
159  try
160  {
161  optimizedNet = armnn::Optimize(*network, computeDevice, runtime->GetDeviceSpec());
162  }
163  catch (armnn::Exception& e)
164  {
165  std::stringstream message;
166  message << "armnn::Exception (" << e.what() << ") caught from optimize.";
167  ARMNN_LOG(fatal) << message.str();
168  return 1;
169  }
170 
171  // Loads the network into the runtime.
172  armnn::NetworkId networkId;
173  status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
174  if (status == armnn::Status::Failure)
175  {
176  ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
177  return 1;
178  }
179 
180  // Set up Network
181  using BindingPointInfo = armnn::BindingPointInfo;
182 
183  const armnnDeserializer::BindingPointInfo&
184  inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, inputName);
185 
186  std::pair<armnn::LayerBindingId, armnn::TensorInfo>
187  m_InputBindingInfo(inputBindingInfo.m_BindingId, inputBindingInfo.m_TensorInfo);
188  std::vector<BindingPointInfo> inputBindings = { m_InputBindingInfo };
189 
190  const armnnDeserializer::BindingPointInfo&
191  outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, outputName);
192 
193  std::pair<armnn::LayerBindingId, armnn::TensorInfo>
194  m_OutputBindingInfo(outputBindingInfo.m_BindingId, outputBindingInfo.m_TensorInfo);
195  std::vector<BindingPointInfo> outputBindings = { m_OutputBindingInfo };
196 
197  // Load model output labels
198  if (modelOutputLabelsPath.empty() || !boost::filesystem::exists(modelOutputLabelsPath) ||
199  !boost::filesystem::is_regular_file(modelOutputLabelsPath))
200  {
201  ARMNN_LOG(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
202  }
203  const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
204  LoadModelOutputLabels(modelOutputLabelsPath);
205 
206  // Parse begin and end image indices
207  std::vector<std::string> imageIndexStrs = armnnUtils::SplitBy(validationRange, ":");
208  size_t imageBegIndex;
209  size_t imageEndIndex;
210  if (imageIndexStrs.size() != 2)
211  {
212  ARMNN_LOG(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
213  return 1;
214  }
215  try
216  {
217  imageBegIndex = std::stoul(imageIndexStrs[0]);
218  imageEndIndex = std::stoul(imageIndexStrs[1]);
219  }
220  catch (const std::exception& e)
221  {
222  ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
223  return 1;
224  }
225 
226  // Validate blacklist file if it's specified
227  if (!blacklistPath.empty() &&
228  !(boost::filesystem::exists(blacklistPath) && boost::filesystem::is_regular_file(blacklistPath)))
229  {
230  ARMNN_LOG(fatal) << "Invalid path to blacklist file at " << blacklistPath;
231  return 1;
232  }
233 
234  path pathToDataDir(dataDir);
235  const map<std::string, std::string> imageNameToLabel = LoadValidationImageFilenamesAndLabels(
236  validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, blacklistPath);
237  armnnUtils::ModelAccuracyChecker checker(imageNameToLabel, modelOutputLabels);
238  using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<uint8_t>>;
239 
240  if (ValidateDirectory(dataDir))
241  {
242  InferenceModel<armnnDeserializer::IDeserializer, float>::Params params;
243  params.m_ModelPath = modelPath;
244  params.m_IsModelBinary = true;
245  params.m_ComputeDevices = computeDevice;
246  params.m_InputBindings.push_back(inputName);
247  params.m_OutputBindings.push_back(outputName);
248 
249  using TParser = armnnDeserializer::IDeserializer;
250  InferenceModel<TParser, float> model(params, false);
251  // Get input tensor information
252  const armnn::TensorInfo& inputTensorInfo = model.GetInputBindingInfo().second;
253  const armnn::TensorShape& inputTensorShape = inputTensorInfo.GetShape();
254  const armnn::DataType& inputTensorDataType = inputTensorInfo.GetDataType();
255  armnn::DataLayout inputTensorDataLayout;
256  if (inputLayout == "NCHW")
257  {
258  inputTensorDataLayout = armnn::DataLayout::NCHW;
259  }
260  else if (inputLayout == "NHWC")
261  {
262  inputTensorDataLayout = armnn::DataLayout::NHWC;
263  }
264  else
265  {
266  ARMNN_LOG(fatal) << "Invalid Data layout: " << inputLayout;
267  return 1;
268  }
269  const unsigned int inputTensorWidth =
270  inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[3] : inputTensorShape[2];
271  const unsigned int inputTensorHeight =
272  inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[2] : inputTensorShape[1];
273  // Get output tensor info
274  const unsigned int outputNumElements = model.GetOutputSize();
275  // Check output tensor shape is valid
276  if (modelOutputLabels.size() != outputNumElements)
277  {
278  ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
279  << " , mismatches the number of output labels: " << modelOutputLabels.size();
280  return 1;
281  }
282 
283  const unsigned int batchSize = 1;
284  // Get normalisation parameters
285  SupportedFrontend modelFrontend;
286  if (modelFormat == "caffe")
287  {
288  modelFrontend = SupportedFrontend::Caffe;
289  }
290  else if (modelFormat == "tensorflow")
291  {
292  modelFrontend = SupportedFrontend::TensorFlow;
293  }
294  else if (modelFormat == "tflite")
295  {
296  modelFrontend = SupportedFrontend::TFLite;
297  }
298  else
299  {
300  ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
301  return 1;
302  }
303  const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
304  for (const auto& imageEntry : imageNameToLabel)
305  {
306  const std::string imageName = imageEntry.first;
307  std::cout << "Processing image: " << imageName << "\n";
308 
309  vector<TContainer> inputDataContainers;
310  vector<TContainer> outputDataContainers;
311 
312  auto imagePath = pathToDataDir / boost::filesystem::path(imageName);
313  switch (inputTensorDataType)
314  {
315  case armnn::DataType::Signed32:
316  inputDataContainers.push_back(
317  PrepareImageTensor<int>(imagePath.string(),
318  inputTensorWidth, inputTensorHeight,
319  normParams,
320  batchSize,
321  inputTensorDataLayout));
322  outputDataContainers = { vector<int>(outputNumElements) };
323  break;
324  case armnn::DataType::QAsymmU8:
325  inputDataContainers.push_back(
326  PrepareImageTensor<uint8_t>(imagePath.string(),
327  inputTensorWidth, inputTensorHeight,
328  normParams,
329  batchSize,
330  inputTensorDataLayout));
331  outputDataContainers = { vector<uint8_t>(outputNumElements) };
332  break;
333  case armnn::DataType::Float32:
334  default:
335  inputDataContainers.push_back(
336  PrepareImageTensor<float>(imagePath.string(),
337  inputTensorWidth, inputTensorHeight,
338  normParams,
339  batchSize,
340  inputTensorDataLayout));
341  outputDataContainers = { vector<float>(outputNumElements) };
342  break;
343  }
344 
345  status = runtime->EnqueueWorkload(networkId,
346  armnnUtils::MakeInputTensors(inputBindings, inputDataContainers),
347  armnnUtils::MakeOutputTensors(outputBindings, outputDataContainers));
348 
349  if (status == armnn::Status::Failure)
350  {
351  ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
352  }
353 
354  checker.AddImageResult<TContainer>(imageName, outputDataContainers);
355  }
356  }
357  else
358  {
359  return 1;
360  }
361 
362  for(unsigned int i = 1; i <= 5; ++i)
363  {
364  std::cout << "Top " << i << " Accuracy: " << checker.GetAccuracy(i) << "%" << "\n";
365  }
366 
367  ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
368  return 0;
369  }
370  catch (armnn::Exception const & e)
371  {
372  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
373  // exception of type std::length_error.
374  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
375  std::cerr << "Armnn Error: " << e.what() << std::endl;
376  return 1;
377  }
378  catch (const std::exception & e)
379  {
380  // Coverity fix: various boost exceptions can be thrown by methods called by this test.
381  std::cerr << "WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the "
382  "Accuracy Tool: " << e.what() << std::endl;
383  return 1;
384  }
385 }
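
For reference, an invocation built from the options parsed above might look like the following; the executable name, model file, tensor identifiers and paths are placeholder assumptions.

    ./ModelAccuracyTool -m mobilenet_v1.armnn -f tflite \
        -i input -o output \
        -d /path/to/imagenet/validation/ \
        -p model_output_labels.txt -v validation_labels.txt \
        -c CpuAcc -l NHWC -r 1:1000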