ArmNN
 21.08
ModelAccuracyTool-Armnn.cpp File Reference
#include "../ImageTensorGenerator/ImageTensorGenerator.hpp"
#include "../InferenceTest.hpp"
#include "ModelAccuracyChecker.hpp"
#include "armnnDeserializer/IDeserializer.hpp"
#include <armnnUtils/Filesystem.hpp>
#include <cxxopts/cxxopts.hpp>
#include <map>

Go to the source code of this file.

Functions

map< std::string, std::string > LoadValidationImageFilenamesAndLabels (const string &validationLabelPath, const string &imageDirectoryPath, size_t begIndex=0, size_t endIndex=0, const string &blacklistPath="")
 Load image names and ground-truth labels from the image directory and the ground-truth label file. More...
 
std::vector< armnnUtils::LabelCategoryNames > LoadModelOutputLabels (const std::string &modelOutputLabelsPath)
 Load model output labels from file. More...
 
int main (int argc, char *argv[])
 

Function Documentation

◆ LoadModelOutputLabels()

std::vector< armnnUtils::LabelCategoryNames > LoadModelOutputLabels ( const std::string &  modelOutputLabelsPath)

Load model output labels from file.

Precondition
modelOutputLabelsPath exists and is a regular file
Parameters
[in]  modelOutputLabelsPath  Path to the model output labels file
Returns
A vector of labels, each of which is described by a list of category names

Definition at line 486 of file ModelAccuracyTool-Armnn.cpp.

References armnnUtils::SplitBy(), and armnnUtils::Strip().

Referenced by main().

487 {
488  std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels;
489  ifstream modelOutputLablesFile(modelOutputLabelsPath);
490  std::string line;
491  while (std::getline(modelOutputLablesFile, line))
492  {
493  armnnUtils::LabelCategoryNames tokens = armnnUtils::SplitBy(line, ":");
494  armnnUtils::LabelCategoryNames predictionCategoryNames = armnnUtils::SplitBy(tokens.back(), ",");
495  std::transform(predictionCategoryNames.begin(), predictionCategoryNames.end(), predictionCategoryNames.begin(),
496  [](const std::string& category) { return armnnUtils::Strip(category); });
497  modelOutputLabels.push_back(predictionCategoryNames);
498  }
499  return modelOutputLabels;
500 }
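Each line of the labels file is split on ':' and the final token is then split on ',', giving the list of category names for one output index. The sketch below is illustrative only; the file name and its contents are assumptions, not part of the tool.

// Hypothetical labels file "model_output_labels.txt", one line per model output index:
//   0:tench, Tinca tinca
//   1:goldfish, Carassius auratus
const std::vector<armnnUtils::LabelCategoryNames> labels =
    LoadModelOutputLabels("model_output_labels.txt");
// labels[1] would then hold {"goldfish", "Carassius auratus"} once leading/trailing spaces are stripped.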

◆ LoadValidationImageFilenamesAndLabels()

map< std::string, std::string > LoadValidationImageFilenamesAndLabels ( const string &  validationLabelPath,
const string &  imageDirectoryPath,
size_t  begIndex = 0,
size_t  endIndex = 0,
const string &  blacklistPath = "" 
)

Load image names and ground-truth labels from the image directory and the ground-truth label file.

Precondition
validationLabelPath exists and is a valid regular file
imageDirectoryPath exists and is a valid directory
labels in the validation file correspond to the images, which are sorted lexicographically by image name
image indices start at 1
begIndex and endIndex are both inclusive
Parameters
[in]  validationLabelPath  Path to the validation label file
[in]  imageDirectoryPath   Path to the directory containing the validation images
[in]  begIndex             Begin index of the images to be loaded. Inclusive
[in]  endIndex             End index of the images to be loaded. Inclusive
[in]  blacklistPath        Path to the blacklist file
Returns
A map mapping image file names to their corresponding ground-truth labels

Definition at line 405 of file ModelAccuracyTool-Armnn.cpp.

Referenced by main().

410 {
411  // Populate imageFilenames with names of all .JPEG, .PNG images
412  std::vector<std::string> imageFilenames;
413  for (const auto& imageEntry : fs::directory_iterator(fs::path(imageDirectoryPath)))
414  {
415  fs::path imagePath = imageEntry.path();
416 
417  // Get extension and convert to uppercase
418  std::string imageExtension = imagePath.extension().string();
419  std::transform(imageExtension.begin(), imageExtension.end(), imageExtension.begin(), ::toupper);
420 
421  if (fs::is_regular_file(imagePath) && (imageExtension == ".JPEG" || imageExtension == ".PNG"))
422  {
423  imageFilenames.push_back(imagePath.filename().string());
424  }
425  }
426  if (imageFilenames.empty())
427  {
428  throw armnn::Exception("No image file (JPEG, PNG) found at " + imageDirectoryPath);
429  }
430 
431  // Sort the image filenames lexicographically
432  std::sort(imageFilenames.begin(), imageFilenames.end());
433 
434  std::cout << imageFilenames.size() << " images found at " << imageDirectoryPath << std::endl;
435 
436  // Get default end index
437  if (begIndex < 1 || endIndex > imageFilenames.size())
438  {
439  throw armnn::Exception("Invalid image index range");
440  }
441  endIndex = endIndex == 0 ? imageFilenames.size() : endIndex;
442  if (begIndex > endIndex)
443  {
444  throw armnn::Exception("Invalid image index range");
445  }
446 
447  // Load blacklist if there is one
448  std::vector<unsigned int> blacklist;
449  if (!blacklistPath.empty())
450  {
451  std::ifstream blacklistFile(blacklistPath);
452  unsigned int index;
453  while (blacklistFile >> index)
454  {
455  blacklist.push_back(index);
456  }
457  }
458 
459  // Load ground truth labels and pair them with corresponding image names
460  std::string classification;
461  map<std::string, std::string> imageNameToLabel;
462  ifstream infile(validationLabelPath);
463  size_t imageIndex = begIndex;
464  size_t blacklistIndexCount = 0;
465  while (std::getline(infile, classification))
466  {
467  if (imageIndex > endIndex)
468  {
469  break;
470  }
471  // If current imageIndex is included in blacklist, skip the current image
472  if (blacklistIndexCount < blacklist.size() && imageIndex == blacklist[blacklistIndexCount])
473  {
474  ++imageIndex;
475  ++blacklistIndexCount;
476  continue;
477  }
478  imageNameToLabel.insert(std::pair<std::string, std::string>(imageFilenames[imageIndex - 1], classification));
479  ++imageIndex;
480  }
481  std::cout << blacklistIndexCount << " images blacklisted" << std::endl;
482  std::cout << imageIndex - begIndex - blacklistIndexCount << " images to be loaded" << std::endl;
483  return imageNameToLabel;
484 }
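A call might look like the sketch below; the paths are assumptions for illustration. The label file is expected to contain one ground-truth label per line, in the same order as the lexicographically sorted image names, and an endIndex of 0 selects every image up to the last one.

// Hypothetical paths; indices are 1-based and inclusive.
const map<std::string, std::string> imageNameToLabel =
    LoadValidationImageFilenamesAndLabels("val_labels.txt",    // validationLabelPath
                                          "validation_images", // imageDirectoryPath
                                          1,                   // begIndex
                                          0,                   // endIndex: 0 means "up to the last image"
                                          "");                 // no blacklist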

◆ main()

int main ( int  argc,
char *  argv[] 
)

Definition at line 47 of file ModelAccuracyTool-Armnn.cpp.

References ARMNN_ASSERT_MSG, ARMNN_LOG, armnn::BackendRegistryInstance(), armnn::ConfigureLogging(), IRuntime::Create(), armnn::Debug, armnn::Failure, armnn::Float32, BackendRegistry::GetBackendIdsAsString(), TensorInfo::GetDataType(), InferenceModel< IParser, TDataType >::GetInputBindingInfo(), GetNormalizationParameters(), InferenceModel< IParser, TDataType >::GetOutputSize(), TensorInfo::GetShape(), LoadModelOutputLabels(), LoadValidationImageFilenamesAndLabels(), BindingPointInfo::m_BindingId, Params::m_ComputeDevices, Params::m_InputBindings, Params::m_IsModelBinary, Params::m_ModelPath, Params::m_OutputBindings, BindingPointInfo::m_TensorInfo, armnnUtils::MakeInputTensors(), armnnUtils::MakeOutputTensors(), armnn::NCHW, armnn::NHWC, armnn::Optimize(), PrepareImageTensor< float >(), PrepareImageTensor< int >(), PrepareImageTensor< uint8_t >(), armnn::QAsymmU8, armnn::Signed32, armnnUtils::SplitBy(), TFLite, armnn::test::ValidateDirectory(), and Exception::what().

48 {
49  try
50  {
51  armnn::LogSeverity level = armnn::LogSeverity::Debug;
52  armnn::ConfigureLogging(true, true, level);
53 
54  std::string modelPath;
55  std::string modelFormat;
56  std::vector<std::string> inputNames;
57  std::vector<std::string> outputNames;
58  std::string dataDir;
59  std::string modelOutputLabelsPath;
60  std::string validationLabelPath;
61  std::string inputLayout;
62  std::vector<armnn::BackendId> computeDevice;
63  std::string validationRange;
64  std::string blacklistPath;
65 
66  const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
67  + armnn::BackendRegistryInstance().GetBackendIdsAsString();
68 
69  try
70  {
71  cxxopts::Options options("ModeAccuracyTool-Armnn","Options");
72 
73  options.add_options()
74  ("h,help", "Display help messages")
75  ("m,model-path",
76  "Path to armnn format model file",
77  cxxopts::value<std::string>(modelPath))
78  ("f,model-format",
79  "The model format. Supported values: tflite",
80  cxxopts::value<std::string>(modelFormat))
81  ("i,input-name",
82  "Identifier of the input tensors in the network separated by comma with no space.",
83  cxxopts::value<std::vector<std::string>>(inputNames))
84  ("o,output-name",
85  "Identifier of the output tensors in the network separated by comma with no space.",
86  cxxopts::value<std::vector<std::string>>(outputNames))
87  ("d,data-dir",
88  "Path to directory containing the ImageNet test data",
89  cxxopts::value<std::string>(dataDir))
90  ("p,model-output-labels",
91  "Path to model output labels file.",
92  cxxopts::value<std::string>(modelOutputLabelsPath))
93  ("v,validation-labels-path",
94  "Path to ImageNet Validation Label file",
95  cxxopts::value<std::string>(validationLabelPath))
96  ("l,data-layout",
97  "Data layout. Supported value: NHWC, NCHW. Default: NHWC",
98  cxxopts::value<std::string>(inputLayout)->default_value("NHWC"))
99  ("c,compute",
100  backendsMessage.c_str(),
101  cxxopts::value<std::vector<armnn::BackendId>>(computeDevice)->default_value("CpuAcc,CpuRef"))
102  ("r,validation-range",
103  "The range of the images to be evaluated. Specified in the form <begin index>:<end index>."
104  "The index starts at 1 and the range is inclusive."
105  "By default the evaluation will be performed on all images.",
106  cxxopts::value<std::string>(validationRange)->default_value("1:0"))
107  ("b,blacklist-path",
108  "Path to a blacklist file where each line denotes the index of an image to be "
109  "excluded from evaluation.",
110  cxxopts::value<std::string>(blacklistPath)->default_value(""));
111 
112  auto result = options.parse(argc, argv);
113 
114  if (result.count("help") > 0)
115  {
116  std::cout << options.help() << std::endl;
117  return EXIT_FAILURE;
118  }
119 
120  // Check for mandatory single options.
121  std::string mandatorySingleParameters[] = { "model-path", "model-format", "input-name", "output-name",
122  "data-dir", "model-output-labels", "validation-labels-path" };
123  for (auto param : mandatorySingleParameters)
124  {
125  if (result.count(param) != 1)
126  {
127  std::cerr << "Parameter \'--" << param << "\' is required but missing." << std::endl;
128  return EXIT_FAILURE;
129  }
130  }
131  }
132  catch (const cxxopts::OptionException& e)
133  {
134  std::cerr << e.what() << std::endl << std::endl;
135  return EXIT_FAILURE;
136  }
137  catch (const std::exception& e)
138  {
139  ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
140  std::cerr << "Fatal internal error: " << e.what() << std::endl;
141  return EXIT_FAILURE;
142  }
143 
144  // Check if the requested backend are all valid
145  std::string invalidBackends;
146  if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
147  {
148  ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
149  << invalidBackends;
150  return EXIT_FAILURE;
151  }
152  armnn::Status status;
153 
154  // Create runtime
155  armnn::IRuntime::CreationOptions options;
156  armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
157  std::ifstream file(modelPath);
158 
159  // Create Parser
160  using IParser = armnnDeserializer::IDeserializer;
161  auto armnnparser(IParser::Create());
162 
163  // Create a network
164  armnn::INetworkPtr network = armnnparser->CreateNetworkFromBinary(file);
165 
166  // Optimizes the network.
167  armnn::IOptimizedNetworkPtr optimizedNet(nullptr, nullptr);
168  try
169  {
170  optimizedNet = armnn::Optimize(*network, computeDevice, runtime->GetDeviceSpec());
171  }
172  catch (const armnn::Exception& e)
173  {
174  std::stringstream message;
175  message << "armnn::Exception (" << e.what() << ") caught from optimize.";
176  ARMNN_LOG(fatal) << message.str();
177  return EXIT_FAILURE;
178  }
179 
180  // Loads the network into the runtime.
181  armnn::NetworkId networkId;
182  status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
183  if (status == armnn::Status::Failure)
184  {
185  ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
186  return EXIT_FAILURE;
187  }
188 
189  // Set up Network
191 
192  // Handle inputNames and outputNames, there can be multiple.
193  std::vector<BindingPointInfo> inputBindings;
194  for(auto& input: inputNames)
195  {
196  armnnDeserializer::BindingPointInfo inputBindingInfo;
197  inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, input);
198 
199  std::pair<armnn::LayerBindingId, armnn::TensorInfo>
200  m_InputBindingInfo(inputBindingInfo.m_BindingId, inputBindingInfo.m_TensorInfo);
201  inputBindings.push_back(m_InputBindingInfo);
202  }
203 
204  std::vector<BindingPointInfo> outputBindings;
205  for(auto& output: outputNames)
206  {
207  armnnDeserializer::BindingPointInfo outputBindingInfo;
208  outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, output);
209 
210  std::pair<armnn::LayerBindingId, armnn::TensorInfo>
211  m_OutputBindingInfo(outputBindingInfo.m_BindingId, outputBindingInfo.m_TensorInfo);
212  outputBindings.push_back(m_OutputBindingInfo);
213  }
214 
215  // Load model output labels
216  if (modelOutputLabelsPath.empty() || !fs::exists(modelOutputLabelsPath) ||
217  !fs::is_regular_file(modelOutputLabelsPath))
218  {
219  ARMNN_LOG(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
220  }
221  const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
222  LoadModelOutputLabels(modelOutputLabelsPath);
223 
224  // Parse begin and end image indices
225  std::vector<std::string> imageIndexStrs = armnnUtils::SplitBy(validationRange, ":");
226  size_t imageBegIndex;
227  size_t imageEndIndex;
228  if (imageIndexStrs.size() != 2)
229  {
230  ARMNN_LOG(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
231  return EXIT_FAILURE;
232  }
233  try
234  {
235  imageBegIndex = std::stoul(imageIndexStrs[0]);
236  imageEndIndex = std::stoul(imageIndexStrs[1]);
237  }
238  catch (const std::exception& e)
239  {
240  ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
241  return EXIT_FAILURE;
242  }
243 
244  // Validate blacklist file if it's specified
245  if (!blacklistPath.empty() &&
246  !(fs::exists(blacklistPath) && fs::is_regular_file(blacklistPath)))
247  {
248  ARMNN_LOG(fatal) << "Invalid path to blacklist file at " << blacklistPath;
249  return EXIT_FAILURE;
250  }
251 
252  fs::path pathToDataDir(dataDir);
253  const map<std::string, std::string> imageNameToLabel = LoadValidationImageFilenamesAndLabels(
254  validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, blacklistPath);
255  armnnUtils::ModelAccuracyChecker checker(imageNameToLabel, modelOutputLabels);
256  using TContainer = mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<uint8_t>>;
257 
258  if (ValidateDirectory(dataDir))
259  {
260  InferenceModel<armnnDeserializer::IDeserializer, float>::Params params;
261 
262  params.m_ModelPath = modelPath;
263  params.m_IsModelBinary = true;
264  params.m_ComputeDevices = computeDevice;
265  // Insert inputNames and outputNames into params vector
266  params.m_InputBindings.insert(std::end(params.m_InputBindings),
267  std::begin(inputNames),
268  std::end(inputNames));
269  params.m_OutputBindings.insert(std::end(params.m_OutputBindings),
270  std::begin(outputNames),
271  std::end(outputNames));
272 
273  using TParser = armnnDeserializer::IDeserializer;
274  // If dynamicBackends is empty it will be disabled by default.
275  InferenceModel<TParser, float> model(params, false, "");
276 
277  // Get input tensor information
278  const armnn::TensorInfo& inputTensorInfo = model.GetInputBindingInfo().second;
279  const armnn::TensorShape& inputTensorShape = inputTensorInfo.GetShape();
280  const armnn::DataType& inputTensorDataType = inputTensorInfo.GetDataType();
281  armnn::DataLayout inputTensorDataLayout;
282  if (inputLayout == "NCHW")
283  {
284  inputTensorDataLayout = armnn::DataLayout::NCHW;
285  }
286  else if (inputLayout == "NHWC")
287  {
288  inputTensorDataLayout = armnn::DataLayout::NHWC;
289  }
290  else
291  {
292  ARMNN_LOG(fatal) << "Invalid Data layout: " << inputLayout;
293  return EXIT_FAILURE;
294  }
295  const unsigned int inputTensorWidth =
296  inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[3] : inputTensorShape[2];
297  const unsigned int inputTensorHeight =
298  inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[2] : inputTensorShape[1];
299  // Get output tensor info
300  const unsigned int outputNumElements = model.GetOutputSize();
301  // Check output tensor shape is valid
302  if (modelOutputLabels.size() != outputNumElements)
303  {
304  ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
305  << " , mismatches the number of output labels: " << modelOutputLabels.size();
306  return EXIT_FAILURE;
307  }
308 
309  const unsigned int batchSize = 1;
310  // Get normalisation parameters
311  SupportedFrontend modelFrontend;
312  if (modelFormat == "tflite")
313  {
314  modelFrontend = SupportedFrontend::TFLite;
315  }
316  else
317  {
318  ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
319  return EXIT_FAILURE;
320  }
321  const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
322  for (const auto& imageEntry : imageNameToLabel)
323  {
324  const std::string imageName = imageEntry.first;
325  std::cout << "Processing image: " << imageName << "\n";
326 
327  vector<TContainer> inputDataContainers;
328  vector<TContainer> outputDataContainers;
329 
330  auto imagePath = pathToDataDir / fs::path(imageName);
331  switch (inputTensorDataType)
332  {
333  case armnn::DataType::Signed32:
334  inputDataContainers.push_back(
335  PrepareImageTensor<int>(imagePath.string(),
336  inputTensorWidth, inputTensorHeight,
337  normParams,
338  batchSize,
339  inputTensorDataLayout));
340  outputDataContainers = { vector<int>(outputNumElements) };
341  break;
342  case armnn::DataType::QAsymmU8:
343  inputDataContainers.push_back(
344  PrepareImageTensor<uint8_t>(imagePath.string(),
345  inputTensorWidth, inputTensorHeight,
346  normParams,
347  batchSize,
348  inputTensorDataLayout));
349  outputDataContainers = { vector<uint8_t>(outputNumElements) };
350  break;
351  case armnn::DataType::Float32:
352  default:
353  inputDataContainers.push_back(
354  PrepareImageTensor<float>(imagePath.string(),
355  inputTensorWidth, inputTensorHeight,
356  normParams,
357  batchSize,
358  inputTensorDataLayout));
359  outputDataContainers = { vector<float>(outputNumElements) };
360  break;
361  }
362 
363  status = runtime->EnqueueWorkload(networkId,
364  armnnUtils::MakeInputTensors(inputBindings, inputDataContainers),
365  armnnUtils::MakeOutputTensors(outputBindings, outputDataContainers));
366 
367  if (status == armnn::Status::Failure)
368  {
369  ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
370  }
371 
372  checker.AddImageResult<TContainer>(imageName, outputDataContainers);
373  }
374  }
375  else
376  {
377  return EXIT_SUCCESS;
378  }
379 
380  for(unsigned int i = 1; i <= 5; ++i)
381  {
382  std::cout << "Top " << i << " Accuracy: " << checker.GetAccuracy(i) << "%" << "\n";
383  }
384 
385  ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
386  return EXIT_SUCCESS;
387  }
388  catch (const armnn::Exception& e)
389  {
390  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
391  // exception of type std::length_error.
392  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
393  std::cerr << "Armnn Error: " << e.what() << std::endl;
394  return EXIT_FAILURE;
395  }
396  catch (const std::exception& e)
397  {
398  // Coverity fix: various boost exceptions can be thrown by methods called by this test.
399  std::cerr << "WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the "
400  "Accuracy Tool: " << e.what() << std::endl;
401  return EXIT_FAILURE;
402  }
403 }
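For reference, an invocation of the tool could look like the following; all paths are illustrative, the input and output tensor names depend on the model, and the backend list may be any comma-separated set of registered backends.

ModelAccuracyTool-Armnn -m model.armnn -f tflite -i input -o output \
    -d /path/to/imagenet/validation/ -p model_output_labels.txt \
    -v validation_labels.txt -l NHWC -c CpuAcc,CpuRef -r 1:0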