ArmNN 21.11
ModelAccuracyTool-Armnn.cpp File Reference
#include "../ImageTensorGenerator/ImageTensorGenerator.hpp"
#include "../InferenceTest.hpp"
#include "ModelAccuracyChecker.hpp"
#include "armnnDeserializer/IDeserializer.hpp"
#include <armnnUtils/Filesystem.hpp>
#include <armnnUtils/TContainer.hpp>
#include <cxxopts/cxxopts.hpp>
#include <map>


Functions

map< std::string, std::string > LoadValidationImageFilenamesAndLabels (const string &validationLabelPath, const string &imageDirectoryPath, size_t begIndex=0, size_t endIndex=0, const string &blacklistPath="")
 Load image names and ground-truth labels from the image directory and the ground-truth label file. More...
 
std::vector< armnnUtils::LabelCategoryNames > LoadModelOutputLabels (const std::string &modelOutputLabelsPath)
 Load model output labels from file. More...
 
int main (int argc, char *argv[])
 

Function Documentation

◆ LoadModelOutputLabels()

std::vector< armnnUtils::LabelCategoryNames > LoadModelOutputLabels ( const std::string &  modelOutputLabelsPath)

Load model output labels from file.

Precondition
modelOutputLabelsPath exists and is a regular file
Parameters
[in]  modelOutputLabelsPath  Path to model output labels file
Returns
A vector of labels, where each label is a list of category names

Definition at line 487 of file ModelAccuracyTool-Armnn.cpp.

References armnnUtils::SplitBy(), and armnnUtils::Strip().

Referenced by main().

488 {
489  std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels;
490  ifstream modelOutputLablesFile(modelOutputLabelsPath);
491  std::string line;
492  while (std::getline(modelOutputLablesFile, line))
493  {
494  armnnUtils::LabelCategoryNames tokens = armnnUtils::SplitBy(line, ":");
495  armnnUtils::LabelCategoryNames predictionCategoryNames = armnnUtils::SplitBy(tokens.back(), ",");
496  std::transform(predictionCategoryNames.begin(), predictionCategoryNames.end(), predictionCategoryNames.begin(),
497  [](const std::string& category) { return armnnUtils::Strip(category); });
498  modelOutputLabels.push_back(predictionCategoryNames);
499  }
500  return modelOutputLabels;
501 }
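
The parsing above splits each line of the labels file on ':' and then splits the last token on ',', stripping whitespace from every category name. The standalone sketch below reproduces that logic with the standard library only; the "<index>:<name1>, <name2>" line format and the sample contents are illustrative assumptions, not something mandated by this tool.

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    int main()
    {
        // Hypothetical label line, assumed to follow "<index>:<name1>, <name2>, ...".
        const std::string line = "0:sample category, alternative name";

        // Equivalent of armnnUtils::SplitBy(line, ":") followed by tokens.back():
        // keep the text after the last ':'.
        const std::string names = line.substr(line.find_last_of(':') + 1);

        // Equivalent of armnnUtils::SplitBy(names, ",") followed by armnnUtils::Strip().
        std::vector<std::string> categoryNames;
        std::stringstream ss(names);
        std::string token;
        while (std::getline(ss, token, ','))
        {
            // Strip leading/trailing whitespace from each category name.
            const auto first = token.find_first_not_of(" \t");
            const auto last  = token.find_last_not_of(" \t");
            categoryNames.push_back(first == std::string::npos ? "" : token.substr(first, last - first + 1));
        }

        for (const std::string& name : categoryNames)
        {
            std::cout << name << std::endl; // prints "sample category" then "alternative name"
        }
        return 0;
    }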

◆ LoadValidationImageFilenamesAndLabels()

map< std::string, std::string > LoadValidationImageFilenamesAndLabels ( const string &  validationLabelPath,
const string &  imageDirectoryPath,
size_t  begIndex = 0,
size_t  endIndex = 0,
const string &  blacklistPath = "" 
)

Load image names and ground-truth labels from the image directory and the ground-truth label file.

Precondition
validationLabelPath exists and is a valid regular file
imageDirectoryPath exists and is a valid directory
labels in the validation file correspond to the images in the directory when sorted lexicographically by file name
image indices start at 1
begIndex and endIndex are inclusive
Parameters
[in]  validationLabelPath  Path to validation label file
[in]  imageDirectoryPath   Path to directory containing validation images
[in]  begIndex             Begin index of images to be loaded. Inclusive
[in]  endIndex             End index of images to be loaded. Inclusive
[in]  blacklistPath        Path to blacklist file
Returns
A map mapping image file names to their corresponding ground-truth labels

Definition at line 406 of file ModelAccuracyTool-Armnn.cpp.

Referenced by main().

411 {
412  // Populate imageFilenames with names of all .JPEG, .PNG images
413  std::vector<std::string> imageFilenames;
414  for (const auto& imageEntry : fs::directory_iterator(fs::path(imageDirectoryPath)))
415  {
416  fs::path imagePath = imageEntry.path();
417 
418  // Get extension and convert to uppercase
419  std::string imageExtension = imagePath.extension().string();
420  std::transform(imageExtension.begin(), imageExtension.end(), imageExtension.begin(), ::toupper);
421 
422  if (fs::is_regular_file(imagePath) && (imageExtension == ".JPEG" || imageExtension == ".PNG"))
423  {
424  imageFilenames.push_back(imagePath.filename().string());
425  }
426  }
427  if (imageFilenames.empty())
428  {
429  throw armnn::Exception("No image file (JPEG, PNG) found at " + imageDirectoryPath);
430  }
431 
432  // Sort the image filenames lexicographically
433  std::sort(imageFilenames.begin(), imageFilenames.end());
434 
435  std::cout << imageFilenames.size() << " images found at " << imageDirectoryPath << std::endl;
436 
437  // Get default end index
438  if (begIndex < 1 || endIndex > imageFilenames.size())
439  {
440  throw armnn::Exception("Invalid image index range");
441  }
442  endIndex = endIndex == 0 ? imageFilenames.size() : endIndex;
443  if (begIndex > endIndex)
444  {
445  throw armnn::Exception("Invalid image index range");
446  }
447 
448  // Load blacklist if there is one
449  std::vector<unsigned int> blacklist;
450  if (!blacklistPath.empty())
451  {
452  std::ifstream blacklistFile(blacklistPath);
453  unsigned int index;
454  while (blacklistFile >> index)
455  {
456  blacklist.push_back(index);
457  }
458  }
459 
460  // Load ground truth labels and pair them with corresponding image names
461  std::string classification;
462  map<std::string, std::string> imageNameToLabel;
463  ifstream infile(validationLabelPath);
464  size_t imageIndex = begIndex;
465  size_t blacklistIndexCount = 0;
466  while (std::getline(infile, classification))
467  {
468  if (imageIndex > endIndex)
469  {
470  break;
471  }
472  // If current imageIndex is included in blacklist, skip the current image
473  if (blacklistIndexCount < blacklist.size() && imageIndex == blacklist[blacklistIndexCount])
474  {
475  ++imageIndex;
476  ++blacklistIndexCount;
477  continue;
478  }
479  imageNameToLabel.insert(std::pair<std::string, std::string>(imageFilenames[imageIndex - 1], classification));
480  ++imageIndex;
481  }
482  std::cout << blacklistIndexCount << " images blacklisted" << std::endl;
483  std::cout << imageIndex - begIndex - blacklistIndexCount << " images to be loaded" << std::endl;
484  return imageNameToLabel;
485 }
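
A hypothetical call is sketched below. The paths, the 1..100 range and the presence of a blacklist file are illustrative assumptions; the only requirements are the preconditions listed above (1-based, inclusive indices and a label file ordered to match the lexicographically sorted image names). The sketch assumes the declarations on this page plus <iostream>, <map> and <string> are visible.

    // Hypothetical usage sketch; none of these paths come from the tool itself.
    const std::string validationLabelPath = "/data/imagenet/validation_labels.txt";
    const std::string imageDirectoryPath  = "/data/imagenet/images";
    const std::string blacklistPath       = "/data/imagenet/blacklist.txt"; // may be "" if unused

    // Load ground-truth labels for images 1 to 100 (1-based, both ends inclusive).
    const std::map<std::string, std::string> imageNameToLabel =
        LoadValidationImageFilenamesAndLabels(validationLabelPath, imageDirectoryPath, 1, 100, blacklistPath);

    for (const auto& entry : imageNameToLabel)
    {
        std::cout << entry.first << " -> " << entry.second << std::endl;
    }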

◆ main()

int main ( int  argc,
char *  argv[] 
)

Definition at line 49 of file ModelAccuracyTool-Armnn.cpp.

References ARMNN_ASSERT_MSG, ARMNN_LOG, armnn::BackendRegistryInstance(), armnn::ConfigureLogging(), IRuntime::Create(), armnn::Debug, armnn::Failure, armnn::Float32, BackendRegistry::GetBackendIdsAsString(), TensorInfo::GetDataType(), InferenceModel< IParser, TDataType >::GetInputBindingInfo(), GetNormalizationParameters(), InferenceModel< IParser, TDataType >::GetOutputSize(), TensorInfo::GetShape(), LoadModelOutputLabels(), LoadValidationImageFilenamesAndLabels(), BindingPointInfo::m_BindingId, Params::m_ComputeDevices, Params::m_InputBindings, Params::m_IsModelBinary, Params::m_ModelPath, Params::m_OutputBindings, BindingPointInfo::m_TensorInfo, armnnUtils::MakeInputTensors(), armnnUtils::MakeOutputTensors(), armnn::NCHW, armnn::NHWC, armnn::Optimize(), PrepareImageTensor< float >(), PrepareImageTensor< int >(), PrepareImageTensor< uint8_t >(), armnn::QAsymmU8, armnn::Signed32, armnnUtils::SplitBy(), TFLite, armnn::test::ValidateDirectory(), and Exception::what().

50 {
51  try
52  {
53  armnn::LogSeverity level = armnn::LogSeverity::Debug;
54  armnn::ConfigureLogging(true, true, level);
55 
56  std::string modelPath;
57  std::string modelFormat;
58  std::vector<std::string> inputNames;
59  std::vector<std::string> outputNames;
60  std::string dataDir;
61  std::string modelOutputLabelsPath;
62  std::string validationLabelPath;
63  std::string inputLayout;
64  std::vector<armnn::BackendId> computeDevice;
65  std::string validationRange;
66  std::string blacklistPath;
67 
68  const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
69  + armnn::BackendRegistryInstance().GetBackendIdsAsString();
70 
71  try
72  {
73  cxxopts::Options options("ModeAccuracyTool-Armnn","Options");
74 
75  options.add_options()
76  ("h,help", "Display help messages")
77  ("m,model-path",
78  "Path to armnn format model file",
79  cxxopts::value<std::string>(modelPath))
80  ("f,model-format",
81  "The model format. Supported values: tflite",
82  cxxopts::value<std::string>(modelFormat))
83  ("i,input-name",
84  "Identifier of the input tensors in the network separated by comma with no space.",
85  cxxopts::value<std::vector<std::string>>(inputNames))
86  ("o,output-name",
87  "Identifier of the output tensors in the network separated by comma with no space.",
88  cxxopts::value<std::vector<std::string>>(outputNames))
89  ("d,data-dir",
90  "Path to directory containing the ImageNet test data",
91  cxxopts::value<std::string>(dataDir))
92  ("p,model-output-labels",
93  "Path to model output labels file.",
94  cxxopts::value<std::string>(modelOutputLabelsPath))
95  ("v,validation-labels-path",
96  "Path to ImageNet Validation Label file",
97  cxxopts::value<std::string>(validationLabelPath))
98  ("l,data-layout",
99  "Data layout. Supported value: NHWC, NCHW. Default: NHWC",
100  cxxopts::value<std::string>(inputLayout)->default_value("NHWC"))
101  ("c,compute",
102  backendsMessage.c_str(),
103  cxxopts::value<std::vector<armnn::BackendId>>(computeDevice)->default_value("CpuAcc,CpuRef"))
104  ("r,validation-range",
105  "The range of the images to be evaluated. Specified in the form <begin index>:<end index>."
106  "The index starts at 1 and the range is inclusive."
107  "By default the evaluation will be performed on all images.",
108  cxxopts::value<std::string>(validationRange)->default_value("1:0"))
109  ("b,blacklist-path",
110  "Path to a blacklist file where each line denotes the index of an image to be "
111  "excluded from evaluation.",
112  cxxopts::value<std::string>(blacklistPath)->default_value(""));
113 
114  auto result = options.parse(argc, argv);
115 
116  if (result.count("help") > 0)
117  {
118  std::cout << options.help() << std::endl;
119  return EXIT_FAILURE;
120  }
121 
122  // Check for mandatory single options.
123  std::string mandatorySingleParameters[] = { "model-path", "model-format", "input-name", "output-name",
124  "data-dir", "model-output-labels", "validation-labels-path" };
125  for (auto param : mandatorySingleParameters)
126  {
127  if (result.count(param) != 1)
128  {
129  std::cerr << "Parameter \'--" << param << "\' is required but missing." << std::endl;
130  return EXIT_FAILURE;
131  }
132  }
133  }
134  catch (const cxxopts::OptionException& e)
135  {
136  std::cerr << e.what() << std::endl << std::endl;
137  return EXIT_FAILURE;
138  }
139  catch (const std::exception& e)
140  {
141  ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
142  std::cerr << "Fatal internal error: " << e.what() << std::endl;
143  return EXIT_FAILURE;
144  }
145 
146  // Check if the requested backend are all valid
147  std::string invalidBackends;
148  if (!CheckRequestedBackendsAreValid(computeDevice, armnn::Optional<std::string&>(invalidBackends)))
149  {
150  ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
151  << invalidBackends;
152  return EXIT_FAILURE;
153  }
154  armnn::Status status;
155 
156  // Create runtime
157  armnn::IRuntime::CreationOptions options;
158  armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
159  std::ifstream file(modelPath);
160 
161  // Create Parser
162  using IParser = armnnDeserializer::IDeserializer;
163  auto armnnparser(IParser::Create());
164 
165  // Create a network
166  armnn::INetworkPtr network = armnnparser->CreateNetworkFromBinary(file);
167 
168  // Optimizes the network.
169  armnn::IOptimizedNetworkPtr optimizedNet(nullptr, nullptr);
170  try
171  {
172  optimizedNet = armnn::Optimize(*network, computeDevice, runtime->GetDeviceSpec());
173  }
174  catch (const armnn::Exception& e)
175  {
176  std::stringstream message;
177  message << "armnn::Exception (" << e.what() << ") caught from optimize.";
178  ARMNN_LOG(fatal) << message.str();
179  return EXIT_FAILURE;
180  }
181 
182  // Loads the network into the runtime.
183  armnn::NetworkId networkId;
184  status = runtime->LoadNetwork(networkId, std::move(optimizedNet));
185  if (status == armnn::Status::Failure)
186  {
187  ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to load network";
188  return EXIT_FAILURE;
189  }
190 
191  // Set up Network
192  using BindingPointInfo = InferenceModelInternal::BindingPointInfo;
193 
194  // Handle inputNames and outputNames, there can be multiple.
195  std::vector<BindingPointInfo> inputBindings;
196  for(auto& input: inputNames)
197  {
198  armnnDeserializer::BindingPointInfo inputBindingInfo;
199  inputBindingInfo = armnnparser->GetNetworkInputBindingInfo(0, input);
200 
201  std::pair<armnn::LayerBindingId, armnn::TensorInfo>
202  m_InputBindingInfo(inputBindingInfo.m_BindingId, inputBindingInfo.m_TensorInfo);
203  inputBindings.push_back(m_InputBindingInfo);
204  }
205 
206  std::vector<BindingPointInfo> outputBindings;
207  for(auto& output: outputNames)
208  {
209  armnnDeserializer::BindingPointInfo outputBindingInfo;
210  outputBindingInfo = armnnparser->GetNetworkOutputBindingInfo(0, output);
211 
212  std::pair<armnn::LayerBindingId, armnn::TensorInfo>
213  m_OutputBindingInfo(outputBindingInfo.m_BindingId, outputBindingInfo.m_TensorInfo);
214  outputBindings.push_back(m_OutputBindingInfo);
215  }
216 
217  // Load model output labels
218  if (modelOutputLabelsPath.empty() || !fs::exists(modelOutputLabelsPath) ||
219  !fs::is_regular_file(modelOutputLabelsPath))
220  {
221  ARMNN_LOG(fatal) << "Invalid model output labels path at " << modelOutputLabelsPath;
222  }
223  const std::vector<armnnUtils::LabelCategoryNames> modelOutputLabels =
224  LoadModelOutputLabels(modelOutputLabelsPath);
225 
226  // Parse begin and end image indices
227  std::vector<std::string> imageIndexStrs = armnnUtils::SplitBy(validationRange, ":");
228  size_t imageBegIndex;
229  size_t imageEndIndex;
230  if (imageIndexStrs.size() != 2)
231  {
232  ARMNN_LOG(fatal) << "Invalid validation range specification: Invalid format " << validationRange;
233  return EXIT_FAILURE;
234  }
235  try
236  {
237  imageBegIndex = std::stoul(imageIndexStrs[0]);
238  imageEndIndex = std::stoul(imageIndexStrs[1]);
239  }
240  catch (const std::exception& e)
241  {
242  ARMNN_LOG(fatal) << "Invalid validation range specification: " << validationRange;
243  return EXIT_FAILURE;
244  }
245 
246  // Validate blacklist file if it's specified
247  if (!blacklistPath.empty() &&
248  !(fs::exists(blacklistPath) && fs::is_regular_file(blacklistPath)))
249  {
250  ARMNN_LOG(fatal) << "Invalid path to blacklist file at " << blacklistPath;
251  return EXIT_FAILURE;
252  }
253 
254  fs::path pathToDataDir(dataDir);
255  const map<std::string, std::string> imageNameToLabel = LoadValidationImageFilenamesAndLabels(
256  validationLabelPath, pathToDataDir.string(), imageBegIndex, imageEndIndex, blacklistPath);
257  armnnUtils::ModelAccuracyChecker checker(imageNameToLabel, modelOutputLabels);
258 
259  if (ValidateDirectory(dataDir))
260  {
261  InferenceModel<armnnDeserializer::IDeserializer, float>::Params params;
262 
263  params.m_ModelPath = modelPath;
264  params.m_IsModelBinary = true;
265  params.m_ComputeDevices = computeDevice;
266  // Insert inputNames and outputNames into params vector
267  params.m_InputBindings.insert(std::end(params.m_InputBindings),
268  std::begin(inputNames),
269  std::end(inputNames));
270  params.m_OutputBindings.insert(std::end(params.m_OutputBindings),
271  std::begin(outputNames),
272  std::end(outputNames));
273 
274  using TParser = armnnDeserializer::IDeserializer;
275  // If dynamicBackends is empty it will be disabled by default.
276  InferenceModel<TParser, float> model(params, false, "");
277 
278  // Get input tensor information
279  const armnn::TensorInfo& inputTensorInfo = model.GetInputBindingInfo().second;
280  const armnn::TensorShape& inputTensorShape = inputTensorInfo.GetShape();
281  const armnn::DataType& inputTensorDataType = inputTensorInfo.GetDataType();
282  armnn::DataLayout inputTensorDataLayout;
283  if (inputLayout == "NCHW")
284  {
285  inputTensorDataLayout = armnn::DataLayout::NCHW;
286  }
287  else if (inputLayout == "NHWC")
288  {
289  inputTensorDataLayout = armnn::DataLayout::NHWC;
290  }
291  else
292  {
293  ARMNN_LOG(fatal) << "Invalid Data layout: " << inputLayout;
294  return EXIT_FAILURE;
295  }
296  const unsigned int inputTensorWidth =
297  inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[3] : inputTensorShape[2];
298  const unsigned int inputTensorHeight =
299  inputTensorDataLayout == armnn::DataLayout::NCHW ? inputTensorShape[2] : inputTensorShape[1];
300  // Get output tensor info
301  const unsigned int outputNumElements = model.GetOutputSize();
302  // Check output tensor shape is valid
303  if (modelOutputLabels.size() != outputNumElements)
304  {
305  ARMNN_LOG(fatal) << "Number of output elements: " << outputNumElements
306  << " , mismatches the number of output labels: " << modelOutputLabels.size();
307  return EXIT_FAILURE;
308  }
309 
310  const unsigned int batchSize = 1;
311  // Get normalisation parameters
312  SupportedFrontend modelFrontend;
313  if (modelFormat == "tflite")
314  {
315  modelFrontend = SupportedFrontend::TFLite;
316  }
317  else
318  {
319  ARMNN_LOG(fatal) << "Unsupported frontend: " << modelFormat;
320  return EXIT_FAILURE;
321  }
322  const NormalizationParameters& normParams = GetNormalizationParameters(modelFrontend, inputTensorDataType);
323  for (const auto& imageEntry : imageNameToLabel)
324  {
325  const std::string imageName = imageEntry.first;
326  std::cout << "Processing image: " << imageName << "\n";
327 
328  vector<armnnUtils::TContainer> inputDataContainers;
329  vector<armnnUtils::TContainer> outputDataContainers;
330 
331  auto imagePath = pathToDataDir / fs::path(imageName);
332  switch (inputTensorDataType)
333  {
334  case armnn::DataType::Signed32:
335  inputDataContainers.push_back(
336  PrepareImageTensor<int>(imagePath.string(),
337  inputTensorWidth, inputTensorHeight,
338  normParams,
339  batchSize,
340  inputTensorDataLayout));
341  outputDataContainers = { vector<int>(outputNumElements) };
342  break;
343  case armnn::DataType::QAsymmU8:
344  inputDataContainers.push_back(
345  PrepareImageTensor<uint8_t>(imagePath.string(),
346  inputTensorWidth, inputTensorHeight,
347  normParams,
348  batchSize,
349  inputTensorDataLayout));
350  outputDataContainers = { vector<uint8_t>(outputNumElements) };
351  break;
352  case armnn::DataType::Float32:
353  default:
354  inputDataContainers.push_back(
355  PrepareImageTensor<float>(imagePath.string(),
356  inputTensorWidth, inputTensorHeight,
357  normParams,
358  batchSize,
359  inputTensorDataLayout));
360  outputDataContainers = { vector<float>(outputNumElements) };
361  break;
362  }
363 
364  status = runtime->EnqueueWorkload(networkId,
365  armnnUtils::MakeInputTensors(inputBindings, inputDataContainers),
366  armnnUtils::MakeOutputTensors(outputBindings, outputDataContainers));
367 
368  if (status == armnn::Status::Failure)
369  {
370  ARMNN_LOG(fatal) << "armnn::IRuntime: Failed to enqueue workload for image: " << imageName;
371  }
372 
373  checker.AddImageResult<armnnUtils::TContainer>(imageName, outputDataContainers);
374  }
375  }
376  else
377  {
378  return EXIT_SUCCESS;
379  }
380 
381  for(unsigned int i = 1; i <= 5; ++i)
382  {
383  std::cout << "Top " << i << " Accuracy: " << checker.GetAccuracy(i) << "%" << "\n";
384  }
385 
386  ARMNN_LOG(info) << "Accuracy Tool ran successfully!";
387  return EXIT_SUCCESS;
388  }
389  catch (const armnn::Exception& e)
390  {
391  // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
392  // exception of type std::length_error.
393  // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
394  std::cerr << "Armnn Error: " << e.what() << std::endl;
395  return EXIT_FAILURE;
396  }
397  catch (const std::exception& e)
398  {
399  // Coverity fix: various boost exceptions can be thrown by methods called by this test.
400  std::cerr << "WARNING: ModelAccuracyTool-Armnn: An error has occurred when running the "
401  "Accuracy Tool: " << e.what() << std::endl;
402  return EXIT_FAILURE;
403  }
404 }
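
One detail worth calling out from the listing: the --validation-range string is split on ':' into 1-based, inclusive begin and end indices, and the default "1:0" means "from the first image to the last", because an end index of 0 is replaced by the number of images inside LoadValidationImageFilenamesAndLabels. The sketch below is a minimal standalone illustration of that interpretation, not code taken from the tool.

    #include <iostream>
    #include <stdexcept>
    #include <string>
    #include <utility>

    // Interpret a "<begin>:<end>" validation range: indices are 1-based and
    // inclusive, and end == 0 means "up to the last image".
    static std::pair<size_t, size_t> InterpretValidationRange(const std::string& range, size_t numImages)
    {
        const auto colon = range.find(':');
        if (colon == std::string::npos)
        {
            throw std::invalid_argument("Invalid validation range specification: " + range);
        }
        size_t begIndex = std::stoul(range.substr(0, colon));
        size_t endIndex = std::stoul(range.substr(colon + 1));
        endIndex = (endIndex == 0) ? numImages : endIndex;
        if (begIndex < 1 || begIndex > endIndex || endIndex > numImages)
        {
            throw std::invalid_argument("Invalid image index range");
        }
        return { begIndex, endIndex };
    }

    int main()
    {
        // With the default "1:0" and, say, 50000 images, the whole set is evaluated.
        const auto range = InterpretValidationRange("1:0", 50000);
        std::cout << "Evaluating images " << range.first << " to " << range.second << std::endl;
        return 0;
    }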