ArmNN
 20.08
TfLiteYoloV3Big-Armnn.cpp
Go to the documentation of this file.
1 //
2 // Copyright © 2020 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
7 
8 #include "NMS.hpp"
9 
10 #include <stb/stb_image.h>
11 
12 #include <armnn/INetwork.hpp>
13 #include <armnn/IRuntime.hpp>
14 #include <armnn/Logging.hpp>
16 
17 #include <cxxopts/cxxopts.hpp>
18 #include <ghc/filesystem.hpp>
19 
#include <chrono>
#include <cmath>
#include <fstream>
#include <iostream>
#include <stdlib.h>
24 
25 using namespace armnnTfLiteParser;
26 using namespace armnn;
27 
28 static const int OPEN_FILE_ERROR = -2;
29 static const int OPTIMIZE_NETWORK_ERROR = -3;
30 static const int LOAD_NETWORK_ERROR = -4;
31 static const int LOAD_IMAGE_ERROR = -5;
32 static const int GENERAL_ERROR = -100;
33 
34 #define CHECK_OK(v) \
35  do { \
36  try { \
37  auto r_local = v; \
38  if (r_local != 0) { return r_local;} \
39  } \
40  catch (const armnn::Exception& e) \
41  { \
42  ARMNN_LOG(error) << "Oops: " << e.what(); \
43  return GENERAL_ERROR; \
44  } \
45  } while(0)
46 
47 
48 
49 template<typename TContainer>
50 inline armnn::InputTensors MakeInputTensors(const std::vector<armnn::BindingPointInfo>& inputBindings,
51  const std::vector<std::reference_wrapper<TContainer>>& inputDataContainers)
52 {
53  armnn::InputTensors inputTensors;
54 
55  const size_t numInputs = inputBindings.size();
56  if (numInputs != inputDataContainers.size())
57  {
58  throw armnn::Exception("Mismatching vectors");
59  }
60 
61  for (size_t i = 0; i < numInputs; i++)
62  {
63  const armnn::BindingPointInfo& inputBinding = inputBindings[i];
64  const TContainer& inputData = inputDataContainers[i].get();
65 
66  armnn::ConstTensor inputTensor(inputBinding.second, inputData.data());
67  inputTensors.push_back(std::make_pair(inputBinding.first, inputTensor));
68  }
69 
70  return inputTensors;
71 }
72 
73 template<typename TContainer>
75  const std::vector<armnn::BindingPointInfo>& outputBindings,
76  const std::vector<std::reference_wrapper<TContainer>>& outputDataContainers)
77 {
78  armnn::OutputTensors outputTensors;
79 
80  const size_t numOutputs = outputBindings.size();
81  if (numOutputs != outputDataContainers.size())
82  {
83  throw armnn::Exception("Mismatching vectors");
84  }
85 
86  outputTensors.reserve(numOutputs);
87 
88  for (size_t i = 0; i < numOutputs; i++)
89  {
90  const armnn::BindingPointInfo& outputBinding = outputBindings[i];
91  const TContainer& outputData = outputDataContainers[i].get();
92 
93  armnn::Tensor outputTensor(outputBinding.second, const_cast<float*>(outputData.data()));
94  outputTensors.push_back(std::make_pair(outputBinding.first, outputTensor));
95  }
96 
97  return outputTensors;
98 }
99 
100 int LoadModel(const char* filename,
101  ITfLiteParser& parser,
102  IRuntime& runtime,
103  NetworkId& networkId,
104  const std::vector<BackendId>& backendPreferences,
105  bool enableImport = false)
106 {
107  std::ifstream stream(filename, std::ios::in | std::ios::binary);
108  if (!stream.is_open())
109  {
110  ARMNN_LOG(error) << "Could not open model: " << filename;
111  return OPEN_FILE_ERROR;
112  }
113 
114  std::vector<uint8_t> contents((std::istreambuf_iterator<char>(stream)), std::istreambuf_iterator<char>());
115  stream.close();
116 
117  auto model = parser.CreateNetworkFromBinary(contents);
118  contents.clear();
119  ARMNN_LOG(debug) << "Model loaded ok: " << filename;
120 
121  // Optimize backbone model
122  OptimizerOptions options;
123  options.m_ImportEnabled = enableImport;
124  auto optimizedModel = Optimize(*model, backendPreferences, runtime.GetDeviceSpec(), options);
125  if (!optimizedModel)
126  {
127  ARMNN_LOG(fatal) << "Could not optimize the model:" << filename;
128  return OPTIMIZE_NETWORK_ERROR;
129  }
130 
131  // Load model into runtime
132  {
133  std::string errorMessage;
134  INetworkProperties modelProps(enableImport, enableImport);
135  Status status = runtime.LoadNetwork(networkId, std::move(optimizedModel), errorMessage, modelProps);
136  if (status != Status::Success)
137  {
138  ARMNN_LOG(fatal) << "Could not load " << filename << " model into runtime: " << errorMessage;
139  return LOAD_NETWORK_ERROR;
140  }
141  }
142 
143  return 0;
144 }
145 
146 std::vector<float> LoadImage(const char* filename)
147 {
148  struct Memory
149  {
150  ~Memory() {stbi_image_free(m_Data);}
151  bool IsLoaded() const { return m_Data != nullptr;}
152 
153  unsigned char* m_Data;
154  };
155 
156  std::vector<float> image;
157 
158  int width;
159  int height;
160  int channels;
161 
162  Memory mem = {stbi_load(filename, &width, &height, &channels, 3)};
163  if (!mem.IsLoaded())
164  {
165  ARMNN_LOG(error) << "Could not load input image file: " << filename;
166  return image;
167  }
168 
169  if (width != 1920 || height != 1080 || channels != 3)
170  {
171  ARMNN_LOG(error) << "Input image has wong dimension: " << width << "x" << height << "x" << channels << ". "
172  " Expected 1920x1080x3.";
173  return image;
174  }
175 
176  image.resize(1920*1080*3);
177 
178  // Expand to float. Does this need de-gamma?
179  for (unsigned int idx=0; idx <= 1920*1080*3; idx++)
180  {
181  image[idx] = static_cast<float>(mem.m_Data[idx]) /255.0f;
182  }
183 
184  return image;
185 }
186 
187 
188 bool ValidateFilePath(std::string& file)
189 {
190  if (!ghc::filesystem::exists(file))
191  {
192  std::cerr << "Given file path " << file << " does not exist" << std::endl;
193  return false;
194  }
195  if (!ghc::filesystem::is_regular_file(file))
196  {
197  std::cerr << "Given file path " << file << " is not a regular file" << std::endl;
198  return false;
199  }
200  return true;
201 }
202 
203 void CheckAccuracy(std::vector<float>* toDetector0, std::vector<float>* toDetector1,
204  std::vector<float>* toDetector2, std::vector<float>* detectorOutput,
205  const std::vector<yolov3::Detection>& nmsOut, const std::vector<std::string>& filePaths)
206 {
207  std::ifstream pathStream;
208  std::vector<float> expected;
209  std::vector<std::vector<float>*> outputs;
210  float compare = 0;
211  unsigned int count = 0;
212 
213  //Push back output vectors from inference for use in loop
214  outputs.push_back(toDetector0);
215  outputs.push_back(toDetector1);
216  outputs.push_back(toDetector2);
217  outputs.push_back(detectorOutput);
218 
219  for (unsigned int i = 0; i < outputs.size(); ++i)
220  {
221  // Reading expected output files and assigning them to @expected. Close and Clear to reuse stream and clean RAM
222  pathStream.open(filePaths[i]);
223  if (!pathStream.is_open())
224  {
225  ARMNN_LOG(error) << "Expected output file can not be opened: " << filePaths[i];
226  continue;
227  }
228 
229  expected.assign(std::istream_iterator<float>(pathStream), {});
230  pathStream.close();
231  pathStream.clear();
232 
233  // Ensure each vector is the same length
234  if (expected.size() != outputs[i]->size())
235  {
236  ARMNN_LOG(error) << "Expected output size does not match actual output size: " << filePaths[i];
237  }
238  else
239  {
240  count = 0;
241 
242  // Compare abs(difference) with tolerance to check for value by value equality
243  for (unsigned int j = 0; j < outputs[i]->size(); ++j)
244  {
245  compare = abs(expected[j] - outputs[i]->at(j));
246  if (compare > 0.001f)
247  {
248  count++;
249  }
250  }
251  if (count > 0)
252  {
253  ARMNN_LOG(error) << count << " output(s) do not match expected values in: " << filePaths[i];
254  }
255  }
256  }
257 
258  pathStream.open(filePaths[4]);
259  if (!pathStream.is_open())
260  {
261  ARMNN_LOG(error) << "Expected output file can not be opened: " << filePaths[4];
262  }
263  else
264  {
265  expected.assign(std::istream_iterator<float>(pathStream), {});
266  pathStream.close();
267  pathStream.clear();
268  unsigned int y = 0;
269  unsigned int numOfMember = 6;
270  std::vector<float> intermediate;
271 
272  for (auto& detection: nmsOut)
273  {
274  for (unsigned int x = y * numOfMember; x < ((y * numOfMember) + numOfMember); ++x)
275  {
276  intermediate.push_back(expected[x]);
277  }
278  if (!yolov3::compare_detection(detection, intermediate))
279  {
280  ARMNN_LOG(error) << "Expected NMS output does not match: Detection " << y + 1;
281  }
282  intermediate.clear();
283  y++;
284  }
285  }
286 }
287 
288 struct ParseArgs
289 {
290  ParseArgs(int ac, char *av[]) : options{"TfLiteYoloV3Big-Armnn",
291  "Executes YoloV3Big using ArmNN. YoloV3Big consists "
292  "of 3 parts: A backbone TfLite model, a detector TfLite "
293  "model, and None Maximum Suppression. All parts are "
294  "executed successively."}
295  {
296  options.add_options()
297  ("b,backbone-path",
298  "File path where the TfLite model for the yoloV3big backbone "
299  "can be found e.g. mydir/yoloV3big_backbone.tflite",
300  cxxopts::value<std::string>())
301 
302  ("c,comparison-files",
303  "Defines the expected outputs for the model "
304  "of yoloV3big e.g. 'mydir/file1.txt,mydir/file2.txt,mydir/file3.txt,mydir/file4.txt'->InputToDetector1"
305  " will be tried first then InputToDetector2 then InputToDetector3 then the Detector Output and finally"
306  " the NMS output. NOTE: Files are passed as comma separated list without whitespaces.",
307  cxxopts::value<std::vector<std::string>>())
308 
309  ("d,detector-path",
310  "File path where the TfLite model for the yoloV3big "
311  "detector can be found e.g.'mydir/yoloV3big_detector.tflite'",
312  cxxopts::value<std::string>())
313 
314  ("h,help", "Produce help message")
315 
316  ("i,image-path",
317  "File path to a 1080x1920 jpg image that should be "
318  "processed e.g. 'mydir/example_img_180_1920.jpg'",
319  cxxopts::value<std::string>())
320 
321  ("B,preferred-backends-backbone",
322  "Defines the preferred backends to run the backbone model "
323  "of yoloV3big e.g. 'GpuAcc,CpuRef' -> GpuAcc will be tried "
324  "first before falling back to CpuRef. NOTE: Backends are passed "
325  "as comma separated list without whitespaces.",
326  cxxopts::value<std::vector<std::string>>()->default_value("GpuAcc,CpuRef"))
327 
328  ("D,preferred-backends-detector",
329  "Defines the preferred backends to run the detector model "
330  "of yoloV3big e.g. 'CpuAcc,CpuRef' -> CpuAcc will be tried "
331  "first before falling back to CpuRef. NOTE: Backends are passed "
332  "as comma separated list without whitespaces.",
333  cxxopts::value<std::vector<std::string>>()->default_value("CpuAcc,CpuRef"));
334 
335  auto result = options.parse(ac, av);
336 
337  if (result.count("help"))
338  {
339  std::cout << options.help() << "\n";
340  exit(EXIT_SUCCESS);
341  }
342 
343  backboneDir = GetPathArgument(result, "backbone-path");
344  comparisonFiles = GetPathArgument(result["comparison-files"].as<std::vector<std::string>>());
345  detectorDir = GetPathArgument(result, "detector-path");
346  imageDir = GetPathArgument(result, "image-path");
347 
348 
349 
350  prefBackendsBackbone = GetBackendIDs(result["preferred-backends-backbone"].as<std::vector<std::string>>());
351  LogBackendsInfo(prefBackendsBackbone, "Backbone");
352  prefBackendsDetector = GetBackendIDs(result["preferred-backends-detector"].as<std::vector<std::string>>());
353  LogBackendsInfo(prefBackendsDetector, "detector");
354  }
355 
356  /// Takes a vector of backend strings and returns a vector of backendIDs
357  std::vector<BackendId> GetBackendIDs(const std::vector<std::string>& backendStrings)
358  {
359  std::vector<BackendId> backendIDs;
360  for (const auto& b : backendStrings)
361  {
362  backendIDs.push_back(BackendId(b));
363  }
364  return backendIDs;
365  }
366 
367  /// Verifies if the program argument with the name argName contains a valid file path.
368  /// Returns the valid file path string if given argument is associated a valid file path.
369  /// Otherwise throws an exception.
370  std::string GetPathArgument(cxxopts::ParseResult& result, std::string&& argName)
371  {
372  if (result.count(argName))
373  {
374  std::string fileDir = result[argName].as<std::string>();
375  if (!ValidateFilePath(fileDir))
376  {
377  throw cxxopts::option_syntax_exception("Argument given to backbone-path is not a valid file path");
378  }
379  return fileDir;
380  }
381  else
382  {
383  throw cxxopts::missing_argument_exception(argName);
384  }
385  }
386 
387  /// Assigns vector of strings to struct member variable
388  std::vector<std::string> GetPathArgument(const std::vector<std::string>& pathStrings)
389  {
390  if (pathStrings.size() < 5){
391  throw cxxopts::option_syntax_exception("Comparison files requires 5 file paths.");
392  }
393 
394  std::vector<std::string> filePaths;
395  for (auto& path : pathStrings)
396  {
397  filePaths.push_back(path);
398  if (!ValidateFilePath(filePaths.back()))
399  {
400  throw cxxopts::option_syntax_exception("Argument given to Comparison Files is not a valid file path");
401  }
402  }
403  return filePaths;
404  }
405 
406  /// Log info about assigned backends
407  void LogBackendsInfo(std::vector<BackendId>& backends, std::string&& modelName)
408  {
409  std::string info;
410  info = "Preferred backends for " + modelName + " set to [ ";
411  for (auto const &backend : backends)
412  {
413  info = info + std::string(backend) + " ";
414  }
415  ARMNN_LOG(info) << info << "]";
416  }
417 
418  // Member variables
419  std::string backboneDir;
420  std::vector<std::string> comparisonFiles;
421  std::string detectorDir;
422  std::string imageDir;
423 
424  std::vector<BackendId> prefBackendsBackbone;
425  std::vector<BackendId> prefBackendsDetector;
426 
427  cxxopts::Options options;
428 };
429 
430 int main(int argc, char* argv[])
431 {
432  // Configure logging
433  SetAllLoggingSinks(true, true, true);
434  SetLogFilter(LogSeverity::Trace);
435 
436  // Check and get given program arguments
437  ParseArgs progArgs = ParseArgs(argc, argv);
438 
439  // Create runtime
440  IRuntime::CreationOptions runtimeOptions; // default
441  auto runtime = IRuntime::Create(runtimeOptions);
442  if (!runtime)
443  {
444  ARMNN_LOG(fatal) << "Could not create runtime.";
445  return -1;
446  }
447 
448  // Create TfLite Parsers
450  auto parser = ITfLiteParser::Create(parserOptions);
451 
452  // Load backbone model
453  ARMNN_LOG(info) << "Loading backbone...";
454  NetworkId backboneId;
455  CHECK_OK(LoadModel(progArgs.backboneDir.c_str(), *parser, *runtime, backboneId, progArgs.prefBackendsBackbone));
456  auto inputId = parser->GetNetworkInputBindingInfo(0, "inputs");
457  auto bbOut0Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_1");
458  auto bbOut1Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_2");
459  auto bbOut2Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_3");
460  auto backboneProfile = runtime->GetProfiler(backboneId);
461  backboneProfile->EnableProfiling(true);
462 
463  // Load detector model
464  ARMNN_LOG(info) << "Loading detector...";
465  NetworkId detectorId;
467  progArgs.detectorDir.c_str(), *parser, *runtime, detectorId, progArgs.prefBackendsDetector, true));
468  auto detectIn0Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_1");
469  auto detectIn1Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_2");
470  auto detectIn2Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_3");
471  auto outputBoxesId = parser->GetNetworkOutputBindingInfo(0, "output_boxes");
472  auto detectorProfile = runtime->GetProfiler(detectorId);
473 
474  // Load input from file
475  ARMNN_LOG(info) << "Loading test image...";
476  auto image = LoadImage(progArgs.imageDir.c_str());
477  if (image.empty())
478  {
479  return LOAD_IMAGE_ERROR;
480  }
481 
482  // Allocate the intermediate tensors
483  std::vector<float> intermediateMem0(bbOut0Id.second.GetNumElements());
484  std::vector<float> intermediateMem1(bbOut1Id.second.GetNumElements());
485  std::vector<float> intermediateMem2(bbOut2Id.second.GetNumElements());
486  std::vector<float> intermediateMem3(outputBoxesId.second.GetNumElements());
487 
488  // Setup inputs and outputs
489  using BindingInfos = std::vector<armnn::BindingPointInfo>;
490  using FloatTensors = std::vector<std::reference_wrapper<std::vector<float>>>;
491 
492  InputTensors bbInputTensors = MakeInputTensors(BindingInfos{ inputId },
493  FloatTensors{ image });
494  OutputTensors bbOutputTensors = MakeOutputTensors(BindingInfos{ bbOut0Id, bbOut1Id, bbOut2Id },
495  FloatTensors{ intermediateMem0,
496  intermediateMem1,
497  intermediateMem2 });
498  InputTensors detectInputTensors = MakeInputTensors(BindingInfos{ detectIn0Id,
499  detectIn1Id,
500  detectIn2Id } ,
501  FloatTensors{ intermediateMem0,
502  intermediateMem1,
503  intermediateMem2 });
504  OutputTensors detectOutputTensors = MakeOutputTensors(BindingInfos{ outputBoxesId },
505  FloatTensors{ intermediateMem3 });
506 
507  static const int numIterations=2;
508  using DurationUS = std::chrono::duration<double, std::micro>;
509  std::vector<DurationUS> nmsDurations(0);
510  std::vector<yolov3::Detection> filtered_boxes;
511  nmsDurations.reserve(numIterations);
512  for (int i=0; i < numIterations; i++)
513  {
514  // Execute backbone
515  ARMNN_LOG(info) << "Running backbone...";
516  runtime->EnqueueWorkload(backboneId, bbInputTensors, bbOutputTensors);
517 
518  // Execute detector
519  ARMNN_LOG(info) << "Running detector...";
520  runtime->EnqueueWorkload(detectorId, detectInputTensors, detectOutputTensors);
521 
522  // Execute NMS
523  ARMNN_LOG(info) << "Running nms...";
524  using clock = std::chrono::steady_clock;
525  auto nmsStartTime = clock::now();
526  yolov3::NMSConfig config;
527  config.num_boxes = 127800;
528  config.num_classes = 80;
529  config.confidence_threshold = 0.9f;
530  config.iou_threshold = 0.5f;
531  filtered_boxes = yolov3::nms(config, intermediateMem3);
532  auto nmsEndTime = clock::now();
533 
534  // Enable the profiling after the warm-up run
535  if (i>0)
536  {
537  print_detection(std::cout, filtered_boxes);
538 
539  const auto nmsDuration = DurationUS(nmsStartTime - nmsEndTime);
540  nmsDurations.push_back(nmsDuration);
541  }
542  backboneProfile->EnableProfiling(true);
543  detectorProfile->EnableProfiling(true);
544  }
545  // Log timings to file
546  std::ofstream backboneProfileStream("backbone.json");
547  backboneProfile->Print(backboneProfileStream);
548  backboneProfileStream.close();
549 
550  std::ofstream detectorProfileStream("detector.json");
551  detectorProfile->Print(detectorProfileStream);
552  detectorProfileStream.close();
553 
554  // Manually construct the json output
555  std::ofstream nmsProfileStream("nms.json");
556  nmsProfileStream << "{" << "\n";
557  nmsProfileStream << R"( "NmsTimings": {)" << "\n";
558  nmsProfileStream << R"( "raw": [)" << "\n";
559  bool isFirst = true;
560  for (auto duration : nmsDurations)
561  {
562  if (!isFirst)
563  {
564  nmsProfileStream << ",\n";
565  }
566 
567  nmsProfileStream << " " << duration.count();
568  isFirst = false;
569  }
570  nmsProfileStream << "\n";
571  nmsProfileStream << R"( "units": "us")" << "\n";
572  nmsProfileStream << " ]" << "\n";
573  nmsProfileStream << " }" << "\n";
574  nmsProfileStream << "}" << "\n";
575  nmsProfileStream.close();
576 
577  CheckAccuracy(&intermediateMem0, &intermediateMem1,
578  &intermediateMem2, &intermediateMem3,
579  filtered_boxes, progArgs.comparisonFiles);
580 
581  ARMNN_LOG(info) << "Run completed";
582  return 0;
583 }
void CheckAccuracy(std::vector< float > *toDetector0, std::vector< float > *toDetector1, std::vector< float > *toDetector2, std::vector< float > *detectorOutput, const std::vector< yolov3::Detection > &nmsOut, const std::vector< std::string > &filePaths)
void SetAllLoggingSinks(bool standardOut, bool debugOut, bool coloured)
Definition: Logging.cpp:142
int LoadModel(const char *filename, ITfLiteParser &parser, IRuntime &runtime, NetworkId &networkId, const std::vector< BackendId > &backendPreferences, bool enableImport=false)
armnn::InputTensors MakeInputTensors(const std::vector< armnn::BindingPointInfo > &inputBindings, const std::vector< std::reference_wrapper< TContainer >> &inputDataContainers)
int main(int argc, char *argv[])
#define ARMNN_LOG(severity)
Definition: Logging.hpp:163
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:324
int NetworkId
Definition: IRuntime.hpp:20
Copyright (c) 2020 ARM Limited.
unsigned int num_boxes
Number of detected boxes.
Definition: NMS.hpp:15
static ITfLiteParserPtr Create(const armnn::Optional< TfLiteParserOptions > &options=armnn::EmptyOptional())
virtual const IDeviceSpec & GetDeviceSpec() const =0
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:290
std::vector< float > LoadImage(const char *filename)
void SetLogFilter(LogSeverity level)
Definition: Logging.cpp:24
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1014
void print_detection(std::ostream &os, const std::vector< Detection > &detections)
Print identified yolo detections.
Definition: NMS.cpp:96
#define CHECK_OK(v)
virtual armnn::INetworkPtr CreateNetworkFromBinary(const std::vector< uint8_t > &binaryContent)=0
Create the network from a flatbuffers binary.
virtual Status LoadNetwork(NetworkId &networkIdOut, IOptimizedNetworkPtr network)=0
Loads a complete network into the IRuntime.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:298
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:325
Status
enumeration
Definition: Types.hpp:26
float iou_threshold
Inclusion threshold for Intersection-Over-Union.
Definition: NMS.hpp:17
boost::variant< std::vector< float >, std::vector< int >, std::vector< unsigned char > > TContainer
std::pair< armnn::LayerBindingId, armnn::TensorInfo > BindingPointInfo
Definition: Tensor.hpp:245
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
std::vector< Detection > nms(const NMSConfig &config, const std::vector< float > &detected_boxes)
Perform Non-Maxima Suppression on a list of given detections.
Definition: NMS.cpp:113
armnn::OutputTensors MakeOutputTensors(const std::vector< armnn::BindingPointInfo > &outputBindings, const std::vector< std::reference_wrapper< TContainer >> &outputDataContainers)
Non-Maxima Suppression configuration meta-data.
Definition: NMS.hpp:13
bool ValidateFilePath(std::string &file)
float confidence_threshold
Inclusion confidence threshold for a box.
Definition: NMS.hpp:16
bool compare_detection(const yolov3::Detection &detection, const std::vector< float > &expected)
Compare a detection object with a vector of float values.
Definition: NMS.cpp:84
unsigned int num_classes
Number of classes in the detected boxes.
Definition: NMS.hpp:14