ArmNN 21.02
TfLiteYoloV3Big-Armnn.cpp File Reference
#include "armnnTfLiteParser/ITfLiteParser.hpp"
#include "NMS.hpp"
#include <stb/stb_image.h>
#include <armnn/INetwork.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <cxxopts/cxxopts.hpp>
#include <ghc/filesystem.hpp>
#include <chrono>
#include <cmath>
#include <fstream>
#include <iostream>
#include <stdlib.h>


Macros

#define CHECK_OK(v)
 
#define S_BOOL(name)   enum class name {False=0, True=1};
 

Enumerations

enum  ImportMemory { False =0, True =1 }
 
enum  DumpToDot { False =0, True =1 }
 
enum  ExpectFile { False =0, True =1 }
 
enum  OptionalArg { False =0, True =1 }
 

Functions

template<typename TContainer >
armnn::InputTensors MakeInputTensors (const std::vector< armnn::BindingPointInfo > &inputBindings, const std::vector< std::reference_wrapper< TContainer >> &inputDataContainers)
 
template<typename TContainer >
armnn::OutputTensors MakeOutputTensors (const std::vector< armnn::BindingPointInfo > &outputBindings, const std::vector< std::reference_wrapper< TContainer >> &outputDataContainers)
 
int LoadModel (const char *filename, ITfLiteParser &parser, IRuntime &runtime, NetworkId &networkId, const std::vector< BackendId > &backendPreferences, ImportMemory enableImport, DumpToDot dumpToDot)
 
std::vector< float > LoadImage (const char *filename)
 
bool ValidateFilePath (std::string &file, ExpectFile expectFile)
 
void CheckAccuracy (std::vector< float > *toDetector0, std::vector< float > *toDetector1, std::vector< float > *toDetector2, std::vector< float > *detectorOutput, const std::vector< yolov3::Detection > &nmsOut, const std::vector< std::string > &filePaths)
 
int main (int argc, char *argv[])
 

Macro Definition Documentation

◆ CHECK_OK

#define CHECK_OK (   v)
Value:
do { \
    try { \
        auto r_local = v; \
        if (r_local != 0) { return r_local;} \
    } \
    catch (const armnn::Exception& e) \
    { \
        ARMNN_LOG(error) << "Oops: " << e.what(); \
        return GENERAL_ERROR; \
    } \
} while(0)

Definition at line 34 of file TfLiteYoloV3Big-Armnn.cpp.

Referenced by main().
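The macro can only be used inside a function that returns an int status code. A minimal, hypothetical sketch of its use (RunBackbone and its argument list are illustrative, not part of this file):

int RunBackbone(ParseArgs& progArgs, ITfLiteParser& parser, IRuntime& runtime)
{
    NetworkId backboneId;
    // Propagates any non-zero return code from LoadModel, or returns
    // GENERAL_ERROR if an armnn::Exception escapes the call.
    CHECK_OK(LoadModel(progArgs.backboneDir.c_str(), parser, runtime, backboneId,
                       progArgs.prefBackendsBackbone, ImportMemory::False, DumpToDot::False));
    return 0;
}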

◆ S_BOOL

#define S_BOOL (   name)    enum class name {False=0, True=1};

Definition at line 100 of file TfLiteYoloV3Big-Armnn.cpp.
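The macro generates the strongly typed boolean enums documented in the next section; for example, S_BOOL(ImportMemory) expands to:

enum class ImportMemory {False=0, True=1};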

Enumeration Type Documentation

◆ DumpToDot

enum DumpToDot (strong, i.e. enum class)

Enumerators: False, True

Definition at line 103 of file TfLiteYoloV3Big-Armnn.cpp.

◆ ExpectFile

enum ExpectFile (strong, i.e. enum class)

Enumerators: False, True

Definition at line 104 of file TfLiteYoloV3Big-Armnn.cpp.

◆ ImportMemory

enum ImportMemory (strong, i.e. enum class)

Enumerators: False, True

Definition at line 102 of file TfLiteYoloV3Big-Armnn.cpp.

◆ OptionalArg

enum OptionalArg (strong, i.e. enum class)

Enumerators: False, True

Definition at line 105 of file TfLiteYoloV3Big-Armnn.cpp.
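These scoped enums replace raw bool parameters so that call sites stay self-documenting and the flags cannot be swapped silently. A small sketch, assuming the LoadModel() call documented later on this page:

// Passing these in the wrong order is a compile error, unlike two raw bools.
ImportMemory enableImport = ImportMemory::False;
DumpToDot    dumpToDot    = DumpToDot::True;
// LoadModel(modelPath, parser, runtime, networkId, backends, enableImport, dumpToDot);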

Function Documentation

◆ CheckAccuracy()

void CheckAccuracy ( std::vector< float > *  toDetector0,
std::vector< float > *  toDetector1,
std::vector< float > *  toDetector2,
std::vector< float > *  detectorOutput,
const std::vector< yolov3::Detection > &  nmsOut,
const std::vector< std::string > &  filePaths 
)

Definition at line 223 of file TfLiteYoloV3Big-Armnn.cpp.

References ARMNN_LOG, yolov3::compare_detection(), armnn::error, False, GetBackendIDs(), armnn::info, True, and ValidateFilePath().

{
    std::ifstream pathStream;
    std::vector<float> expected;
    std::vector<std::vector<float>*> outputs;
    float compare = 0;
    unsigned int count = 0;

    // Push back output vectors from inference for use in loop
    outputs.push_back(toDetector0);
    outputs.push_back(toDetector1);
    outputs.push_back(toDetector2);
    outputs.push_back(detectorOutput);

    for (unsigned int i = 0; i < outputs.size(); ++i)
    {
        // Read the expected output file into @expected. Close and clear the stream so it can be reused.
        pathStream.open(filePaths[i]);
        if (!pathStream.is_open())
        {
            ARMNN_LOG(error) << "Expected output file can not be opened: " << filePaths[i];
            continue;
        }

        expected.assign(std::istream_iterator<float>(pathStream), {});
        pathStream.close();
        pathStream.clear();

        // Ensure each vector is the same length
        if (expected.size() != outputs[i]->size())
        {
            ARMNN_LOG(error) << "Expected output size does not match actual output size: " << filePaths[i];
        }
        else
        {
            count = 0;

            // Compare abs(difference) with the tolerance to check for value-by-value equality
            for (unsigned int j = 0; j < outputs[i]->size(); ++j)
            {
                compare = std::fabs(expected[j] - outputs[i]->at(j));
                if (compare > 0.001f)
                {
                    count++;
                }
            }
            if (count > 0)
            {
                ARMNN_LOG(error) << count << " output(s) do not match expected values in: " << filePaths[i];
            }
        }
    }

    pathStream.open(filePaths[4]);
    if (!pathStream.is_open())
    {
        ARMNN_LOG(error) << "Expected output file can not be opened: " << filePaths[4];
    }
    else
    {
        expected.assign(std::istream_iterator<float>(pathStream), {});
        pathStream.close();
        pathStream.clear();
        unsigned int y = 0;
        unsigned int numOfMember = 6;
        std::vector<float> intermediate;

        for (auto& detection: nmsOut)
        {
            for (unsigned int x = y * numOfMember; x < ((y * numOfMember) + numOfMember); ++x)
            {
                intermediate.push_back(expected[x]);
            }
            if (!yolov3::compare_detection(detection, intermediate))
            {
                ARMNN_LOG(error) << "Expected NMS output does not match: Detection " << y + 1;
            }
            intermediate.clear();
            y++;
        }
    }
}
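Usage sketch, mirroring the call in main(): the first four comparison files hold whitespace-separated floats for the three detector inputs and the detector output, and the fifth holds the reference NMS detections (six floats per detection). The file names below are illustrative only.

std::vector<std::string> comparisonFiles = {
    "expected_to_detector_1.txt",   // compared against *toDetector0
    "expected_to_detector_2.txt",   // compared against *toDetector1
    "expected_to_detector_3.txt",   // compared against *toDetector2
    "expected_output_boxes.txt",    // compared against *detectorOutput
    "expected_nms_output.txt"       // six floats per detection, compared against nmsOut
};
CheckAccuracy(&intermediateMem0, &intermediateMem1, &intermediateMem2,
              &intermediateMem3, filtered_boxes, comparisonFiles);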

◆ LoadImage()

std::vector<float> LoadImage ( const char *  filename)

Definition at line 162 of file TfLiteYoloV3Big-Armnn.cpp.

References ARMNN_LOG, and armnn::error.

Referenced by main().

{
    if (strlen(filename) == 0)
    {
        return std::vector<float>(1920*1080*3, 0.0f);
    }
    struct Memory
    {
        ~Memory() {stbi_image_free(m_Data);}
        bool IsLoaded() const { return m_Data != nullptr;}

        unsigned char* m_Data;
    };

    std::vector<float> image;

    int width;
    int height;
    int channels;

    Memory mem = {stbi_load(filename, &width, &height, &channels, 3)};
    if (!mem.IsLoaded())
    {
        ARMNN_LOG(error) << "Could not load input image file: " << filename;
        return image;
    }

    if (width != 1920 || height != 1080 || channels != 3)
    {
        ARMNN_LOG(error) << "Input image has wrong dimensions: " << width << "x" << height << "x" << channels << ". "
                            " Expected 1920x1080x3.";
        return image;
    }

    image.resize(1920*1080*3);

    // Expand to float in the [0, 1] range. Does this need de-gamma?
    for (unsigned int idx = 0; idx < 1920*1080*3; idx++)
    {
        image[idx] = static_cast<float>(mem.m_Data[idx]) / 255.0f;
    }

    return image;
}
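Typical use, as in main(): an empty result signals a load failure, an empty filename returns an all-zero dummy frame, and a successful load yields 1920x1080x3 interleaved RGB floats scaled to [0, 1]. The path below is illustrative.

std::vector<float> image = LoadImage("test_frame_1080p.png");
if (image.empty())
{
    return LOAD_IMAGE_ERROR;
}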

◆ LoadModel()

int LoadModel ( const char *  filename,
ITfLiteParser &  parser,
IRuntime &  runtime,
NetworkId &  networkId,
const std::vector< BackendId > &  backendPreferences,
ImportMemory  enableImport,
DumpToDot  dumpToDot 
)

Definition at line 107 of file TfLiteYoloV3Big-Armnn.cpp.

References ARMNN_LOG, and armnn::error.

Referenced by main().

{
    std::ifstream stream(filename, std::ios::in | std::ios::binary);
    if (!stream.is_open())
    {
        ARMNN_LOG(error) << "Could not open model: " << filename;
        return OPEN_FILE_ERROR;
    }

    std::vector<uint8_t> contents((std::istreambuf_iterator<char>(stream)), std::istreambuf_iterator<char>());
    stream.close();

    auto model = parser.CreateNetworkFromBinary(contents);
    contents.clear();
    ARMNN_LOG(debug) << "Model loaded ok: " << filename;

    // Optimize backbone model
    OptimizerOptions options;
    options.m_ImportEnabled = enableImport != ImportMemory::False;
    auto optimizedModel = Optimize(*model, backendPreferences, runtime.GetDeviceSpec(), options);
    if (!optimizedModel)
    {
        ARMNN_LOG(fatal) << "Could not optimize the model:" << filename;
        return OPTIMIZE_NETWORK_ERROR;
    }

    if (dumpToDot != DumpToDot::False)
    {
        std::stringstream ss;
        ss << filename << ".dot";
        std::ofstream dotStream(ss.str().c_str(), std::ofstream::out);
        optimizedModel->SerializeToDot(dotStream);
        dotStream.close();
    }
    // Load model into runtime
    {
        std::string errorMessage;
        INetworkProperties modelProps(options.m_ImportEnabled, options.m_ImportEnabled);
        Status status = runtime.LoadNetwork(networkId, std::move(optimizedModel), errorMessage, modelProps);
        if (status != Status::Success)
        {
            ARMNN_LOG(fatal) << "Could not load " << filename << " model into runtime: " << errorMessage;
            return LOAD_NETWORK_ERROR;
        }
    }

    return 0;
}
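A usage sketch mirroring main(), assuming a parser and runtime have already been created; the model path is illustrative and the backend list is one possible preference order.

NetworkId detectorId;
int ret = LoadModel("yolov3_1080p_detector_fp32.tflite",
                    *parser,
                    *runtime,
                    detectorId,
                    {"GpuAcc", "CpuAcc", "CpuRef"},   // backend preference order
                    ImportMemory::True,               // enable memory import during optimization
                    DumpToDot::False);
if (ret != 0)
{
    return ret;   // OPEN_FILE_ERROR, OPTIMIZE_NETWORK_ERROR or LOAD_NETWORK_ERROR
}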

◆ main()

int main ( int  argc,
char *  argv[] 
)

Definition at line 481 of file TfLiteYoloV3Big-Armnn.cpp.

References ARMNN_LOG, CHECK_OK, ITfLiteParser::Create(), False, armnn::fatal, armnn::info, LoadImage(), LoadModel(), IRuntime::CreationOptions::m_DynamicBackendsPath, armnn::SetAllLoggingSinks(), armnn::SetLogFilter(), and True.

{
    // Configure logging
    SetAllLoggingSinks(true, true, true);
    SetLogFilter(LogSeverity::Trace);

    // Check and get given program arguments
    ParseArgs progArgs = ParseArgs(argc, argv);

    // Create runtime
    IRuntime::CreationOptions runtimeOptions; // default

    if (!progArgs.dynamicBackendPath.empty())
    {
        std::cout << "Loading backends from " << progArgs.dynamicBackendPath << "\n";
        runtimeOptions.m_DynamicBackendsPath = progArgs.dynamicBackendPath;
    }

    auto runtime = IRuntime::Create(runtimeOptions);
    if (!runtime)
    {
        ARMNN_LOG(fatal) << "Could not create runtime.";
        return -1;
    }

    // Create TfLite Parsers
    ITfLiteParser::TfLiteParserOptions parserOptions;
    auto parser = ITfLiteParser::Create(parserOptions);

    // Load backbone model
    ARMNN_LOG(info) << "Loading backbone...";
    NetworkId backboneId;
    const DumpToDot dumpToDot = progArgs.dumpToDot;
    CHECK_OK(LoadModel(progArgs.backboneDir.c_str(),
                       *parser,
                       *runtime,
                       backboneId,
                       progArgs.prefBackendsBackbone,
                       ImportMemory::False,
                       dumpToDot));
    auto inputId = parser->GetNetworkInputBindingInfo(0, "inputs");
    auto bbOut0Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_1");
    auto bbOut1Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_2");
    auto bbOut2Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_3");
    auto backboneProfile = runtime->GetProfiler(backboneId);
    backboneProfile->EnableProfiling(true);

    // Load detector model
    ARMNN_LOG(info) << "Loading detector...";
    NetworkId detectorId;
    CHECK_OK(LoadModel(progArgs.detectorDir.c_str(),
                       *parser,
                       *runtime,
                       detectorId,
                       progArgs.prefBackendsDetector,
                       ImportMemory::True,
                       dumpToDot));
    auto detectIn0Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_1");
    auto detectIn1Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_2");
    auto detectIn2Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_3");
    auto outputBoxesId = parser->GetNetworkOutputBindingInfo(0, "output_boxes");
    auto detectorProfile = runtime->GetProfiler(detectorId);

    // Load input from file
    ARMNN_LOG(info) << "Loading test image...";
    auto image = LoadImage(progArgs.imageDir.c_str());
    if (image.empty())
    {
        return LOAD_IMAGE_ERROR;
    }

    // Allocate the intermediate tensors
    std::vector<float> intermediateMem0(bbOut0Id.second.GetNumElements());
    std::vector<float> intermediateMem1(bbOut1Id.second.GetNumElements());
    std::vector<float> intermediateMem2(bbOut2Id.second.GetNumElements());
    std::vector<float> intermediateMem3(outputBoxesId.second.GetNumElements());

    // Setup inputs and outputs
    using BindingInfos = std::vector<armnn::BindingPointInfo>;
    using FloatTensors = std::vector<std::reference_wrapper<std::vector<float>>>;

    InputTensors bbInputTensors = MakeInputTensors(BindingInfos{ inputId },
                                                   FloatTensors{ image });
    OutputTensors bbOutputTensors = MakeOutputTensors(BindingInfos{ bbOut0Id, bbOut1Id, bbOut2Id },
                                                      FloatTensors{ intermediateMem0,
                                                                    intermediateMem1,
                                                                    intermediateMem2 });
    InputTensors detectInputTensors = MakeInputTensors(BindingInfos{ detectIn0Id,
                                                                     detectIn1Id,
                                                                     detectIn2Id },
                                                       FloatTensors{ intermediateMem0,
                                                                     intermediateMem1,
                                                                     intermediateMem2 });
    OutputTensors detectOutputTensors = MakeOutputTensors(BindingInfos{ outputBoxesId },
                                                          FloatTensors{ intermediateMem3 });

    static const int numIterations = 2;
    using DurationUS = std::chrono::duration<double, std::micro>;
    std::vector<DurationUS> nmsDurations(0);
    std::vector<yolov3::Detection> filtered_boxes;
    nmsDurations.reserve(numIterations);
    for (int i = 0; i < numIterations; i++)
    {
        // Execute backbone
        ARMNN_LOG(info) << "Running backbone...";
        runtime->EnqueueWorkload(backboneId, bbInputTensors, bbOutputTensors);

        // Execute detector
        ARMNN_LOG(info) << "Running detector...";
        runtime->EnqueueWorkload(detectorId, detectInputTensors, detectOutputTensors);

        // Execute NMS
        ARMNN_LOG(info) << "Running nms...";
        using clock = std::chrono::steady_clock;
        auto nmsStartTime = clock::now();
        yolov3::NMSConfig config;
        config.num_boxes = 127800;
        config.num_classes = 80;
        config.confidence_threshold = 0.9f;
        config.iou_threshold = 0.5f;
        filtered_boxes = yolov3::nms(config, intermediateMem3);
        auto nmsEndTime = clock::now();

        // Only record results after the warm-up run
        if (i > 0)
        {
            print_detection(std::cout, filtered_boxes);

            const auto nmsDuration = DurationUS(nmsEndTime - nmsStartTime);
            nmsDurations.push_back(nmsDuration);
        }
        // Enable the profiling after the warm-up run
        backboneProfile->EnableProfiling(true);
        detectorProfile->EnableProfiling(true);
    }
    // Log timings to file
    std::ofstream backboneProfileStream("backbone.json");
    backboneProfile->Print(backboneProfileStream);
    backboneProfileStream.close();

    std::ofstream detectorProfileStream("detector.json");
    detectorProfile->Print(detectorProfileStream);
    detectorProfileStream.close();

    // Manually construct the json output
    std::ofstream nmsProfileStream("nms.json");
    nmsProfileStream << "{" << "\n";
    nmsProfileStream << R"( "NmsTimings": {)" << "\n";
    nmsProfileStream << R"( "raw": [)" << "\n";
    bool isFirst = true;
    for (auto duration : nmsDurations)
    {
        if (!isFirst)
        {
            nmsProfileStream << ",\n";
        }

        nmsProfileStream << " " << duration.count();
        isFirst = false;
    }
    nmsProfileStream << "\n";
    nmsProfileStream << " ]," << "\n";
    nmsProfileStream << R"( "units": "us")" << "\n";
    nmsProfileStream << " }" << "\n";
    nmsProfileStream << "}" << "\n";
    nmsProfileStream.close();

    if (progArgs.comparisonFiles.size() > 0)
    {
        CheckAccuracy(&intermediateMem0,
                      &intermediateMem1,
                      &intermediateMem2,
                      &intermediateMem3,
                      filtered_boxes,
                      progArgs.comparisonFiles);
    }

    ARMNN_LOG(info) << "Run completed";
    return 0;
}
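For reference, the nms.json file written above has the following shape (the timing value is made up for illustration):

{
 "NmsTimings": {
 "raw": [
 1234.5
 ],
 "units": "us"
 }
}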

◆ MakeInputTensors()

armnn::InputTensors MakeInputTensors ( const std::vector< armnn::BindingPointInfo > &  inputBindings,
const std::vector< std::reference_wrapper< TContainer >> &  inputDataContainers 
)
inline

Definition at line 50 of file TfLiteYoloV3Big-Armnn.cpp.

Referenced by InferenceModel< IParser, TDataType >::GetAllQuantizationParams(), and InferenceModel< IParser, TDataType >::Run().

{
    armnn::InputTensors inputTensors;

    const size_t numInputs = inputBindings.size();
    if (numInputs != inputDataContainers.size())
    {
        throw armnn::Exception("Mismatching vectors");
    }

    for (size_t i = 0; i < numInputs; i++)
    {
        const armnn::BindingPointInfo& inputBinding = inputBindings[i];
        const TContainer& inputData = inputDataContainers[i].get();

        armnn::ConstTensor inputTensor(inputBinding.second, inputData.data());
        inputTensors.push_back(std::make_pair(inputBinding.first, inputTensor));
    }

    return inputTensors;
}

◆ MakeOutputTensors()

armnn::OutputTensors MakeOutputTensors ( const std::vector< armnn::BindingPointInfo > &  outputBindings,
const std::vector< std::reference_wrapper< TContainer >> &  outputDataContainers 
)
inline

Definition at line 74 of file TfLiteYoloV3Big-Armnn.cpp.

Referenced by InferenceModel< IParser, TDataType >::GetAllQuantizationParams(), and InferenceModel< IParser, TDataType >::Run().

{
    armnn::OutputTensors outputTensors;

    const size_t numOutputs = outputBindings.size();
    if (numOutputs != outputDataContainers.size())
    {
        throw armnn::Exception("Mismatching vectors");
    }

    outputTensors.reserve(numOutputs);

    for (size_t i = 0; i < numOutputs; i++)
    {
        const armnn::BindingPointInfo& outputBinding = outputBindings[i];
        const TContainer& outputData = outputDataContainers[i].get();

        armnn::Tensor outputTensor(outputBinding.second, const_cast<float*>(outputData.data()));
        outputTensors.push_back(std::make_pair(outputBinding.first, outputTensor));
    }

    return outputTensors;
}
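MakeInputTensors() and MakeOutputTensors() are used together in main() to wrap plain float vectors for EnqueueWorkload(). A minimal sketch with one input and one output binding, assuming a network has already been loaded and the binding names follow main():

using BindingInfos = std::vector<armnn::BindingPointInfo>;
using FloatTensors = std::vector<std::reference_wrapper<std::vector<float>>>;

std::vector<float> inputData(inputId.second.GetNumElements());
std::vector<float> outputData(outputBoxesId.second.GetNumElements());

armnn::InputTensors  inputTensors  = MakeInputTensors(BindingInfos{ inputId },
                                                      FloatTensors{ inputData });
armnn::OutputTensors outputTensors = MakeOutputTensors(BindingInfos{ outputBoxesId },
                                                       FloatTensors{ outputData });
// Both helpers throw armnn::Exception("Mismatching vectors") if the binding and
// container counts differ.
runtime->EnqueueWorkload(networkId, inputTensors, outputTensors);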

◆ ValidateFilePath()

bool ValidateFilePath ( std::string &  file,
ExpectFile  expectFile 
)

Definition at line 208 of file TfLiteYoloV3Big-Armnn.cpp.

References True.

Referenced by CheckAccuracy().

{
    if (!ghc::filesystem::exists(file))
    {
        std::cerr << "Given file path " << file << " does not exist" << std::endl;
        return false;
    }
    if (!ghc::filesystem::is_regular_file(file) && expectFile == ExpectFile::True)
    {
        std::cerr << "Given file path " << file << " is not a regular file" << std::endl;
        return false;
    }
    return true;
}
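A short usage sketch: ExpectFile::True additionally requires the path to be a regular file, while ExpectFile::False accepts any existing path such as a directory (both paths below are illustrative).

std::string modelPath  = "yolov3_backbone.tflite";
std::string backendDir = "/usr/lib/armnn-backends/";

bool modelOk   = ValidateFilePath(modelPath,  ExpectFile::True);
bool backendOk = ValidateFilePath(backendDir, ExpectFile::False);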