ArmNN 20.11
ExecuteNetworkParams.cpp File Reference
#include "ExecuteNetworkParams.hpp"
#include "NetworkExecutionUtils/NetworkExecutionUtils.hpp"
#include <InferenceModel.hpp>
#include <armnn/Logging.hpp>
#include <fmt/format.h>

Go to the source code of this file.

Functions

bool IsModelBinary (const std::string &modelFormat)
 
void CheckModelFormat (const std::string &modelFormat)
 
void CheckClTuningParameter (const int &tuningLevel, const std::string &tuningPath, const std::vector< armnn::BackendId > computeDevices)
 

Function Documentation

◆ CheckClTuningParameter()

void CheckClTuningParameter ( const int &  tuningLevel,
const std::string &  tuningPath,
const std::vector< armnn::BackendId >  computeDevices 
)

Definition at line 91 of file ExecuteNetworkParams.cpp.

References ARMNN_LOG, and ValidatePath().

Referenced by ExecuteNetworkParams::ValidateParams().

94 {
95  if (!tuningPath.empty())
96  {
97  if (tuningLevel == 0)
98  {
99  ARMNN_LOG(info) << "Using cl tuning file: " << tuningPath << "\n";
100  if (!ValidatePath(tuningPath, true))
101  {
102  throw armnn::InvalidArgumentException("The tuning path is not valid");
103  }
104  }
105  else if ((1 <= tuningLevel) && (tuningLevel <= 3))
106  {
107  ARMNN_LOG(info) << "Starting execution to generate a cl tuning file: " << tuningPath << "\n"
108  << "Tuning level in use: " << tuningLevel << "\n";
109  }
110  else if ((tuningLevel < 0) || (tuningLevel > 3))
111  {
112  throw armnn::InvalidArgumentException(fmt::format("The tuning level {} is not valid.",
113  tuningLevel));
114  }
115 
116  // Ensure that GpuAcc is enabled. Otherwise no tuning data are used or generated.
117  // Only warn if it's not enabled.
118  auto it = std::find(computeDevices.begin(), computeDevices.end(), "GpuAcc");
119  if (it == computeDevices.end())
120  {
121  ARMNN_LOG(warning) << "To use CL tuning the compute device GpuAcc needs to be active.";
122  }
123  }
124 
125 }
#define ARMNN_LOG(severity)
Definition: Logging.hpp:163
bool ValidatePath(const std::string &file, const bool expectFile)
Verifies if the given string is a valid path.
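A minimal usage sketch (not part of the ArmNN sources) showing how CheckClTuningParameter might be called; the forward declaration, option values and tuning-file path are hypothetical:

#include <string>
#include <vector>
#include <armnn/BackendId.hpp>

// Free function defined in ExecuteNetworkParams.cpp; declared here for illustration.
void CheckClTuningParameter(const int& tuningLevel,
                            const std::string& tuningPath,
                            const std::vector<armnn::BackendId> computeDevices);

int main()
{
    // Hypothetical option values as they might arrive from the ExecuteNetwork command line.
    const int tuningLevel = 1;                            // levels 1-3 generate a tuning file
    const std::string tuningPath = "/tmp/cl_tuning.bin";  // illustrative path
    const std::vector<armnn::BackendId> computeDevices = { "GpuAcc", "CpuAcc" };

    // Throws armnn::InvalidArgumentException for an out-of-range tuning level, or for a
    // missing tuning file when tuningLevel is 0; only warns if GpuAcc is not listed.
    CheckClTuningParameter(tuningLevel, tuningPath, computeDevices);
    return 0;
}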

◆ CheckModelFormat()

void CheckModelFormat ( const std::string &  modelFormat)

Definition at line 33 of file ExecuteNetworkParams.cpp.

References IsModelBinary().

Referenced by ExecuteNetworkParams::ValidateParams().

34 {
35  // Forward to implementation based on the parser type
36  if (modelFormat.find("armnn") != std::string::npos)
37  {
38 #if defined(ARMNN_SERIALIZER)
39 #else
40  throw armnn::InvalidArgumentException("Can't run model in armnn format without a "
41  "built with serialization support.");
42 #endif
43  }
44  else if (modelFormat.find("caffe") != std::string::npos)
45  {
46 #if defined(ARMNN_CAFFE_PARSER)
47 #else
48  throw armnn::InvalidArgumentException("Can't run model in caffe format without a "
49  "built with Caffe parser support.");
50 #endif
51  }
52  else if (modelFormat.find("onnx") != std::string::npos)
53  {
54 #if defined(ARMNN_ONNX_PARSER)
55 #else
56  throw armnn::InvalidArgumentException("Can't run model in onnx format without a "
57  "built with Onnx parser support.");
58 #endif
59  }
60  else if (modelFormat.find("tensorflow") != std::string::npos)
61  {
62 #if defined(ARMNN_TF_PARSER)
63 #else
64  throw armnn::InvalidArgumentException("Can't run model in onnx format without a "
65  "built with Tensorflow parser support.");
66 #endif
67  }
68  else if (modelFormat.find("tflite") != std::string::npos)
69  {
70 #if defined(ARMNN_TF_LITE_PARSER)
71  if (!IsModelBinary(modelFormat))
72  {
73  throw armnn::InvalidArgumentException(fmt::format("Unknown model format: '{}'. Only 'binary' "
74  "format supported for tflite files",
75  modelFormat));
76  }
77 #elif defined(ARMNN_TFLITE_DELEGATE)
78 #else
79  throw armnn::InvalidArgumentException("Can't run model in tflite format without a "
80  "built with Tensorflow Lite parser support.");
81 #endif
82  }
83  else
84  {
85  throw armnn::InvalidArgumentException(fmt::format("Unknown model format: '{}'. "
86  "Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'",
87  modelFormat));
88  }
89 }
bool IsModelBinary(const std::string &modelFormat)
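A minimal usage sketch (not part of the ArmNN sources) for CheckModelFormat; the model-format strings are hypothetical examples, and whether a given format is accepted depends on the parser support compiled into the build:

#include <string>
#include <armnn/Exceptions.hpp>

// Free function defined in ExecuteNetworkParams.cpp; declared here for illustration.
void CheckModelFormat(const std::string& modelFormat);

int main()
{
    try
    {
        CheckModelFormat("tflite-binary");  // accepted when built with the TfLite parser or delegate
        CheckModelFormat("tflite-text");    // rejected by the TfLite parser: only 'binary' is supported
    }
    catch (const armnn::InvalidArgumentException& e)
    {
        // Report the unsupported or unknown model format.
    }
    return 0;
}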

◆ IsModelBinary()

bool IsModelBinary ( const std::string &  modelFormat)

Definition at line 14 of file ExecuteNetworkParams.cpp.

Referenced by CheckModelFormat(), and ExecuteNetworkParams::ValidateParams().

15 {
16  // Parse model binary flag from the model-format string we got from the command-line
17  if (modelFormat.find("binary") != std::string::npos)
18  {
19  return true;
20  }
21  else if (modelFormat.find("txt") != std::string::npos || modelFormat.find("text") != std::string::npos)
22  {
23  return false;
24  }
25  else
26  {
27  throw armnn::InvalidArgumentException(fmt::format("Unknown model format: '{}'. "
28  "Please include 'binary' or 'text'",
29  modelFormat));
30  }
31 }
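A minimal usage sketch (not part of the ArmNN sources) for IsModelBinary; the format strings are hypothetical examples:

#include <string>

// Free function defined in ExecuteNetworkParams.cpp; declared here for illustration.
bool IsModelBinary(const std::string& modelFormat);

int main()
{
    const bool onnxBinary = IsModelBinary("onnx-binary");      // true:  contains 'binary'
    const bool tfText     = IsModelBinary("tensorflow-text");  // false: contains 'text'
    // IsModelBinary("tensorflow") would throw armnn::InvalidArgumentException,
    // because the string names neither 'binary' nor 'text'/'txt'.
    return (onnxBinary && !tfText) ? 0 : 1;
}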