ArmNN 21.11
ExecuteNetworkParams.cpp File Reference
#include "ExecuteNetworkParams.hpp"
#include "NetworkExecutionUtils/NetworkExecutionUtils.hpp"
#include <InferenceModel.hpp>
#include <armnn/Logging.hpp>
#include <fmt/format.h>


Functions

bool IsModelBinary (const std::string &modelFormat)
 
void CheckModelFormat (const std::string &modelFormat)
 
void CheckClTuningParameter (const int &tuningLevel, const std::string &tuningPath, const std::vector< armnn::BackendId > computeDevices)
 

Function Documentation

◆ CheckClTuningParameter()

void CheckClTuningParameter ( const int &                            tuningLevel,
                              const std::string &                    tuningPath,
                              const std::vector< armnn::BackendId >  computeDevices
                            )

Definition at line 75 of file ExecuteNetworkParams.cpp.

References ARMNN_LOG, and ValidatePath().

Referenced by ExecuteNetworkParams::ValidateParams().

{
    if (!tuningPath.empty())
    {
        if (tuningLevel == 0)
        {
            ARMNN_LOG(info) << "Using cl tuning file: " << tuningPath << "\n";
            if (!ValidatePath(tuningPath, true))
            {
                throw armnn::InvalidArgumentException("The tuning path is not valid");
            }
        }
        else if ((1 <= tuningLevel) && (tuningLevel <= 3))
        {
            ARMNN_LOG(info) << "Starting execution to generate a cl tuning file: " << tuningPath << "\n"
                            << "Tuning level in use: " << tuningLevel << "\n";
        }
        else if ((tuningLevel < 0) || (tuningLevel > 3))
        {
            throw armnn::InvalidArgumentException(fmt::format("The tuning level {} is not valid.",
                                                              tuningLevel));
        }

        // Ensure that GpuAcc is enabled. Otherwise no tuning data is used or generated.
        // Only warn if it is not enabled.
        auto it = std::find(computeDevices.begin(), computeDevices.end(), "GpuAcc");
        if (it == computeDevices.end())
        {
            ARMNN_LOG(warning) << "To use Cl Tuning the compute device GpuAcc needs to be active.";
        }
    }
}
#define ARMNN_LOG(severity)
Definition: Logging.hpp:202
bool ValidatePath(const std::string &file, const bool expectFile)
Verifies if the given string is a valid path.
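A minimal usage sketch (not part of the ArmNN sources) showing how this check behaves. It assumes the function is made visible through a forward declaration and that the program links against ExecuteNetworkParams.cpp; the tuning path and backend list are illustrative placeholders.

#include <armnn/BackendId.hpp>
#include <armnn/Exceptions.hpp>
#include <iostream>
#include <string>
#include <vector>

// Forward declaration; the definition lives in ExecuteNetworkParams.cpp.
void CheckClTuningParameter(const int& tuningLevel,
                            const std::string& tuningPath,
                            const std::vector<armnn::BackendId> computeDevices);

int main()
{
    // GpuAcc is present, so no warning about missing Cl tuning support is logged.
    std::vector<armnn::BackendId> devices = { "GpuAcc", "CpuRef" };

    // Levels 1-3 with a non-empty path: logs that a tuning file will be generated.
    CheckClTuningParameter(2, "/tmp/cl_tuning.bin", devices);   // placeholder path

    // A level outside 0-3 is rejected with InvalidArgumentException.
    try
    {
        CheckClTuningParameter(7, "/tmp/cl_tuning.bin", devices);
    }
    catch (const armnn::InvalidArgumentException& e)
    {
        std::cerr << "Rejected: " << e.what() << "\n";
    }
    return 0;
}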

◆ CheckModelFormat()

void CheckModelFormat ( const std::string &  modelFormat)

Definition at line 33 of file ExecuteNetworkParams.cpp.

References IsModelBinary().

Referenced by ExecuteNetworkParams::ValidateParams().

{
    // Forward to implementation based on the parser type
    if (modelFormat.find("armnn") != std::string::npos)
    {
#if defined(ARMNN_SERIALIZER)
#else
        throw armnn::InvalidArgumentException("Can't run model in armnn format without being "
                                              "built with serialization support.");
#endif
    }
    else if (modelFormat.find("onnx") != std::string::npos)
    {
#if defined(ARMNN_ONNX_PARSER)
#else
        throw armnn::InvalidArgumentException("Can't run model in onnx format without being "
                                              "built with Onnx parser support.");
#endif
    }
    else if (modelFormat.find("tflite") != std::string::npos)
    {
#if defined(ARMNN_TF_LITE_PARSER)
        if (!IsModelBinary(modelFormat))
        {
            throw armnn::InvalidArgumentException(fmt::format("Unknown model format: '{}'. Only 'binary' "
                                                              "format is supported for tflite files",
                                                              modelFormat));
        }
#elif defined(ARMNN_TFLITE_DELEGATE)
#else
        throw armnn::InvalidArgumentException("Can't run model in tflite format without being "
                                              "built with Tensorflow Lite parser support.");
#endif
    }
    else
    {
        throw armnn::InvalidArgumentException(fmt::format("Unknown model format: '{}'. "
                                                          "Please include 'tflite' or 'onnx'",
                                                          modelFormat));
    }
}
bool IsModelBinary(const std::string &modelFormat)
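A brief usage sketch (not from the ArmNN sources). It assumes a forward declaration of CheckModelFormat, linkage against this translation unit, and a build with a TfLite parser or delegate compiled in; the format strings are illustrative values of the kind this file expects from the command line.

#include <armnn/Exceptions.hpp>
#include <iostream>
#include <string>

// Forward declaration; the definition is in ExecuteNetworkParams.cpp.
void CheckModelFormat(const std::string& modelFormat);

int main()
{
    // Passes when a TfLite parser or delegate was compiled in; otherwise it throws.
    CheckModelFormat("tflite-binary");

    // A string naming no known parser is rejected.
    try
    {
        CheckModelFormat("caffe-binary");   // illustrative unsupported format
    }
    catch (const armnn::InvalidArgumentException& e)
    {
        std::cerr << e.what() << "\n";
    }
    return 0;
}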

◆ IsModelBinary()

bool IsModelBinary ( const std::string &  modelFormat)

Definition at line 14 of file ExecuteNetworkParams.cpp.

Referenced by CheckModelFormat(), and ExecuteNetworkParams::ValidateParams().

{
    // Parse model binary flag from the model-format string we got from the command-line
    if (modelFormat.find("binary") != std::string::npos)
    {
        return true;
    }
    else if (modelFormat.find("txt") != std::string::npos || modelFormat.find("text") != std::string::npos)
    {
        return false;
    }
    else
    {
        throw armnn::InvalidArgumentException(fmt::format("Unknown model format: '{}'. "
                                                          "Please include 'binary' or 'text'",
                                                          modelFormat));
    }
}
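
A short usage sketch (not part of the ArmNN sources), assuming a forward declaration and linkage against this file; the format strings are illustrative command-line values.

#include <armnn/Exceptions.hpp>
#include <iostream>
#include <string>

// Forward declaration; the definition is in ExecuteNetworkParams.cpp.
bool IsModelBinary(const std::string& modelFormat);

int main()
{
    std::cout << std::boolalpha
              << IsModelBinary("tflite-binary") << "\n"   // true
              << IsModelBinary("onnx-text")     << "\n";  // false

    // A string containing neither 'binary' nor 'txt'/'text' is rejected.
    try
    {
        IsModelBinary("tflite");
    }
    catch (const armnn::InvalidArgumentException& e)
    {
        std::cerr << e.what() << "\n";
    }
    return 0;
}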