ArmNN 21.02
ExecuteNetworkParams Struct Reference

Holds all parameters necessary to execute a network. Check ExecuteNetworkProgramOptions.cpp for a description of each parameter.

#include <ExecuteNetworkParams.hpp>

Public Types

using TensorShapePtr = std::unique_ptr< armnn::TensorShape >
 

Public Member Functions

void ValidateParams ()
 

Public Attributes

std::string m_CachedNetworkFilePath
 
std::vector< armnn::BackendId > m_ComputeDevices
 
bool m_DequantizeOutput
 
std::string m_DynamicBackendsPath
 
bool m_EnableBf16TurboMode
 
bool m_EnableFastMath = false
 
bool m_EnableFp16TurboMode
 
bool m_EnableLayerDetails = false
 
bool m_EnableProfiling
 
bool m_GenerateTensorData
 
bool m_InferOutputShape = false
 
bool m_EnableDelegate = false
 
std::vector< std::string > m_InputNames
 
std::vector< std::string > m_InputTensorDataFilePaths
 
std::vector< TensorShapePtr > m_InputTensorShapes
 
std::vector< std::string > m_InputTypes
 
bool m_IsModelBinary
 
size_t m_Iterations
 
std::string m_ModelFormat
 
std::string m_ModelPath
 
unsigned int m_NumberOfThreads
 
std::vector< std::string > m_OutputNames
 
std::vector< std::string > m_OutputTensorFiles
 
std::vector< std::string > m_OutputTypes
 
bool m_ParseUnsupported = false
 
bool m_PrintIntermediate
 
bool m_QuantizeInput
 
bool m_SaveCachedNetwork
 
size_t m_SubgraphId
 
double m_ThresholdTime
 
int m_TuningLevel
 
std::string m_TuningPath
 
std::string m_MLGOTuningFilePath
 

Detailed Description

Holds all parameters necessary to execute a network. Check ExecuteNetworkProgramOptions.cpp for a description of each parameter.

Definition at line 13 of file ExecuteNetworkParams.hpp.
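Putting the members below together, the struct is typically populated field by field and then validated. The following is a minimal sketch, not taken from the ArmNN sources; the model path, format string, and backend names are illustrative assumptions:

    #include "ExecuteNetworkParams.hpp"

    int main()
    {
        ExecuteNetworkParams params;

        // Illustrative values only.
        params.m_ModelPath      = "model.tflite";
        params.m_ModelFormat    = "tflite-binary";
        params.m_ComputeDevices = { "CpuAcc", "CpuRef" };
        params.m_InputNames     = { "input" };
        params.m_OutputNames    = { "output" };

        // Members without in-class initialisers are left indeterminate, so give
        // the ones ValidateParams() reads an explicit value before calling it.
        params.m_EnableBf16TurboMode = false;
        params.m_EnableFp16TurboMode = false;
        params.m_GenerateTensorData  = false;
        params.m_ThresholdTime       = 0.0;
        params.m_TuningLevel         = 0;

        // Logs a fatal message (or throws, depending on configuration) if the
        // parameters are inconsistent, e.g. mismatched name/type list sizes.
        params.ValidateParams();
        return 0;
    }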

Member Typedef Documentation

◆ TensorShapePtr

using TensorShapePtr = std::unique_ptr<armnn::TensorShape>

Definition at line 15 of file ExecuteNetworkParams.hpp.
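Because TensorShapePtr is simply a std::unique_ptr over armnn::TensorShape, an explicit input shape for m_InputTensorShapes can be built as below. This is a sketch; the dimensions are placeholders and params is assumed to be an existing ExecuteNetworkParams instance:

    #include <armnn/Tensor.hpp>
    #include <memory>

    // Override the shape of the first input (batch of one, NHWC layout).
    params.m_InputTensorShapes.push_back(
        std::make_unique<armnn::TensorShape>(armnn::TensorShape({ 1, 224, 224, 3 })));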

Member Function Documentation

◆ ValidateParams()

void ValidateParams ( )

Definition at line 127 of file ExecuteNetworkParams.cpp.

References ARMNN_LOG, CheckClTuningParameter(), CheckModelFormat(), IsModelBinary(), m_ComputeDevices, m_DynamicBackendsPath, m_EnableBf16TurboMode, m_EnableFp16TurboMode, m_GenerateTensorData, m_InputNames, m_InputTensorDataFilePaths, m_InputTensorShapes, m_InputTypes, m_IsModelBinary, m_ModelFormat, m_OutputNames, m_OutputTensorFiles, m_OutputTypes, m_ThresholdTime, m_TuningLevel, m_TuningPath, and ValidatePaths().

Referenced by ProgramOptions::ValidateExecuteNetworkParams().

void ExecuteNetworkParams::ValidateParams()
{
    // Set to true if it is preferred to throw an exception rather than use ARMNN_LOG
    bool throwExc = false;

    try
    {
        if (m_DynamicBackendsPath == "")
        {
            // Check compute devices are valid unless they are dynamically loaded at runtime
            std::string invalidBackends;
            if (!CheckRequestedBackendsAreValid(m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
            {
                ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
                                 << invalidBackends;
            }
        }

        CheckClTuningParameter(m_TuningLevel, m_TuningPath, m_ComputeDevices);

        if (m_EnableBf16TurboMode && m_EnableFp16TurboMode)
        {
            ARMNN_LOG(fatal) << "BFloat16 and Float16 turbo mode cannot be enabled at the same time.";
        }

        m_IsModelBinary = IsModelBinary(m_ModelFormat);

        CheckModelFormat(m_ModelFormat);

        // Check input tensor shapes
        if ((m_InputTensorShapes.size() != 0) &&
            (m_InputTensorShapes.size() != m_InputNames.size()))
        {
            ARMNN_LOG(fatal) << "input-name and input-tensor-shape must have the same amount of elements. ";
        }

        if (m_InputTensorDataFilePaths.size() != 0)
        {
            if (!ValidatePaths(m_InputTensorDataFilePaths, true))
            {
                ARMNN_LOG(fatal) << "One or more input data file paths are not valid. ";
            }

            if (m_InputTensorDataFilePaths.size() != m_InputNames.size())
            {
                ARMNN_LOG(fatal) << "input-name and input-tensor-data must have the same amount of elements. ";
            }
        }

        if ((m_OutputTensorFiles.size() != 0) &&
            (m_OutputTensorFiles.size() != m_OutputNames.size()))
        {
            ARMNN_LOG(fatal) << "output-name and write-outputs-to-file must have the same amount of elements. ";
        }

        if (m_InputTypes.size() == 0)
        {
            // Defaults the value of all inputs to "float"
            m_InputTypes.assign(m_InputNames.size(), "float");
        }
        else if ((m_InputTypes.size() != 0) &&
                 (m_InputTypes.size() != m_InputNames.size()))
        {
            ARMNN_LOG(fatal) << "input-name and input-type must have the same amount of elements.";
        }

        if (m_OutputTypes.size() == 0)
        {
            // Defaults the value of all outputs to "float"
            m_OutputTypes.assign(m_OutputNames.size(), "float");
        }
        else if ((m_OutputTypes.size() != 0) &&
                 (m_OutputTypes.size() != m_OutputNames.size()))
        {
            ARMNN_LOG(fatal) << "output-name and output-type must have the same amount of elements.";
        }

        // Check that threshold time is not less than zero
        if (m_ThresholdTime < 0)
        {
            ARMNN_LOG(fatal) << "Threshold time supplied as a command line argument is less than zero.";
        }
    }
    catch (std::string& exc)
    {
        if (throwExc)
        {
            throw armnn::InvalidArgumentException(exc);
        }
        else
        {
            std::cout << exc;
            exit(EXIT_FAILURE);
        }
    }

    // Warn if ExecuteNetwork will generate dummy input data
    if (m_GenerateTensorData)
    {
        ARMNN_LOG(warning) << "No input files provided, input tensors will be filled with 0s.";
    }
}
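To illustrate the count checks above: every per-input and per-output list must either be empty or hold one entry per name. A hypothetical parameter set like the following would satisfy those checks (names and paths are placeholders, not taken from the ArmNN sources):

    ExecuteNetworkParams params;
    params.m_InputNames               = { "in0", "in1" };
    params.m_InputTensorDataFilePaths = { "in0.bin", "in1.bin" }; // one existing file per input name
    params.m_InputTypes               = {};                       // empty: defaulted to "float" per input
    params.m_OutputNames              = { "out0" };
    params.m_OutputTypes              = { "float" };
    params.m_OutputTensorFiles        = {};                       // empty: outputs are not written to file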

Member Data Documentation

◆ m_CachedNetworkFilePath

std::string m_CachedNetworkFilePath

Definition at line 17 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_ComputeDevices

std::vector<armnn::BackendId> m_ComputeDevices

Definition at line 18 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_DequantizeOutput

bool m_DequantizeOutput

Definition at line 19 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_DynamicBackendsPath

std::string m_DynamicBackendsPath

Definition at line 20 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_EnableBf16TurboMode

bool m_EnableBf16TurboMode

◆ m_EnableDelegate

bool m_EnableDelegate = false

Definition at line 28 of file ExecuteNetworkParams.hpp.

Referenced by main(), and ProgramOptions::ProgramOptions().

◆ m_EnableFastMath

bool m_EnableFastMath = false

Definition at line 22 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_EnableFp16TurboMode

bool m_EnableFp16TurboMode

◆ m_EnableLayerDetails

bool m_EnableLayerDetails = false

Definition at line 24 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_EnableProfiling

bool m_EnableProfiling

◆ m_GenerateTensorData

bool m_GenerateTensorData

Definition at line 26 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_InferOutputShape

bool m_InferOutputShape = false

Definition at line 27 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_InputNames

std::vector<std::string> m_InputNames

Definition at line 29 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_InputTensorDataFilePaths

std::vector<std::string> m_InputTensorDataFilePaths

Definition at line 30 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_InputTensorShapes

std::vector<TensorShapePtr> m_InputTensorShapes

Definition at line 31 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_InputTypes

std::vector<std::string> m_InputTypes

Definition at line 32 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_IsModelBinary

bool m_IsModelBinary

Definition at line 33 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ValidateParams().

◆ m_Iterations

size_t m_Iterations

Definition at line 34 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_MLGOTuningFilePath

std::string m_MLGOTuningFilePath

◆ m_ModelFormat

std::string m_ModelFormat

Definition at line 35 of file ExecuteNetworkParams.hpp.

Referenced by main(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_ModelPath

std::string m_ModelPath

Definition at line 36 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_NumberOfThreads

unsigned int m_NumberOfThreads

Definition at line 37 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_OutputNames

std::vector<std::string> m_OutputNames

Definition at line 38 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_OutputTensorFiles

std::vector<std::string> m_OutputTensorFiles

Definition at line 39 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_OutputTypes

std::vector<std::string> m_OutputTypes

Definition at line 40 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), ProgramOptions::ParseOptions(), and ValidateParams().

◆ m_ParseUnsupported

bool m_ParseUnsupported = false

Definition at line 41 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_PrintIntermediate

bool m_PrintIntermediate

Definition at line 42 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_QuantizeInput

bool m_QuantizeInput

Definition at line 43 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_SaveCachedNetwork

bool m_SaveCachedNetwork

Definition at line 44 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_SubgraphId

size_t m_SubgraphId

Definition at line 45 of file ExecuteNetworkParams.hpp.

Referenced by MainImpl(), and ProgramOptions::ProgramOptions().

◆ m_ThresholdTime

double m_ThresholdTime

◆ m_TuningLevel

int m_TuningLevel

◆ m_TuningPath

std::string m_TuningPath

The documentation for this struct was generated from the following files:

ExecuteNetworkParams.hpp
ExecuteNetworkParams.cpp