12 #include <fmt/format.h> 17 if (modelFormat.find(
"binary") != std::string::npos)
21 else if (modelFormat.find(
"txt") != std::string::npos || modelFormat.find(
"text") != std::string::npos)
28 "Please include 'binary' or 'text'",
36 if (modelFormat.find(
"armnn") != std::string::npos)
38 #if defined(ARMNN_SERIALIZER) 41 "built with serialization support.");
44 else if (modelFormat.find(
"caffe") != std::string::npos)
46 #if defined(ARMNN_CAFFE_PARSER) 49 "built with Caffe parser support.");
52 else if (modelFormat.find(
"onnx") != std::string::npos)
54 #if defined(ARMNN_ONNX_PARSER) 57 "built with Onnx parser support.");
60 else if (modelFormat.find(
"tensorflow") != std::string::npos)
62 #if defined(ARMNN_TF_PARSER) 65 "built with Tensorflow parser support.");
68 else if (modelFormat.find(
"tflite") != std::string::npos)
70 #if defined(ARMNN_TF_LITE_PARSER) 74 "format supported for tflite files",
77 #elif defined(ARMNN_TFLITE_DELEGATE) 80 "built with Tensorflow Lite parser support.");
86 "Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'",
92 const std::string& tuningPath,
93 const std::vector<armnn::BackendId> computeDevices)
95 if (!tuningPath.empty())
99 ARMNN_LOG(info) <<
"Using cl tuning file: " << tuningPath <<
"\n";
105 else if ((1 <= tuningLevel) && (tuningLevel <= 3))
107 ARMNN_LOG(info) <<
"Starting execution to generate a cl tuning file: " << tuningPath <<
"\n" 108 <<
"Tuning level in use: " << tuningLevel <<
"\n";
110 else if ((0 < tuningLevel) || (tuningLevel > 3))
118 auto it = std::find(computeDevices.begin(), computeDevices.end(),
"GpuAcc");
119 if (it == computeDevices.end())
121 ARMNN_LOG(warning) <<
"To use Cl Tuning the compute device GpuAcc needs to be active.";
130 bool throwExc =
false;
137 std::string invalidBackends;
140 ARMNN_LOG(fatal) <<
"The list of preferred devices contains invalid backend IDs: " 149 ARMNN_LOG(fatal) <<
"BFloat16 and Float16 turbo mode cannot be enabled at the same time.";
160 ARMNN_LOG(fatal) <<
"input-name and input-tensor-shape must have the same amount of elements. ";
167 ARMNN_LOG(fatal) <<
"One or more input data file paths are not valid. ";
172 ARMNN_LOG(fatal) <<
"input-name and input-tensor-data must have the same amount of elements. ";
179 ARMNN_LOG(fatal) <<
"output-name and write-outputs-to-file must have the same amount of elements. ";
190 ARMNN_LOG(fatal) <<
"input-name and input-type must have the same amount of elements.";
201 ARMNN_LOG(fatal) <<
"output-name and output-type must have the same amount of elements.";
207 ARMNN_LOG(fatal) <<
"Threshold time supplied as a command line argument is less than zero.";
210 catch (std::string& exc)
227 ARMNN_LOG(warning) <<
"No input files provided, input tensors will be filled with 0s.";
std::vector< std::string > m_InputTypes
std::vector< TensorShapePtr > m_InputTensorShapes
bool m_EnableFp16TurboMode
std::string m_DynamicBackendsPath
#define ARMNN_LOG(severity)
std::vector< std::string > m_OutputNames
std::vector< std::string > m_OutputTensorFiles
void CheckClTuningParameter(const int &tuningLevel, const std::string &tuningPath, const std::vector< armnn::BackendId > computeDevices)
std::vector< armnn::BackendId > m_ComputeDevices
std::vector< std::string > m_OutputTypes
bool m_GenerateTensorData
std::vector< std::string > m_InputNames
void CheckModelFormat(const std::string &modelFormat)
bool ValidatePaths(const std::vector< std::string > &fileVec, const bool expectFile)
Verifies that each string in a given vector is a valid path.
bool m_EnableBf16TurboMode
std::vector< std::string > m_InputTensorDataFilePaths
bool IsModelBinary(const std::string &modelFormat)
bool ValidatePath(const std::string &file, const bool expectFile)
Verifies if the given string is a valid path.
std::string m_ModelFormat