ArmNN 21.02 — ArmnnConverter.cpp (source listing).
Go to the documentation of this file for the rendered reference page.
1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 #include <armnn/Logging.hpp>
6 
7 #if defined(ARMNN_CAFFE_PARSER)
9 #endif
10 #if defined(ARMNN_ONNX_PARSER)
12 #endif
13 #if defined(ARMNN_SERIALIZER)
15 #endif
16 #if defined(ARMNN_TF_PARSER)
18 #endif
19 #if defined(ARMNN_TF_LITE_PARSER)
21 #endif
22 
23 #include <HeapProfiling.hpp>
26 
27 /*
28  * Historically we use the ',' character to separate dimensions in a tensor shape. However, cxxopts will read this
29  * as an array of values which is fine until we have multiple tensors specified. This lumps the values of all shapes
30  * together in a single array and we cannot break it up again. We'll change the vector delimiter to a '.'. We do this
31  * as close as possible to the usage of cxxopts to avoid polluting other possible uses.
32  */
33 #define CXXOPTS_VECTOR_DELIMITER '.'
34 #include <cxxopts/cxxopts.hpp>
35 
36 #include <fmt/format.h>
37 
38 #include <cstdlib>
39 #include <fstream>
40 #include <iostream>
41 
42 namespace
43 {
44 
45 armnn::TensorShape ParseTensorShape(std::istream& stream)
46 {
47  std::vector<unsigned int> result;
48  std::string line;
49 
50  while (std::getline(stream, line))
51  {
52  std::vector<std::string> tokens = armnn::stringUtils::StringTokenizer(line, ",");
53  for (const std::string& token : tokens)
54  {
55  if (!token.empty())
56  {
57  try
58  {
59  result.push_back(armnn::numeric_cast<unsigned int>(std::stoi((token))));
60  }
61  catch (const std::exception&)
62  {
63  ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
64  }
65  }
66  }
67  }
68 
69  return armnn::TensorShape(armnn::numeric_cast<unsigned int>(result.size()), result.data());
70 }
71 
72 int ParseCommandLineArgs(int argc, char* argv[],
73  std::string& modelFormat,
74  std::string& modelPath,
75  std::vector<std::string>& inputNames,
76  std::vector<std::string>& inputTensorShapeStrs,
77  std::vector<std::string>& outputNames,
78  std::string& outputPath, bool& isModelBinary)
79 {
80  cxxopts::Options options("ArmNNConverter", "Convert a neural network model from provided file to ArmNN format.");
81  try
82  {
83  std::string modelFormatDescription("Format of the model file");
84 #if defined(ARMNN_CAFFE_PARSER)
85  modelFormatDescription += ", caffe-binary, caffe-text";
86 #endif
87 #if defined(ARMNN_ONNX_PARSER)
88  modelFormatDescription += ", onnx-binary, onnx-text";
89 #endif
90 #if defined(ARMNN_TF_PARSER)
91  modelFormatDescription += ", tensorflow-binary, tensorflow-text";
92 #endif
93 #if defined(ARMNN_TF_LITE_PARSER)
94  modelFormatDescription += ", tflite-binary";
95 #endif
96  modelFormatDescription += ".";
97  options.add_options()
98  ("help", "Display usage information")
99  ("f,model-format", modelFormatDescription, cxxopts::value<std::string>(modelFormat))
100  ("m,model-path", "Path to model file.", cxxopts::value<std::string>(modelPath))
101 
102  ("i,input-name", "Identifier of the input tensors in the network. "
103  "Each input must be specified separately.",
104  cxxopts::value<std::vector<std::string>>(inputNames))
105  ("s,input-tensor-shape",
106  "The shape of the input tensor in the network as a flat array of integers, "
107  "separated by comma. Each input shape must be specified separately after the input name. "
108  "This parameter is optional, depending on the network.",
109  cxxopts::value<std::vector<std::string>>(inputTensorShapeStrs))
110 
111  ("o,output-name", "Identifier of the output tensor in the network.",
112  cxxopts::value<std::vector<std::string>>(outputNames))
113  ("p,output-path",
114  "Path to serialize the network to.", cxxopts::value<std::string>(outputPath));
115  }
116  catch (const std::exception& e)
117  {
118  std::cerr << e.what() << std::endl << options.help() << std::endl;
119  return EXIT_FAILURE;
120  }
121  try
122  {
123  cxxopts::ParseResult result = options.parse(argc, argv);
124  if (result.count("help"))
125  {
126  std::cerr << options.help() << std::endl;
127  return EXIT_SUCCESS;
128  }
129  // Check for mandatory single options.
130  std::string mandatorySingleParameters[] = { "model-format", "model-path", "output-name", "output-path" };
131  bool somethingsMissing = false;
132  for (auto param : mandatorySingleParameters)
133  {
134  if (result.count(param) != 1)
135  {
136  std::cerr << "Parameter \'--" << param << "\' is required but missing." << std::endl;
137  somethingsMissing = true;
138  }
139  }
140  // Check at least one "input-name" option.
141  if (result.count("input-name") == 0)
142  {
143  std::cerr << "Parameter \'--" << "input-name" << "\' must be specified at least once." << std::endl;
144  somethingsMissing = true;
145  }
146  // If input-tensor-shape is specified then there must be a 1:1 match with input-name.
147  if (result.count("input-tensor-shape") > 0)
148  {
149  if (result.count("input-tensor-shape") != result.count("input-name"))
150  {
151  std::cerr << "When specifying \'input-tensor-shape\' a matching number of \'input-name\' parameters "
152  "must be specified." << std::endl;
153  somethingsMissing = true;
154  }
155  }
156 
157  if (somethingsMissing)
158  {
159  std::cerr << options.help() << std::endl;
160  return EXIT_FAILURE;
161  }
162  }
163  catch (const cxxopts::OptionException& e)
164  {
165  std::cerr << e.what() << std::endl << std::endl;
166  return EXIT_FAILURE;
167  }
168 
169  if (modelFormat.find("bin") != std::string::npos)
170  {
171  isModelBinary = true;
172  }
173  else if (modelFormat.find("text") != std::string::npos)
174  {
175  isModelBinary = false;
176  }
177  else
178  {
179  ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
180  return EXIT_FAILURE;
181  }
182 
183  return EXIT_SUCCESS;
184 }
185 
/// Tag type used to dispatch ArmnnConverter::CreateNetwork to the overload
/// matching a particular parser interface at compile time.
template<typename T>
struct ParserType
{
    using parserType = T; // type alias (modern `using` instead of `typedef`)
};
191 
/// Drives the conversion: holds the user-supplied paths/names/shapes, builds
/// an armnn::INetwork via one of the parser front-ends (CreateNetwork) and
/// writes it out in the ArmNN serialized format (Serialize).
class ArmnnConverter
{
public:
 /// Captures the conversion parameters; no file I/O happens here.
 ArmnnConverter(const std::string& modelPath,
 const std::vector<std::string>& inputNames,
 const std::vector<armnn::TensorShape>& inputShapes,
 const std::vector<std::string>& outputNames,
 const std::string& outputPath,
 bool isModelBinary)
 // Start with a null network; the no-op lambda satisfies INetworkPtr's
 // custom-deleter signature until a parser supplies a real network.
 : m_NetworkPtr(armnn::INetworkPtr(nullptr, [](armnn::INetwork *){})),
 m_ModelPath(modelPath),
 m_InputNames(inputNames),
 m_InputShapes(inputShapes),
 m_OutputNames(outputNames),
 m_OutputPath(outputPath),
 m_IsModelBinary(isModelBinary) {}

 /// Serializes the previously created network to m_OutputPath (binary stream).
 /// Returns false when no network has been created yet or the save fails.
 bool Serialize()
 {
 if (m_NetworkPtr.get() == nullptr)
 {
 return false;
 }

 // NOTE(review): the construction of 'serializer' (presumably
 // armnnSerializer::ISerializer::Create()) appears to have been elided by
 // the documentation extraction — restore it before compiling.

 serializer->Serialize(*m_NetworkPtr);

 std::ofstream file(m_OutputPath, std::ios::out | std::ios::binary);

 bool retVal = serializer->SaveSerializedToStream(file);

 return retVal;
 }

 /// Public entry point: selects the parser-specific overload below via tag
 /// dispatch on ParserType<IParser>.
 template <typename IParser>
 bool CreateNetwork ()
 {
 return CreateNetwork (ParserType<IParser>());
 }

private:
 armnn::INetworkPtr m_NetworkPtr; // parsed network; null until CreateNetwork succeeds
 std::string m_ModelPath; // path of the source model file
 std::vector<std::string> m_InputNames; // input tensor identifiers
 std::vector<armnn::TensorShape> m_InputShapes; // optional shapes, parallel to m_InputNames
 std::vector<std::string> m_OutputNames; // output tensor identifiers
 std::string m_OutputPath; // destination for the serialized network
 bool m_IsModelBinary; // binary vs. text model file

 /// Generic overload for parsers that take explicit input shapes and the list
 /// of requested outputs (binary or text model files).
 template <typename IParser>
 bool CreateNetwork (ParserType<IParser>)
 {
 // Create a network from a file on disk
 auto parser(IParser::Create());

 std::map<std::string, armnn::TensorShape> inputShapes;
 if (!m_InputShapes.empty())
 {
 // Every named input must have a shape when any shapes were given at all.
 const size_t numInputShapes = m_InputShapes.size();
 const size_t numInputBindings = m_InputNames.size();
 if (numInputShapes < numInputBindings)
 {
 throw armnn::Exception(fmt::format(
 "Not every input has its tensor shape specified: expected={0}, got={1}",
 numInputBindings, numInputShapes));
 }

 for (size_t i = 0; i < numInputShapes; i++)
 {
 inputShapes[m_InputNames[i]] = m_InputShapes[i];
 }
 }

 {
 ARMNN_SCOPED_HEAP_PROFILING("Parsing");
 m_NetworkPtr = (m_IsModelBinary ?
 parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str(), inputShapes, m_OutputNames) :
 parser->CreateNetworkFromTextFile(m_ModelPath.c_str(), inputShapes, m_OutputNames));
 }

 return m_NetworkPtr.get() != nullptr;
 }

#if defined(ARMNN_TF_LITE_PARSER)
 /// TfLite overload: only the shape-count sanity check is performed here and
 /// the parser is handed just the file path; binary files only.
 bool CreateNetwork (ParserType<armnnTfLiteParser::ITfLiteParser>)
 {
 // Create a network from a file on disk
 // NOTE(review): the construction of 'parser' (presumably
 // ITfLiteParser::Create()) appears to have been elided by the
 // documentation extraction — restore it before compiling.

 if (!m_InputShapes.empty())
 {
 const size_t numInputShapes = m_InputShapes.size();
 const size_t numInputBindings = m_InputNames.size();
 if (numInputShapes < numInputBindings)
 {
 throw armnn::Exception(fmt::format(
 "Not every input has its tensor shape specified: expected={0}, got={1}",
 numInputBindings, numInputShapes));
 }
 }

 {
 ARMNN_SCOPED_HEAP_PROFILING("Parsing");
 m_NetworkPtr = parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str());
 }

 return m_NetworkPtr.get() != nullptr;
 }
#endif

#if defined(ARMNN_ONNX_PARSER)
 /// ONNX overload: supports both binary and text model files; the parser is
 /// handed just the file path.
 bool CreateNetwork (ParserType<armnnOnnxParser::IOnnxParser>)
 {
 // Create a network from a file on disk
 // NOTE(review): the construction of 'parser' (presumably
 // IOnnxParser::Create()) appears to have been elided by the
 // documentation extraction — restore it before compiling.

 if (!m_InputShapes.empty())
 {
 const size_t numInputShapes = m_InputShapes.size();
 const size_t numInputBindings = m_InputNames.size();
 if (numInputShapes < numInputBindings)
 {
 throw armnn::Exception(fmt::format(
 "Not every input has its tensor shape specified: expected={0}, got={1}",
 numInputBindings, numInputShapes));
 }
 }

 {
 ARMNN_SCOPED_HEAP_PROFILING("Parsing");
 m_NetworkPtr = (m_IsModelBinary ?
 parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str()) :
 parser->CreateNetworkFromTextFile(m_ModelPath.c_str()));
 }

 return m_NetworkPtr.get() != nullptr;
 }
#endif

};
333 
334 } // anonymous namespace
335 
/// Tool entry point: parse the command line, build the network with the parser
/// matching --model-format, then serialize it to --output-path.
int main(int argc, char* argv[])
{

// Refuse to run when no parser front-end was compiled in.
#if (!defined(ARMNN_CAFFE_PARSER) \
 && !defined(ARMNN_ONNX_PARSER) \
 && !defined(ARMNN_TF_PARSER) \
 && !defined(ARMNN_TF_LITE_PARSER))
 ARMNN_LOG(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite.";
 return EXIT_FAILURE;
#endif

// Serializing is the tool's whole purpose; bail out when it is unavailable.
#if !defined(ARMNN_SERIALIZER)
 ARMNN_LOG(fatal) << "Not built with Serializer support.";
 return EXIT_FAILURE;
#endif

// NOTE(review): the definitions of 'level' (presumably an armnn::LogSeverity,
// quieter for NDEBUG builds) appear to have been elided by the documentation
// extraction — restore them before compiling.
#ifdef NDEBUG
#else
#endif

 armnn::ConfigureLogging(true, true, level);

 // Destinations filled in by ParseCommandLineArgs.
 std::string modelFormat;
 std::string modelPath;

 std::vector<std::string> inputNames;
 std::vector<std::string> inputTensorShapeStrs;
 std::vector<armnn::TensorShape> inputTensorShapes;

 std::vector<std::string> outputNames;
 std::string outputPath;

 bool isModelBinary = true;

 if (ParseCommandLineArgs(
 argc, argv, modelFormat, modelPath, inputNames, inputTensorShapeStrs, outputNames, outputPath, isModelBinary)
 != EXIT_SUCCESS)
 {
 return EXIT_FAILURE;
 }

 // Convert each --input-tensor-shape string into an armnn::TensorShape.
 for (const std::string& shapeStr : inputTensorShapeStrs)
 {
 if (!shapeStr.empty())
 {
 std::stringstream ss(shapeStr);

 try
 {
 armnn::TensorShape shape = ParseTensorShape(ss);
 inputTensorShapes.push_back(shape);
 }
 catch (const armnn::InvalidArgumentException& e)
 {
 ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
 return EXIT_FAILURE;
 }
 }
 }

 ArmnnConverter converter(modelPath, inputNames, inputTensorShapes, outputNames, outputPath, isModelBinary);

 // Select the parser front-end by substring match on the format string; each
 // branch is usable only when that parser was compiled in.
 try
 {
 if (modelFormat.find("caffe") != std::string::npos)
 {
#if defined(ARMNN_CAFFE_PARSER)
 if (!converter.CreateNetwork<armnnCaffeParser::ICaffeParser>())
 {
 ARMNN_LOG(fatal) << "Failed to load model from file";
 return EXIT_FAILURE;
 }
#else
 ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
 return EXIT_FAILURE;
#endif
 }
 else if (modelFormat.find("onnx") != std::string::npos)
 {
#if defined(ARMNN_ONNX_PARSER)
 if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
 {
 ARMNN_LOG(fatal) << "Failed to load model from file";
 return EXIT_FAILURE;
 }
#else
 ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
 return EXIT_FAILURE;
#endif
 }
 else if (modelFormat.find("tensorflow") != std::string::npos)
 {
#if defined(ARMNN_TF_PARSER)
 if (!converter.CreateNetwork<armnnTfParser::ITfParser>())
 {
 ARMNN_LOG(fatal) << "Failed to load model from file";
 return EXIT_FAILURE;
 }
#else
 ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
 return EXIT_FAILURE;
#endif
 }
 else if (modelFormat.find("tflite") != std::string::npos)
 {
#if defined(ARMNN_TF_LITE_PARSER)
 // The TfLite front-end only reads flatbuffers, so "text" is rejected.
 if (!isModelBinary)
 {
 ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
 for tflite files";
 return EXIT_FAILURE;
 }

 if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
 {
 ARMNN_LOG(fatal) << "Failed to load model from file";
 return EXIT_FAILURE;
 }
#else
 ARMNN_LOG(fatal) << "Not built with TfLite parser support.";
 return EXIT_FAILURE;
#endif
 }
 else
 {
 ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'";
 return EXIT_FAILURE;
 }
 }
 catch(armnn::Exception& e)
 {
 ARMNN_LOG(fatal) << "Failed to load model from file: " << e.what();
 return EXIT_FAILURE;
 }

 if (!converter.Serialize())
 {
 ARMNN_LOG(fatal) << "Failed to serialize model";
 return EXIT_FAILURE;
 }

 return EXIT_SUCCESS;
}
std::vector< std::string > StringTokenizer(const std::string &str, const char *delimiters, bool tokenCompression=true)
Function to take a string and a list of delimiters and split the string into tokens based on those delimiters.
Definition: StringUtils.hpp:20
void ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)
Configures the logging behaviour of the ARMNN library.
Definition: Utils.cpp:18
virtual const char * what() const noexcept override
Definition: Exceptions.cpp:32
#define ARMNN_LOG(severity)
Definition: Logging.hpp:202
Main network class which provides the interface for building up a neural network. ...
Definition: INetwork.hpp:178
static ITfLiteParserPtr Create(const armnn::Optional< TfLiteParserOptions > &options=armnn::EmptyOptional())
#define ARMNN_SCOPED_HEAP_PROFILING(TAG)
static IOnnxParserPtr Create()
Definition: OnnxParser.cpp:36
Parses a directed acyclic graph from a tensorflow protobuf file.
Definition: ITfParser.hpp:25
Base class for all ArmNN exceptions so that users can filter to just those.
Definition: Exceptions.hpp:46
static ISerializerPtr Create()
Definition: Serializer.cpp:36
int main(int argc, char *argv[])
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:173
LogSeverity
Definition: Utils.hpp:13