ArmNN 20.02
ArmnnConverter.cpp File Reference
#include <armnn/Logging.hpp>
#include <HeapProfiling.hpp>
#include <boost/format.hpp>
#include <boost/algorithm/string/split.hpp>
#include <boost/algorithm/string/classification.hpp>
#include <boost/program_options.hpp>
#include <cstdlib>
#include <fstream>
#include <iostream>


Functions

int main (int argc, const char *argv[])
 

Function Documentation

◆ main()

int main(int argc, const char* argv[])

Definition at line 359 of file ArmnnConverter.cpp.

References ARMNN_LOG, armnn::ConfigureLogging(), armnn::Debug, armnn::Info, and Exception::what().
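
The listing below converts each non-empty input tensor shape string with ParseTensorShape(ss), a helper defined earlier in ArmnnConverter.cpp that this page does not show. As a rough sketch only (the delimiter, helper name and error message here are assumptions rather than the tool's actual code), a comma-separated dimension string can be turned into an armnn::TensorShape as follows:

// Illustrative sketch only: the real ParseTensorShape helper lives earlier in
// ArmnnConverter.cpp. Assumptions: dimensions are comma-separated and fit in
// unsigned int.
#include <armnn/Exceptions.hpp>
#include <armnn/Tensor.hpp>
#include <sstream>
#include <string>
#include <vector>

armnn::TensorShape ParseTensorShapeSketch(std::istream& stream)
{
    std::vector<unsigned int> dims;
    std::string token;
    while (std::getline(stream, token, ','))
    {
        dims.push_back(static_cast<unsigned int>(std::stoul(token)));
    }
    if (dims.empty())
    {
        throw armnn::InvalidArgumentException("Shape string contains no dimensions");
    }
    return armnn::TensorShape(static_cast<unsigned int>(dims.size()), dims.data());
}

Like the real helper, this throws armnn::InvalidArgumentException on malformed input, which is what the catch block in main() reports before exiting.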

{

#if (!defined(ARMNN_CAFFE_PARSER) \
     && !defined(ARMNN_ONNX_PARSER) \
     && !defined(ARMNN_TF_PARSER) \
     && !defined(ARMNN_TF_LITE_PARSER))
    ARMNN_LOG(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite.";
    return EXIT_FAILURE;
#endif

#if !defined(ARMNN_SERIALIZER)
    ARMNN_LOG(fatal) << "Not built with Serializer support.";
    return EXIT_FAILURE;
#endif

#ifdef NDEBUG
    armnn::LogSeverity level = armnn::LogSeverity::Info;
#else
    armnn::LogSeverity level = armnn::LogSeverity::Debug;
#endif

    armnn::ConfigureLogging(true, true, level);

    std::string modelFormat;
    std::string modelPath;

    std::vector<std::string> inputNames;
    std::vector<std::string> inputTensorShapeStrs;
    std::vector<armnn::TensorShape> inputTensorShapes;

    std::vector<std::string> outputNames;
    std::string outputPath;

    bool isModelBinary = true;

    if (ParseCommandLineArgs(
            argc, argv, modelFormat, modelPath, inputNames, inputTensorShapeStrs, outputNames, outputPath, isModelBinary)
        != EXIT_SUCCESS)
    {
        return EXIT_FAILURE;
    }

    for (const std::string& shapeStr : inputTensorShapeStrs)
    {
        if (!shapeStr.empty())
        {
            std::stringstream ss(shapeStr);

            try
            {
                armnn::TensorShape shape = ParseTensorShape(ss);
                inputTensorShapes.push_back(shape);
            }
            catch (const armnn::InvalidArgumentException& e)
            {
                ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
                return EXIT_FAILURE;
            }
        }
    }

    ArmnnConverter converter(modelPath, inputNames, inputTensorShapes, outputNames, outputPath, isModelBinary);

    try
    {
        if (modelFormat.find("caffe") != std::string::npos)
        {
#if defined(ARMNN_CAFFE_PARSER)
            if (!converter.CreateNetwork<armnnCaffeParser::ICaffeParser>())
            {
                ARMNN_LOG(fatal) << "Failed to load model from file";
                return EXIT_FAILURE;
            }
#else
            ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
            return EXIT_FAILURE;
#endif
        }
        else if (modelFormat.find("onnx") != std::string::npos)
        {
#if defined(ARMNN_ONNX_PARSER)
            if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
            {
                ARMNN_LOG(fatal) << "Failed to load model from file";
                return EXIT_FAILURE;
            }
#else
            ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
            return EXIT_FAILURE;
#endif
        }
        else if (modelFormat.find("tensorflow") != std::string::npos)
        {
#if defined(ARMNN_TF_PARSER)
            if (!converter.CreateNetwork<armnnTfParser::ITfParser>())
            {
                ARMNN_LOG(fatal) << "Failed to load model from file";
                return EXIT_FAILURE;
            }
#else
            ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
            return EXIT_FAILURE;
#endif
        }
        else if (modelFormat.find("tflite") != std::string::npos)
        {
#if defined(ARMNN_TF_LITE_PARSER)
            if (!isModelBinary)
            {
                ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
                                    for tflite files";
                return EXIT_FAILURE;
            }

            if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
            {
                ARMNN_LOG(fatal) << "Failed to load model from file";
                return EXIT_FAILURE;
            }
#else
            ARMNN_LOG(fatal) << "Not built with TfLite parser support.";
            return EXIT_FAILURE;
#endif
        }
        else
        {
            ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'";
            return EXIT_FAILURE;
        }
    }
    catch (armnn::Exception& e)
    {
        ARMNN_LOG(fatal) << "Failed to load model from file: " << e.what();
        return EXIT_FAILURE;
    }

    if (!converter.Serialize())
    {
        ARMNN_LOG(fatal) << "Failed to serialize model";
        return EXIT_FAILURE;
    }

    return EXIT_SUCCESS;
}
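
ArmnnConverter::CreateNetwork<TParser>() and ArmnnConverter::Serialize() are defined earlier in ArmnnConverter.cpp and are not reproduced on this page. A minimal sketch of the two steps they drive, written against the public armnnTfLiteParser and armnnSerializer interfaces (the function name and error handling here are assumptions, not the tool's actual implementation):

// Sketch of the load + serialize pipeline that main() drives. The helper name is
// assumed; only the ITfLiteParser and ISerializer calls are the public ArmNN API.
#include <armnn/INetwork.hpp>
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#include <armnnSerializer/ISerializer.hpp>
#include <fstream>
#include <string>

bool ConvertTfLiteModel(const std::string& modelPath, const std::string& outputPath)
{
    // TfLite models are flatbuffers, which is why main() insists on binary input
    // for the "tflite" format.
    auto parser = armnnTfLiteParser::ITfLiteParser::Create();
    armnn::INetworkPtr network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());

    // Write the network out in the ArmNN serialized format (ARMNN_SERIALIZER).
    auto serializer = armnnSerializer::ISerializer::Create();
    serializer->Serialize(*network);

    std::ofstream file(outputPath, std::ios::out | std::ios::binary);
    return serializer->SaveSerializedToStream(file);
}

The Caffe, Onnx and TensorFlow branches follow the same pattern with the corresponding ICaffeParser, IOnnxParser and ITfParser factories.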
Referenced symbols

void armnn::ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)
    Configures the logging behaviour of the ARMNN library. Definition: Utils.cpp:10
virtual const char* Exception::what() const noexcept override
    Definition: Exceptions.cpp:32
#define ARMNN_LOG(severity)
    Definition: Logging.hpp:163
armnnTfParser::ITfParser
    Parses a directed acyclic graph from a tensorflow protobuf file. Definition: ITfParser.hpp:25
armnn::Exception
    Base class for all ArmNN exceptions so that users can filter to just those. Definition: Exceptions.hpp:46
armnn::LogSeverity
    Definition: Utils.hpp:12
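
Tying the signatures above together, the logging setup at the top of main() amounts to the following standalone sketch (the severity is hard-coded here only for illustration; main() picks Info or Debug depending on NDEBUG):

// Minimal sketch of the logging setup performed by main(), shown in isolation.
#include <armnn/Logging.hpp>
#include <armnn/Utils.hpp>

void SetUpConverterLogging()
{
    // Print to standard output and to the debug output, at Info severity and above.
    armnn::ConfigureLogging(true, true, armnn::LogSeverity::Info);
    ARMNN_LOG(info) << "ArmnnConverter logging configured";
}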