ArmNN 20.05
ArmnnConverter.cpp File Reference
#include <armnn/Logging.hpp>
#include <HeapProfiling.hpp>
#include "armnn/utility/StringUtils.hpp"
#include <boost/format.hpp>
#include <boost/program_options.hpp>
#include <cstdlib>
#include <fstream>
#include <iostream>


Functions

int main (int argc, const char *argv[])
 

Function Documentation

◆ main()

int main(int argc, const char* argv[])
Definition at line 348 of file ArmnnConverter.cpp.

References ARMNN_LOG, armnn::ConfigureLogging(), armnn::Debug, armnn::Info, and Exception::what().
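
ArmnnConverter loads a network from a Caffe, Onnx, TensorFlow, or TfLite model file and serializes it to the ArmNN format. A typical invocation might look like the line below; the long option names are assumptions based on the options handled by ParseCommandLineArgs in this file (confirm with ArmnnConverter --help), and the paths, tensor names, and shape are placeholders:

    ArmnnConverter --model-format tflite-binary --model-path model.tflite --input-name input --input-tensor-shape "1,224,224,3" --output-name output --output-path model.armnn

The model-format value is matched against "caffe", "onnx", "tensorflow", and "tflite" in main() below to select the parser, and a "binary" or "text" marker in the same string is expected to drive isModelBinary.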

349 {
350 
351 #if (!defined(ARMNN_CAFFE_PARSER) \
352  && !defined(ARMNN_ONNX_PARSER) \
353  && !defined(ARMNN_TF_PARSER) \
354  && !defined(ARMNN_TF_LITE_PARSER))
355  ARMNN_LOG(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite.";
356  return EXIT_FAILURE;
357 #endif
358 
359 #if !defined(ARMNN_SERIALIZER)
360  ARMNN_LOG(fatal) << "Not built with Serializer support.";
361  return EXIT_FAILURE;
362 #endif
363 
364 #ifdef NDEBUG
365  armnn::LogSeverity level = armnn::LogSeverity::Info;
366 #else
367  armnn::LogSeverity level = armnn::LogSeverity::Debug;
368 #endif
369 
370  armnn::ConfigureLogging(true, true, level);
371 
372  std::string modelFormat;
373  std::string modelPath;
374 
375  std::vector<std::string> inputNames;
376  std::vector<std::string> inputTensorShapeStrs;
377  std::vector<armnn::TensorShape> inputTensorShapes;
378 
379  std::vector<std::string> outputNames;
380  std::string outputPath;
381 
382  bool isModelBinary = true;
383 
384  if (ParseCommandLineArgs(
385  argc, argv, modelFormat, modelPath, inputNames, inputTensorShapeStrs, outputNames, outputPath, isModelBinary)
386  != EXIT_SUCCESS)
387  {
388  return EXIT_FAILURE;
389  }
390 
391  for (const std::string& shapeStr : inputTensorShapeStrs)
392  {
393  if (!shapeStr.empty())
394  {
395  std::stringstream ss(shapeStr);
396 
397  try
398  {
399  armnn::TensorShape shape = ParseTensorShape(ss);
400  inputTensorShapes.push_back(shape);
401  }
402  catch (const armnn::InvalidArgumentException& e)
403  {
404  ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
405  return EXIT_FAILURE;
406  }
407  }
408  }
409 
410  ArmnnConverter converter(modelPath, inputNames, inputTensorShapes, outputNames, outputPath, isModelBinary);
411 
412  try
413  {
414  if (modelFormat.find("caffe") != std::string::npos)
415  {
416 #if defined(ARMNN_CAFFE_PARSER)
417  if (!converter.CreateNetwork<armnnCaffeParser::ICaffeParser>())
418  {
419  ARMNN_LOG(fatal) << "Failed to load model from file";
420  return EXIT_FAILURE;
421  }
422 #else
423  ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
424  return EXIT_FAILURE;
425 #endif
426  }
427  else if (modelFormat.find("onnx") != std::string::npos)
428  {
429 #if defined(ARMNN_ONNX_PARSER)
430  if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
431  {
432  ARMNN_LOG(fatal) << "Failed to load model from file";
433  return EXIT_FAILURE;
434  }
435 #else
436  ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
437  return EXIT_FAILURE;
438 #endif
439  }
440  else if (modelFormat.find("tensorflow") != std::string::npos)
441  {
442 #if defined(ARMNN_TF_PARSER)
443  if (!converter.CreateNetwork<armnnTfParser::ITfParser>())
444  {
445  ARMNN_LOG(fatal) << "Failed to load model from file";
446  return EXIT_FAILURE;
447  }
448 #else
449  ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
450  return EXIT_FAILURE;
451 #endif
452  }
453  else if (modelFormat.find("tflite") != std::string::npos)
454  {
455 #if defined(ARMNN_TF_LITE_PARSER)
456  if (!isModelBinary)
457  {
458  ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
459  for tflite files";
460  return EXIT_FAILURE;
461  }
462 
463  if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
464  {
465  ARMNN_LOG(fatal) << "Failed to load model from file";
466  return EXIT_FAILURE;
467  }
468 #else
469  ARMNN_LOG(fatal) << "Not built with TfLite parser support.";
470  return EXIT_FAILURE;
471 #endif
472  }
473  else
474  {
475  ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'";
476  return EXIT_FAILURE;
477  }
478  }
479  catch(armnn::Exception& e)
480  {
481  ARMNN_LOG(fatal) << "Failed to load model from file: " << e.what();
482  return EXIT_FAILURE;
483  }
484 
485  if (!converter.Serialize())
486  {
487  ARMNN_LOG(fatal) << "Failed to serialize model";
488  return EXIT_FAILURE;
489  }
490 
491  return EXIT_SUCCESS;
492 }
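
The same conversion can be done programmatically with the ArmNN parser and serializer libraries. A minimal sketch, assuming a binary TfLite model and that armnnTfLiteParser and armnnSerializer are built and linked; the file paths are placeholders:

#include <armnn/INetwork.hpp>
#include <armnnSerializer/ISerializer.hpp>
#include <armnnTfLiteParser/ITfLiteParser.hpp>

#include <cstdlib>
#include <fstream>

int main()
{
    // Parse the TfLite flatbuffer into an armnn::INetwork,
    // as converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>() does above.
    auto parser = armnnTfLiteParser::ITfLiteParser::Create();
    armnn::INetworkPtr network = parser->CreateNetworkFromBinaryFile("model.tflite"); // placeholder path

    // Serialize the network to the ArmNN flatbuffer format, as converter.Serialize() does above.
    auto serializer = armnnSerializer::ISerializer::Create();
    serializer->Serialize(*network);

    std::ofstream file("model.armnn", std::ios::out | std::ios::binary); // placeholder path
    return serializer->SaveSerializedToStream(file) ? EXIT_SUCCESS : EXIT_FAILURE;
}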

Referenced declarations

void armnn::ConfigureLogging(bool printToStandardOutput, bool printToDebugOutput, LogSeverity severity)
    Configures the logging behaviour of the ArmNN library. Definition: Utils.cpp:10

virtual const char* armnn::Exception::what() const noexcept override
    Definition: Exceptions.cpp:32

#define ARMNN_LOG(severity)
    Definition: Logging.hpp:163

armnnTfParser::ITfParser
    Parses a directed acyclic graph from a tensorflow protobuf file. Definition: ITfParser.hpp:25

armnn::Exception
    Base class for all ArmNN exceptions so that users can filter to just those. Definition: Exceptions.hpp:46

armnn::LogSeverity
    Definition: Utils.hpp:12
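
The ConfigureLogging and ARMNN_LOG entries above are what main() uses to set up logging (lines 364-370 of the listing). A minimal sketch of that pattern in isolation; the logged message is illustrative:

#include <armnn/Logging.hpp>
#include <armnn/Utils.hpp>

int main()
{
#ifdef NDEBUG
    armnn::LogSeverity level = armnn::LogSeverity::Info;   // release builds: filter out Debug/Trace
#else
    armnn::LogSeverity level = armnn::LogSeverity::Debug;  // debug builds: keep Debug output
#endif

    // Print to standard output and to the platform debug output, filtered at 'level'.
    armnn::ConfigureLogging(true, true, level);

    ARMNN_LOG(info) << "ArmNN logging configured"; // illustrative message
    return 0;
}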