ArmNN
ArmnnConverter.cpp File Reference
#include <armnn/Logging.hpp>
#include <HeapProfiling.hpp>
#include <boost/format.hpp>
#include <boost/algorithm/string/split.hpp>
#include <boost/algorithm/string/classification.hpp>
#include <boost/program_options.hpp>
#include <cstdlib>
#include <fstream>
#include <iostream>

Go to the source code of this file.

Functions

int main (int argc, const char *argv[])
 

Function Documentation

◆ main()

int main ( int argc, const char * argv[] )

Definition at line 359 of file ArmnnConverter.cpp.

References ARMNN_LOG, armnn::ConfigureLogging(), armnn::Debug, armnn::Info, and Exception::what().

360 {
361 
362 #if (!defined(ARMNN_CAFFE_PARSER) \
363  && !defined(ARMNN_ONNX_PARSER) \
364  && !defined(ARMNN_TF_PARSER) \
365  && !defined(ARMNN_TF_LITE_PARSER))
366  ARMNN_LOG(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite.";
367  return EXIT_FAILURE;
368 #endif
369 
370 #if !defined(ARMNN_SERIALIZER)
371  ARMNN_LOG(fatal) << "Not built with Serializer support.";
372  return EXIT_FAILURE;
373 #endif
374 
375 #ifdef NDEBUG
376  armnn::LogSeverity level = armnn::LogSeverity::Info;
377 #else
378  armnn::LogSeverity level = armnn::LogSeverity::Debug;
379 #endif
380 
381  armnn::ConfigureLogging(true, true, level);
382 
383  std::string modelFormat;
384  std::string modelPath;
385 
386  std::vector<std::string> inputNames;
387  std::vector<std::string> inputTensorShapeStrs;
388  std::vector<armnn::TensorShape> inputTensorShapes;
389 
390  std::vector<std::string> outputNames;
391  std::string outputPath;
392 
393  bool isModelBinary = true;
394 
395  if (ParseCommandLineArgs(
396  argc, argv, modelFormat, modelPath, inputNames, inputTensorShapeStrs, outputNames, outputPath, isModelBinary)
397  != EXIT_SUCCESS)
398  {
399  return EXIT_FAILURE;
400  }
401 
402  for (const std::string& shapeStr : inputTensorShapeStrs)
403  {
404  if (!shapeStr.empty())
405  {
406  std::stringstream ss(shapeStr);
407 
408  try
409  {
410  armnn::TensorShape shape = ParseTensorShape(ss);
411  inputTensorShapes.push_back(shape);
412  }
413  catch (const armnn::InvalidArgumentException& e)
414  {
415  ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
416  return EXIT_FAILURE;
417  }
418  }
419  }
420 
421  ArmnnConverter converter(modelPath, inputNames, inputTensorShapes, outputNames, outputPath, isModelBinary);
422 
423  if (modelFormat.find("caffe") != std::string::npos)
424  {
425 #if defined(ARMNN_CAFFE_PARSER)
426  if (!converter.CreateNetwork<armnnCaffeParser::ICaffeParser>())
427  {
428  ARMNN_LOG(fatal) << "Failed to load model from file";
429  return EXIT_FAILURE;
430  }
431 #else
432  ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
433  return EXIT_FAILURE;
434 #endif
435  }
436  else if (modelFormat.find("onnx") != std::string::npos)
437  {
438 #if defined(ARMNN_ONNX_PARSER)
439  if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
440  {
441  ARMNN_LOG(fatal) << "Failed to load model from file";
442  return EXIT_FAILURE;
443  }
444 #else
445  ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
446  return EXIT_FAILURE;
447 #endif
448  }
449  else if (modelFormat.find("tensorflow") != std::string::npos)
450  {
451 #if defined(ARMNN_TF_PARSER)
452  if (!converter.CreateNetwork<armnnTfParser::ITfParser>())
453  {
454  ARMNN_LOG(fatal) << "Failed to load model from file";
455  return EXIT_FAILURE;
456  }
457 #else
458  ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
459  return EXIT_FAILURE;
460 #endif
461  }
462  else if (modelFormat.find("tflite") != std::string::npos)
463  {
464 #if defined(ARMNN_TF_LITE_PARSER)
465  if (!isModelBinary)
466  {
467  ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
468  for tflite files";
469  return EXIT_FAILURE;
470  }
471 
472  if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
473  {
474  ARMNN_LOG(fatal) << "Failed to load model from file";
475  return EXIT_FAILURE;
476  }
477 #else
478  ARMNN_LOG(fatal) << "Not built with TfLite parser support.";
479  return EXIT_FAILURE;
480 #endif
481  }
482  else
483  {
484  ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'";
485  return EXIT_FAILURE;
486  }
487 
488  if (!converter.Serialize())
489  {
490  ARMNN_LOG(fatal) << "Failed to serialize model";
491  return EXIT_FAILURE;
492  }
493 
494  return EXIT_SUCCESS;
495 }
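
The function above dispatches on the model format, builds an armnn::INetwork with the matching parser, and then serializes it via ArmnnConverter::Serialize(). As a rough illustration of that flow, here is a minimal sketch of the same parse-then-serialize sequence for the TfLite path only, written directly against the ITfLiteParser and ISerializer interfaces; the file names, the use of default parser options, and the omission of input shape overrides and error handling are assumptions for brevity, not part of ArmnnConverter itself.

// Minimal sketch (not the converter tool itself): parse a binary TfLite model
// and serialize it to the ArmNN format, mirroring the TfLite branch of main().
// "model.tflite" and "model.armnn" are placeholder paths.
#include <armnn/INetwork.hpp>
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#include <armnnSerializer/ISerializer.hpp>

#include <cstdlib>
#include <fstream>

int main()
{
    // CreateNetworkFromBinaryFile builds an armnn::INetwork from the .tflite flatbuffer.
    auto parser = armnnTfLiteParser::ITfLiteParser::Create();
    armnn::INetworkPtr network = parser->CreateNetworkFromBinaryFile("model.tflite");

    // Serialize the network and stream it out as an .armnn file.
    auto serializer = armnnSerializer::ISerializer::Create();
    serializer->Serialize(*network);

    std::ofstream file("model.armnn", std::ios::out | std::ios::binary);
    return serializer->SaveSerializedToStream(file) ? EXIT_SUCCESS : EXIT_FAILURE;
}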