author     Derek Lamberti <derek.lamberti@arm.com>    2020-03-11 11:42:26 +0000
committer  Jim Flynn <jim.flynn@arm.com>               2020-03-12 11:56:33 +0000
commit     c9e52794083eb73dd1bbf15ce7b16bb26394d7f5 (patch)
tree       2681e7b7a3509989fdd87024363a7f8346bfd7c2 /src/armnnConverter
parent     431852c95ab89194bac9c9ce57ca011c0ce2f15e (diff)
download   armnn-c9e52794083eb73dd1bbf15ce7b16bb26394d7f5.tar.gz
IVGCVSW-4545 Fix segfault parsing reshape layer
Change-Id: Ib4bbab387cec4780c8aae56fdede090fa265e4ba
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
Diffstat (limited to 'src/armnnConverter')
-rw-r--r--  src/armnnConverter/ArmnnConverter.cpp  96
1 file changed, 52 insertions(+), 44 deletions(-)
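
The change below wraps the per-format parser dispatch in main() in a single try/catch, so that an armnn::Exception thrown while the network is being created (such as the reshape-layer parsing failure named in the subject line) is reported through ARMNN_LOG(fatal) and turned into EXIT_FAILURE instead of bringing the converter down. What follows is a minimal standalone sketch of that pattern, not the patched file itself: LoadNetwork and DemoException are hypothetical stand-ins for converter.CreateNetwork<...>() and armnn::Exception, and std::cerr stands in for ARMNN_LOG.

    // Hedged sketch of the error-handling pattern introduced by the patch.
    // DemoException and LoadNetwork are illustrative stand-ins only.
    #include <cstdlib>
    #include <iostream>
    #include <stdexcept>
    #include <string>

    struct DemoException : std::runtime_error
    {
        using std::runtime_error::runtime_error;
    };

    // Pretends to parse a model file and throws on malformed input,
    // the way a parser may throw while handling a reshape layer.
    bool LoadNetwork(const std::string& modelPath)
    {
        if (modelPath.empty())
        {
            throw DemoException("malformed reshape layer in '" + modelPath + "'");
        }
        return true;
    }

    int main()
    {
        const std::string modelPath = ""; // deliberately invalid to trigger the throw
        try
        {
            if (!LoadNetwork(modelPath))
            {
                std::cerr << "Failed to load model from file\n";
                return EXIT_FAILURE;
            }
        }
        catch (const DemoException& e)
        {
            // With the patch, parser errors raised during network creation are
            // caught here, reported, and converted into a clean failure exit.
            std::cerr << "Failed to load model from file: " << e.what() << "\n";
            return EXIT_FAILURE;
        }
        return EXIT_SUCCESS;
    }
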
diff --git a/src/armnnConverter/ArmnnConverter.cpp b/src/armnnConverter/ArmnnConverter.cpp
index 70df2c3a5a..e0a659dca3 100644
--- a/src/armnnConverter/ArmnnConverter.cpp
+++ b/src/armnnConverter/ArmnnConverter.cpp
@@ -420,68 +420,76 @@ int main(int argc, const char* argv[])
ArmnnConverter converter(modelPath, inputNames, inputTensorShapes, outputNames, outputPath, isModelBinary);
- if (modelFormat.find("caffe") != std::string::npos)
+ try
{
-#if defined(ARMNN_CAFFE_PARSER)
- if (!converter.CreateNetwork<armnnCaffeParser::ICaffeParser>())
+ if (modelFormat.find("caffe") != std::string::npos)
{
- ARMNN_LOG(fatal) << "Failed to load model from file";
- return EXIT_FAILURE;
- }
+#if defined(ARMNN_CAFFE_PARSER)
+ if (!converter.CreateNetwork<armnnCaffeParser::ICaffeParser>())
+ {
+ ARMNN_LOG(fatal) << "Failed to load model from file";
+ return EXIT_FAILURE;
+ }
#else
- ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
- return EXIT_FAILURE;
-#endif
- }
- else if (modelFormat.find("onnx") != std::string::npos)
- {
-#if defined(ARMNN_ONNX_PARSER)
- if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
- {
- ARMNN_LOG(fatal) << "Failed to load model from file";
+ ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
return EXIT_FAILURE;
- }
-#else
- ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
- return EXIT_FAILURE;
#endif
- }
- else if (modelFormat.find("tensorflow") != std::string::npos)
- {
-#if defined(ARMNN_TF_PARSER)
- if (!converter.CreateNetwork<armnnTfParser::ITfParser>())
+ }
+ else if (modelFormat.find("onnx") != std::string::npos)
{
- ARMNN_LOG(fatal) << "Failed to load model from file";
+#if defined(ARMNN_ONNX_PARSER)
+ if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
+ {
+ ARMNN_LOG(fatal) << "Failed to load model from file";
+ return EXIT_FAILURE;
+ }
+#else
+ ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
return EXIT_FAILURE;
+#endif
}
+ else if (modelFormat.find("tensorflow") != std::string::npos)
+ {
+#if defined(ARMNN_TF_PARSER)
+ if (!converter.CreateNetwork<armnnTfParser::ITfParser>())
+ {
+ ARMNN_LOG(fatal) << "Failed to load model from file";
+ return EXIT_FAILURE;
+ }
#else
- ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
- return EXIT_FAILURE;
+ ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
+ return EXIT_FAILURE;
#endif
- }
- else if (modelFormat.find("tflite") != std::string::npos)
- {
-#if defined(ARMNN_TF_LITE_PARSER)
- if (!isModelBinary)
+ }
+ else if (modelFormat.find("tflite") != std::string::npos)
{
- ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
- for tflite files";
+#if defined(ARMNN_TF_LITE_PARSER)
+ if (!isModelBinary)
+ {
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
+ for tflite files";
+ return EXIT_FAILURE;
+ }
+
+ if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
+ {
+ ARMNN_LOG(fatal) << "Failed to load model from file";
+ return EXIT_FAILURE;
+ }
+#else
+ ARMNN_LOG(fatal) << "Not built with TfLite parser support.";
return EXIT_FAILURE;
+#endif
}
-
- if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
+ else
{
- ARMNN_LOG(fatal) << "Failed to load model from file";
+ ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'";
return EXIT_FAILURE;
}
-#else
- ARMNN_LOG(fatal) << "Not built with TfLite parser support.";
- return EXIT_FAILURE;
-#endif
}
- else
+ catch(armnn::Exception& e)
{
- ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'";
+ ARMNN_LOG(fatal) << "Failed to load model from file: " << e.what();
return EXIT_FAILURE;
}
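
One point worth noting about the design, as reflected in the hunk above: the try/catch encloses the whole if/else dispatch rather than each parser branch individually, so whichever of the Caffe, Onnx, TensorFlow, or TfLite parsers throws an armnn::Exception, the converter logs the message from e.what() and exits with EXIT_FAILURE in one place.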