diff options
Diffstat (limited to 'tests/ExecuteNetwork')
-rw-r--r-- | tests/ExecuteNetwork/ExecuteNetwork.cpp | 14
-rw-r--r-- | tests/ExecuteNetwork/ExecuteNetworkParams.cpp | 10
-rw-r--r-- | tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp | 4
3 files changed, 4 insertions, 24 deletions
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp index f812e53e04..8ab286b16b 100644 --- a/tests/ExecuteNetwork/ExecuteNetwork.cpp +++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp @@ -13,9 +13,6 @@ #if defined(ARMNN_SERIALIZER) #include "armnnDeserializer/IDeserializer.hpp" #endif -#if defined(ARMNN_CAFFE_PARSER) -#include "armnnCaffeParser/ICaffeParser.hpp" -#endif #if defined(ARMNN_TF_PARSER) #include "armnnTfParser/ITfParser.hpp" #endif @@ -472,15 +469,6 @@ int main(int argc, const char* argv[]) return EXIT_FAILURE; #endif } - else if (modelFormat.find("caffe") != std::string::npos) - { - #if defined(ARMNN_CAFFE_PARSER) - return MainImpl<armnnCaffeParser::ICaffeParser, float>(ProgramOptions.m_ExNetParams, runtime); - #else - ARMNN_LOG(fatal) << "Not built with Caffe parser support."; - return EXIT_FAILURE; - #endif - } else if (modelFormat.find("onnx") != std::string::npos) { #if defined(ARMNN_ONNX_PARSER) @@ -526,7 +514,7 @@ int main(int argc, const char* argv[]) else { ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat - << "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'"; + << "'. Please include 'tensorflow', 'tflite' or 'onnx'"; return EXIT_FAILURE; } }
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.cpp b/tests/ExecuteNetwork/ExecuteNetworkParams.cpp index 890ab2a658..4e3b5e313d 100644 --- a/tests/ExecuteNetwork/ExecuteNetworkParams.cpp +++ b/tests/ExecuteNetwork/ExecuteNetworkParams.cpp @@ -41,14 +41,6 @@ void CheckModelFormat(const std::string& modelFormat) "built with serialization support."); #endif } - else if (modelFormat.find("caffe") != std::string::npos) - { -#if defined(ARMNN_CAFFE_PARSER) -#else - throw armnn::InvalidArgumentException("Can't run model in caffe format without a " - "built with Caffe parser support."); -#endif - } else if (modelFormat.find("onnx") != std::string::npos) { #if defined(ARMNN_ONNX_PARSER) @@ -83,7 +75,7 @@ void CheckModelFormat(const std::string& modelFormat) else { throw armnn::InvalidArgumentException(fmt::format("Unknown model format: '{}'. " - "Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'", + "Please include 'tensorflow', 'tflite' or 'onnx'", modelFormat)); } }
diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp index b52adaa325..7c1db61841 100644 --- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp +++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp @@ -173,12 +173,12 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork", cxxopts::value<std::vector<std::string>>()) ("f,model-format", - "armnn-binary, caffe-binary, caffe-text, onnx-binary, onnx-text, tflite-binary, tensorflow-binary or " + "armnn-binary, onnx-binary, onnx-text, tflite-binary, tensorflow-binary or " "tensorflow-text.", cxxopts::value<std::string>()) ("m,model-path", - "Path to model file, e.g. .armnn, .caffemodel, .prototxt, .tflite, .onnx", + "Path to model file, e.g. .armnn, , .prototxt, .tflite, .onnx", cxxopts::value<std::string>(m_ExNetParams.m_ModelPath)) ("i,input-name",