author     surmeh01 <surabhi.mehta@arm.com>    2018-05-18 16:31:43 +0100
committer  telsoa01 <telmo.soares@arm.com>     2018-05-23 13:09:07 +0100
commit     3537c2ca7ebf31c1673b9ec2bb0c17b0406bbae0 (patch)
tree       5950603ad78ec3fe56fb31ddc7f4d52a19f5bc60 /tests
parent     bceff2fb3fc68bb0aa88b886900c34b77340c826 (diff)
download   armnn-3537c2ca7ebf31c1673b9ec2bb0c17b0406bbae0.tar.gz
Release 18.05
Diffstat (limited to 'tests')
-rw-r--r--  tests/CMakeLists.txt  6
-rw-r--r--  tests/CaffeAlexNet-Armnn/CaffeAlexNet-Armnn.cpp  20
-rw-r--r--  tests/CaffeCifar10AcrossChannels-Armnn/CaffeCifar10AcrossChannels-Armnn.cpp  22
-rw-r--r--  tests/CaffeInception_BN-Armnn/CaffeInception_BN-Armnn.cpp  30
-rw-r--r--  tests/CaffeMnist-Armnn/CaffeMnist-Armnn.cpp  22
-rw-r--r--  tests/CaffeResNet-Armnn/CaffeResNet-Armnn.cpp  36
-rw-r--r--  tests/CaffeVGG-Armnn/CaffeVGG-Armnn.cpp  24
-rw-r--r--  tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp  53
-rw-r--r--  tests/ExecuteNetwork/ExecuteNetwork.cpp  23
-rw-r--r--  tests/InferenceModel.hpp  62
-rw-r--r--  tests/InferenceTest.cpp  2
-rw-r--r--  tests/InferenceTest.inl  1
-rw-r--r--  tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp  14
-rw-r--r--  tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp  25
-rw-r--r--  tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp  39
-rw-r--r--  tests/TfMnist-Armnn/TfMnist-Armnn.cpp  25
-rw-r--r--  tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp  90
-rw-r--r--  tests/TfResNext_Quantized-Armnn/TfResNext_Quantized-Armnn.cpp  39
18 files changed, 406 insertions, 127 deletions
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 6bc88178f5..ecdff7f909 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -127,6 +127,12 @@ if(BUILD_TF_PARSER)
MobileNetDatabase.hpp
MobileNetDatabase.cpp)
TfParserTest(TfInceptionV3-Armnn "${TfInceptionV3-Armnn_sources}")
+
+ set(TfResNext-Armnn_sources
+ TfResNext_Quantized-Armnn/TfResNext_Quantized-Armnn.cpp
+ ImageNetDatabase.hpp
+ ImageNetDatabase.cpp)
+ TfParserTest(TfResNext-Armnn "${TfResNext-Armnn_sources}")
endif()
if (BUILD_CAFFE_PARSER OR BUILD_TF_PARSER)
diff --git a/tests/CaffeAlexNet-Armnn/CaffeAlexNet-Armnn.cpp b/tests/CaffeAlexNet-Armnn/CaffeAlexNet-Armnn.cpp
index c50d8ea05f..dce4e08d05 100644
--- a/tests/CaffeAlexNet-Armnn/CaffeAlexNet-Armnn.cpp
+++ b/tests/CaffeAlexNet-Armnn/CaffeAlexNet-Armnn.cpp
@@ -8,7 +8,21 @@
int main(int argc, char* argv[])
{
- return armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnCaffeParser::ICaffeParser>(
- argc, argv, "bvlc_alexnet_1.caffemodel", true, "data", "prob", { 0 },
- [](const char* dataDir) { return ImageNetDatabase(dataDir); });
+ int retVal = EXIT_FAILURE;
+ try
+ {
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnCaffeParser::ICaffeParser>(
+ argc, argv, "bvlc_alexnet_1.caffemodel", true, "data", "prob", { 0 },
+ [](const char* dataDir) { return ImageNetDatabase(dataDir); });
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: CaffeAlexNet-Armnn: An error has occurred when running the "
+ "classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
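
The same exception-hardening idiom is repeated for every test executable touched by this patch. As a point of reference, this is a minimal standalone sketch of that idiom outside of ArmNN; RunTests() is a hypothetical stand-in for armnn::test::ClassifierInferenceTestMain() and is not part of the patch.

#include <cstdlib>
#include <exception>
#include <iostream>

// Hypothetical stand-in for the (potentially throwing) test entry point.
int RunTests(int /*argc*/, char* /*argv*/[])
{
    return EXIT_SUCCESS; // placeholder body for the sketch
}

int main(int argc, char* argv[])
{
    int retVal = EXIT_FAILURE;
    try
    {
        retVal = RunTests(argc, argv);
    }
    catch (const std::exception& e)
    {
        // stderr is used rather than BOOST_LOG_TRIVIAL, which can itself throw.
        std::cerr << "WARNING: an error has occurred when running the tests: " << e.what() << std::endl;
    }
    return retVal;
}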
diff --git a/tests/CaffeCifar10AcrossChannels-Armnn/CaffeCifar10AcrossChannels-Armnn.cpp b/tests/CaffeCifar10AcrossChannels-Armnn/CaffeCifar10AcrossChannels-Armnn.cpp
index 9994bb5431..fbd3312f04 100644
--- a/tests/CaffeCifar10AcrossChannels-Armnn/CaffeCifar10AcrossChannels-Armnn.cpp
+++ b/tests/CaffeCifar10AcrossChannels-Armnn/CaffeCifar10AcrossChannels-Armnn.cpp
@@ -8,8 +8,22 @@
int main(int argc, char* argv[])
{
- return armnn::test::ClassifierInferenceTestMain<Cifar10Database, armnnCaffeParser::ICaffeParser>(
- argc, argv, "cifar10_full_iter_60000.caffemodel", true, "data", "prob",
- { 0, 1, 2, 4, 7 },
- [](const char* dataDir) { return Cifar10Database(dataDir); });
+ int retVal = EXIT_FAILURE;
+ try
+ {
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<Cifar10Database, armnnCaffeParser::ICaffeParser>(
+ argc, argv, "cifar10_full_iter_60000.caffemodel", true, "data", "prob",
+ { 0, 1, 2, 4, 7 },
+ [](const char* dataDir) { return Cifar10Database(dataDir); });
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: CaffeCifar10AcrossChannels-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/CaffeInception_BN-Armnn/CaffeInception_BN-Armnn.cpp b/tests/CaffeInception_BN-Armnn/CaffeInception_BN-Armnn.cpp
index 557a3b00f4..a6581bea55 100644
--- a/tests/CaffeInception_BN-Armnn/CaffeInception_BN-Armnn.cpp
+++ b/tests/CaffeInception_BN-Armnn/CaffeInception_BN-Armnn.cpp
@@ -8,12 +8,28 @@
int main(int argc, char* argv[])
{
- std::vector<ImageSet> imageSet =
+ int retVal = EXIT_FAILURE;
+ try
{
- {"shark.jpg", 3694}
- };
- return armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnCaffeParser::ICaffeParser>(
- argc, argv, "Inception-BN-batchsize1.caffemodel", true,
- "data", "softmax", { 0 },
- [&imageSet](const char* dataDir) { return ImageNetDatabase(dataDir, 224, 224, imageSet); });
+ // Coverity fix: The following code may throw an exception of type std::length_error.
+ std::vector<ImageSet> imageSet =
+ {
+ {"shark.jpg", 3694}
+ };
+
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnCaffeParser::ICaffeParser>(
+ argc, argv, "Inception-BN-batchsize1.caffemodel", true,
+ "data", "softmax", { 0 },
+ [&imageSet](const char* dataDir) { return ImageNetDatabase(dataDir, 224, 224, imageSet); });
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: CaffeInception_BN-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/CaffeMnist-Armnn/CaffeMnist-Armnn.cpp b/tests/CaffeMnist-Armnn/CaffeMnist-Armnn.cpp
index 5b8864d73d..ec14a5d7bc 100644
--- a/tests/CaffeMnist-Armnn/CaffeMnist-Armnn.cpp
+++ b/tests/CaffeMnist-Armnn/CaffeMnist-Armnn.cpp
@@ -8,8 +8,22 @@
int main(int argc, char* argv[])
{
- return armnn::test::ClassifierInferenceTestMain<MnistDatabase, armnnCaffeParser::ICaffeParser>(
- argc, argv, "lenet_iter_9000.caffemodel", true, "data", "prob",
- { 0, 1, 5, 8, 9 },
- [](const char* dataDir) { return MnistDatabase(dataDir); });
+ int retVal = EXIT_FAILURE;
+ try
+ {
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<MnistDatabase, armnnCaffeParser::ICaffeParser>(
+ argc, argv, "lenet_iter_9000.caffemodel", true, "data", "prob",
+ { 0, 1, 5, 8, 9 },
+ [](const char* dataDir) { return MnistDatabase(dataDir); });
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: CaffeMnist-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/CaffeResNet-Armnn/CaffeResNet-Armnn.cpp b/tests/CaffeResNet-Armnn/CaffeResNet-Armnn.cpp
index ed304f8b0c..7cccb215a1 100644
--- a/tests/CaffeResNet-Armnn/CaffeResNet-Armnn.cpp
+++ b/tests/CaffeResNet-Armnn/CaffeResNet-Armnn.cpp
@@ -8,16 +8,32 @@
int main(int argc, char* argv[])
{
- std::vector<ImageSet> imageSet =
+ int retVal = EXIT_FAILURE;
+ try
{
- {"ILSVRC2012_val_00000018.JPEG", 21 },
- {"shark.jpg", 2}
- };
+ // Coverity fix: The following code may throw an exception of type std::length_error.
+ std::vector<ImageSet> imageSet =
+ {
+ {"ILSVRC2012_val_00000018.JPEG", 21 },
+ {"shark.jpg", 2}
+ };
- armnn::TensorShape inputTensorShape({ 1, 3, 224, 224 });
- return armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnCaffeParser::ICaffeParser>(
- argc, argv, "ResNet_50_ilsvrc15_model.caffemodel", true,
- "data", "prob", { 0, 1 },
- [&imageSet](const char* dataDir) { return ImageNetDatabase(dataDir, 224, 224, imageSet); },
- &inputTensorShape);
+ armnn::TensorShape inputTensorShape({ 1, 3, 224, 224 });
+
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnCaffeParser::ICaffeParser>(
+ argc, argv, "ResNet_50_ilsvrc15_model.caffemodel", true,
+ "data", "prob", { 0, 1 },
+ [&imageSet](const char* dataDir) { return ImageNetDatabase(dataDir, 224, 224, imageSet); },
+ &inputTensorShape);
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: CaffeResNet-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/CaffeVGG-Armnn/CaffeVGG-Armnn.cpp b/tests/CaffeVGG-Armnn/CaffeVGG-Armnn.cpp
index e7fc55c7e7..b859042935 100644
--- a/tests/CaffeVGG-Armnn/CaffeVGG-Armnn.cpp
+++ b/tests/CaffeVGG-Armnn/CaffeVGG-Armnn.cpp
@@ -9,9 +9,23 @@
int main(int argc, char* argv[])
{
armnn::TensorShape inputTensorShape({ 1, 3, 224, 224 });
- return armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnCaffeParser::ICaffeParser>(
- argc, argv, "VGG_CNN_S.caffemodel", true,
- "input", "prob", { 0 },
- [](const char* dataDir) { return ImageNetDatabase(dataDir, 224, 224); },
- &inputTensorShape);
+ int retVal = EXIT_FAILURE;
+ try
+ {
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnCaffeParser::ICaffeParser>(
+ argc, argv, "VGG_CNN_S.caffemodel", true,
+ "input", "prob", { 0 },
+ [](const char* dataDir) { return ImageNetDatabase(dataDir, 224, 224); },
+ &inputTensorShape);
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: CaffeVGG-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp b/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
index af60be95ec..ad79d49f0c 100644
--- a/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
+++ b/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
@@ -13,27 +13,42 @@ int main(int argc, char* argv[])
using YoloInferenceModel = InferenceModel<armnnCaffeParser::ICaffeParser,
float>;
- return InferenceTestMain(argc, argv, { 0 },
- [&inputTensorShape]()
- {
- return make_unique<YoloTestCaseProvider<YoloInferenceModel>>(
- [&]
- (typename YoloInferenceModel::CommandLineOptions modelOptions)
- {
- if (!ValidateDirectory(modelOptions.m_ModelDir))
+ int retVal = EXIT_FAILURE;
+ try
+ {
+ // Coverity fix: InferenceTestMain() may throw uncaught exceptions.
+ retVal = InferenceTestMain(argc, argv, { 0 },
+ [&inputTensorShape]()
+ {
+ return make_unique<YoloTestCaseProvider<YoloInferenceModel>>(
+ [&]
+ (typename YoloInferenceModel::CommandLineOptions modelOptions)
{
- return std::unique_ptr<YoloInferenceModel>();
- }
+ if (!ValidateDirectory(modelOptions.m_ModelDir))
+ {
+ return std::unique_ptr<YoloInferenceModel>();
+ }
- typename YoloInferenceModel::Params modelParams;
- modelParams.m_ModelPath = modelOptions.m_ModelDir + "yolov1_tiny_voc2007_model.caffemodel";
- modelParams.m_InputBinding = "data";
- modelParams.m_OutputBinding = "fc12";
- modelParams.m_InputTensorShape = &inputTensorShape;
- modelParams.m_IsModelBinary = true;
- modelParams.m_ComputeDevice = modelOptions.m_ComputeDevice;
+ typename YoloInferenceModel::Params modelParams;
+ modelParams.m_ModelPath = modelOptions.m_ModelDir + "yolov1_tiny_voc2007_model.caffemodel";
+ modelParams.m_InputBinding = "data";
+ modelParams.m_OutputBinding = "fc12";
+ modelParams.m_InputTensorShape = &inputTensorShape;
+ modelParams.m_IsModelBinary = true;
+ modelParams.m_ComputeDevice = modelOptions.m_ComputeDevice;
+ modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
- return std::make_unique<YoloInferenceModel>(modelParams);
+ return std::make_unique<YoloInferenceModel>(modelParams);
+ });
});
- });
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: CaffeYolo-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 04ab195816..74737e2718 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -31,7 +31,16 @@ std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseEleme
while (std::getline(stream, line))
{
std::vector<std::string> tokens;
- boost::split(tokens, line, boost::algorithm::is_any_of("\t ,;:"), boost::token_compress_on);
+ try
+ {
+ // Coverity fix: boost::split() may throw an exception of type boost::bad_function_call.
+ boost::split(tokens, line, boost::algorithm::is_any_of("\t ,;:"), boost::token_compress_on);
+ }
+ catch (const std::exception& e)
+ {
+ BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
+ continue;
+ }
for (const std::string& token : tokens)
{
if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
@@ -219,7 +228,17 @@ int main(int argc, char* argv[])
{
std::stringstream ss(inputTensorShapeStr);
std::vector<unsigned int> dims = ParseArray<unsigned int>(ss);
- inputTensorShape = std::make_unique<armnn::TensorShape>(dims.size(), dims.data());
+
+ try
+ {
+ // Coverity fix: An exception of type armnn::InvalidArgumentException is thrown and never caught.
+ inputTensorShape = std::make_unique<armnn::TensorShape>(dims.size(), dims.data());
+ }
+ catch (const armnn::InvalidArgumentException& e)
+ {
+ BOOST_LOG_TRIVIAL(fatal) << "Cannot create tensor shape: " << e.what();
+ return 1;
+ }
}
// Forward to implementation based on the parser type
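
For reference, the tokenizing loop that the ParseArrayImpl() hunk above now guards can be exercised on its own. This is an illustrative sketch only, not part of the patch; the element-parsing callback of the real function is left out.

#include <boost/algorithm/string.hpp>
#include <boost/log/trivial.hpp>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Split each line of the stream on whitespace and common separators, skipping empty tokens.
std::vector<std::string> TokenizeLines(std::istream& stream)
{
    std::vector<std::string> result;
    std::string line;
    while (std::getline(stream, line))
    {
        std::vector<std::string> tokens;
        try
        {
            // boost::split() may throw (e.g. boost::bad_function_call), hence the guard.
            boost::split(tokens, line, boost::algorithm::is_any_of("\t ,;:"), boost::token_compress_on);
        }
        catch (const std::exception& e)
        {
            BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
            continue;
        }
        for (const std::string& token : tokens)
        {
            if (!token.empty())
            {
                result.push_back(token);
            }
        }
    }
    return result;
}

int main()
{
    std::istringstream input("1, 2, 3\n224 224;3");
    for (const std::string& token : TokenizeLines(input))
    {
        std::cout << token << std::endl;
    }
    return 0;
}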
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index c390ccdc2f..f5f00378ca 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -3,15 +3,19 @@
// See LICENSE file in the project root for full license information.
//
#pragma once
-
#include "armnn/ArmNN.hpp"
+#include "HeapProfiling.hpp"
+#include <boost/exception/exception.hpp>
+#include <boost/exception/diagnostic_information.hpp>
#include <boost/log/trivial.hpp>
#include <boost/format.hpp>
#include <boost/program_options.hpp>
+#include <boost/filesystem.hpp>
#include <map>
#include <string>
+#include <fstream>
template<typename TContainer>
inline armnn::InputTensors MakeInputTensors(const std::pair<armnn::LayerBindingId, armnn::TensorInfo>& input,
@@ -19,8 +23,16 @@ inline armnn::InputTensors MakeInputTensors(const std::pair<armnn::LayerBindingI
{
if (inputTensorData.size() != input.second.GetNumElements())
{
- throw armnn::Exception(boost::str(boost::format("Input tensor has incorrect size. Expected %1% elements "
- "but got %2%.") % input.second.GetNumElements() % inputTensorData.size()));
+ try
+ {
+ throw armnn::Exception(boost::str(boost::format("Input tensor has incorrect size. Expected %1% elements "
+ "but got %2%.") % input.second.GetNumElements() % inputTensorData.size()));
+ } catch (const boost::exception& e)
+ {
+ // Coverity fix: it should not be possible to get here but boost::str and boost::format can both
+ // throw uncaught exceptions - convert them to armnn exceptions and rethrow
+ throw armnn::Exception(diagnostic_information(e));
+ }
}
return { { input.first, armnn::ConstTensor(input.second, inputTensorData.data()) } };
}
@@ -46,6 +58,7 @@ public:
{
std::string m_ModelDir;
armnn::Compute m_ComputeDevice;
+ bool m_VisualizePostOptimizationModel;
};
static void AddCommandLineOptions(boost::program_options::options_description& desc, CommandLineOptions& options)
@@ -56,7 +69,11 @@ public:
("model-dir,m", po::value<std::string>(&options.m_ModelDir)->required(),
"Path to directory containing model files (.caffemodel/.prototxt)")
("compute,c", po::value<armnn::Compute>(&options.m_ComputeDevice)->default_value(armnn::Compute::CpuAcc),
- "Which device to run layers on by default. Possible choices: CpuAcc, CpuRef, GpuAcc");
+ "Which device to run layers on by default. Possible choices: CpuAcc, CpuRef, GpuAcc")
+ ("visualize-optimized-model,v",
+ po::value<bool>(&options.m_VisualizePostOptimizationModel)->default_value(false),
+ "Produce a dot file useful for visualizing the graph post optimization."
+ "The file will have the same name as the model with the .dot extention.");
}
struct Params
@@ -67,11 +84,13 @@ public:
const armnn::TensorShape* m_InputTensorShape;
armnn::Compute m_ComputeDevice;
bool m_IsModelBinary;
+ bool m_VisualizePostOptimizationModel;
Params()
: m_InputTensorShape(nullptr)
, m_ComputeDevice(armnn::Compute::CpuRef)
, m_IsModelBinary(true)
+ , m_VisualizePostOptimizationModel(false)
{
}
};
@@ -92,19 +111,38 @@ public:
}
std::vector<std::string> requestedOutputs{ params.m_OutputBinding };
- // Handle text and binary input differently by calling the corresponding parser function
- armnn::INetworkPtr network = (params.m_IsModelBinary ?
- parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
- parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
+ armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};
+ {
+ ARMNN_SCOPED_HEAP_PROFILING("Parsing");
+ // Handle text and binary input differently by calling the corresponding parser function
+ network = (params.m_IsModelBinary ?
+ parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
+ parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
+ }
m_InputBindingInfo = parser->GetNetworkInputBindingInfo(params.m_InputBinding);
m_OutputBindingInfo = parser->GetNetworkOutputBindingInfo(params.m_OutputBinding);
- armnn::IOptimizedNetworkPtr optNet =
- armnn::Optimize(*network, m_Runtime->GetDeviceSpec());
+ armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork *){}};
+ {
+ ARMNN_SCOPED_HEAP_PROFILING("Optimizing");
+ optNet = armnn::Optimize(*network, m_Runtime->GetDeviceSpec());
+ }
+
+ if (params.m_VisualizePostOptimizationModel)
+ {
+ boost::filesystem::path filename = params.m_ModelPath;
+ filename.replace_extension("dot");
+ std::fstream file(filename.c_str(),file.out);
+ optNet->SerializeToDot(file);
+ }
+
+ armnn::Status ret;
+ {
+ ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
+ ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
+ }
- // Load the network into the runtime.
- armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
if (ret == armnn::Status::Failure)
{
throw armnn::Exception("IRuntime::LoadNetwork failed");
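
The new --visualize-optimized-model (-v) switch added above follows the usual boost::program_options pattern of binding a bool flag with a default value. Below is a standalone sketch of that pattern; the option name matches the patch, while the surrounding program is illustrative only and not ArmNN code.

#include <boost/program_options.hpp>
#include <iostream>

int main(int argc, char* argv[])
{
    namespace po = boost::program_options;

    bool visualizePostOptimizationModel = false;
    po::options_description desc("Options");
    desc.add_options()
        ("visualize-optimized-model,v",
         po::value<bool>(&visualizePostOptimizationModel)->default_value(false),
         "Produce a dot file useful for visualizing the graph post optimization.");

    // Parse the command line into the bound variable.
    po::variables_map vm;
    po::store(po::parse_command_line(argc, argv, desc), vm);
    po::notify(vm);

    std::cout << "visualize-optimized-model = " << visualizePostOptimizationModel << std::endl;
    return 0;
}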
diff --git a/tests/InferenceTest.cpp b/tests/InferenceTest.cpp
index 55616798e2..161481f2cd 100644
--- a/tests/InferenceTest.cpp
+++ b/tests/InferenceTest.cpp
@@ -154,7 +154,7 @@ bool InferenceTest(const InferenceTestOptions& params,
}
const unsigned int nbTotalToProcess = params.m_IterationCount > 0 ? params.m_IterationCount
- : boost::numeric_cast<unsigned int>(defaultTestCaseIds.size());
+ : static_cast<unsigned int>(defaultTestCaseIds.size());
for (; nbProcessed < nbTotalToProcess; nbProcessed++)
{
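
The one-line change above swaps boost::numeric_cast for static_cast when computing nbTotalToProcess. The practical difference is that numeric_cast throws on a narrowing overflow while static_cast converts silently; the following standalone sketch (not part of the patch) illustrates this.

#include <boost/numeric/conversion/cast.hpp>
#include <iostream>

int main()
{
    long long big = 5000000000LL; // does not fit in a 32-bit unsigned int
    // static_cast performs the conversion modulo 2^32 without throwing.
    std::cout << static_cast<unsigned int>(big) << std::endl;
    try
    {
        // numeric_cast detects the overflow and throws instead.
        std::cout << boost::numeric_cast<unsigned int>(big) << std::endl;
    }
    catch (const boost::numeric::bad_numeric_cast& e)
    {
        std::cout << "numeric_cast threw: " << e.what() << std::endl;
    }
    return 0;
}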
diff --git a/tests/InferenceTest.inl b/tests/InferenceTest.inl
index 83a99459e3..a36e231e76 100644
--- a/tests/InferenceTest.inl
+++ b/tests/InferenceTest.inl
@@ -307,6 +307,7 @@ int ClassifierInferenceTestMain(int argc, char* argv[], const char* modelFilenam
modelParams.m_InputTensorShape = inputTensorShape;
modelParams.m_IsModelBinary = isModelBinary;
modelParams.m_ComputeDevice = modelOptions.m_ComputeDevice;
+ modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
return std::make_unique<InferenceModel>(modelParams);
});
diff --git a/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp b/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
index 3c75ed7f24..37138f4a78 100644
--- a/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
+++ b/tests/MultipleNetworksCifar10/MultipleNetworksCifar10.cpp
@@ -190,7 +190,17 @@ int main(int argc, char* argv[])
}
catch (armnn::Exception const& e)
{
- BOOST_LOG_TRIVIAL(fatal) <<"Armnn Error: "<< e.what();
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "Armnn Error: " << e.what() << std::endl;
return 1;
}
-}
\ No newline at end of file
+ catch (const std::exception& e)
+ {
+ // Coverity fix: various boost exceptions can be thrown by methods called by this test.
+ std::cerr << "WARNING: MultipleNetworksCifar10: An error has occurred when running the "
+ "multiple networks inference tests: " << e.what() << std::endl;
+ return 1;
+ }
+}
diff --git a/tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp b/tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp
index 0d9e16a4ba..cfe95095a9 100644
--- a/tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp
+++ b/tests/TfCifar10-Armnn/TfCifar10-Armnn.cpp
@@ -9,9 +9,24 @@
int main(int argc, char* argv[])
{
armnn::TensorShape inputTensorShape({ 1, 32, 32, 3 });
- return armnn::test::ClassifierInferenceTestMain<Cifar10Database, armnnTfParser::ITfParser>(
- argc, argv, "cifar10_tf.prototxt", false,
- "data", "prob", { 0, 1, 2, 4, 7 },
- [](const char* dataDir) { return Cifar10Database(dataDir, true); },
- &inputTensorShape);
+
+ int retVal = EXIT_FAILURE;
+ try
+ {
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<Cifar10Database, armnnTfParser::ITfParser>(
+ argc, argv, "cifar10_tf.prototxt", false,
+ "data", "prob", { 0, 1, 2, 4, 7 },
+ [](const char* dataDir) { return Cifar10Database(dataDir, true); },
+ &inputTensorShape);
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: TfCifar10-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp b/tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp
index 94878ae4ce..441b07c9c9 100644
--- a/tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp
+++ b/tests/TfInceptionV3-Armnn/TfInceptionV3-Armnn.cpp
@@ -8,16 +8,33 @@
int main(int argc, char* argv[])
{
- std::vector<ImageSet> imageSet =
+ int retVal = EXIT_FAILURE;
+ try
{
- { "Dog.jpg", 208 },
- { "Cat.jpg", 283 },
- { "shark.jpg", 3 },
- };
- armnn::TensorShape inputTensorShape({ 1, 299, 299, 3 });
- return armnn::test::ClassifierInferenceTestMain<MobileNetDatabase, armnnTfParser::ITfParser>(
- argc, argv, "inception_v3_2016_08_28_frozen_transformed.pb", true,
- "input", "InceptionV3/Predictions/Reshape_1", { 0, 1, 2, },
- [&imageSet](const char* dataDir) { return MobileNetDatabase(dataDir, 299, 299, imageSet); },
- &inputTensorShape);
+ // Coverity fix: The following code may throw an exception of type std::length_error.
+ std::vector<ImageSet> imageSet =
+ {
+ { "Dog.jpg", 208 },
+ { "Cat.jpg", 283 },
+ { "shark.jpg", 3 },
+ };
+
+ armnn::TensorShape inputTensorShape({ 1, 299, 299, 3 });
+
+ // Coverity fix: InferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<MobileNetDatabase, armnnTfParser::ITfParser>(
+ argc, argv, "inception_v3_2016_08_28_frozen_transformed.pb", true,
+ "input", "InceptionV3/Predictions/Reshape_1", { 0, 1, 2, },
+ [&imageSet](const char* dataDir) { return MobileNetDatabase(dataDir, 299, 299, imageSet); },
+ &inputTensorShape);
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: TfInceptionV3-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/TfMnist-Armnn/TfMnist-Armnn.cpp b/tests/TfMnist-Armnn/TfMnist-Armnn.cpp
index 5625f4c055..bcc3f416cc 100644
--- a/tests/TfMnist-Armnn/TfMnist-Armnn.cpp
+++ b/tests/TfMnist-Armnn/TfMnist-Armnn.cpp
@@ -9,9 +9,24 @@
int main(int argc, char* argv[])
{
armnn::TensorShape inputTensorShape({ 1, 784, 1, 1 });
- return armnn::test::ClassifierInferenceTestMain<MnistDatabase, armnnTfParser::ITfParser>(
- argc, argv, "simple_mnist_tf.prototxt", false,
- "Placeholder", "Softmax", { 0, 1, 2, 3, 4 },
- [](const char* dataDir) { return MnistDatabase(dataDir, true); },
- &inputTensorShape);
+
+ int retVal = EXIT_FAILURE;
+ try
+ {
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<MnistDatabase, armnnTfParser::ITfParser>(
+ argc, argv, "simple_mnist_tf.prototxt", false,
+ "Placeholder", "Softmax", { 0, 1, 2, 3, 4 },
+ [](const char* dataDir) { return MnistDatabase(dataDir, true); },
+ &inputTensorShape);
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: TfMnist-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp b/tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp
index e1aebb6bb0..54759bf88a 100644
--- a/tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp
+++ b/tests/TfMobileNet-Armnn/TfMobileNet-Armnn.cpp
@@ -8,43 +8,59 @@
int main(int argc, char* argv[])
{
- std::vector<ImageSet> imageSet =
+ int retVal = EXIT_FAILURE;
+ try
{
- {"Dog.jpg", 209},
- // top five predictions in tensorflow:
- // -----------------------------------
- // 209:Labrador retriever 0.949995
- // 160:Rhodesian ridgeback 0.0270182
- // 208:golden retriever 0.0192866
- // 853:tennis ball 0.000470382
- // 239:Greater Swiss Mountain dog 0.000464451
- {"Cat.jpg", 283},
- // top five predictions in tensorflow:
- // -----------------------------------
- // 283:tiger cat 0.579016
- // 286:Egyptian cat 0.319676
- // 282:tabby, tabby cat 0.0873346
- // 288:lynx, catamount 0.011163
- // 289:leopard, Panthera pardus 0.000856755
- {"shark.jpg", 3},
- // top five predictions in tensorflow:
- // -----------------------------------
- // 3:great white shark, white shark, ... 0.996926
- // 4:tiger shark, Galeocerdo cuvieri 0.00270528
- // 149:killer whale, killer, orca, ... 0.000121848
- // 395:sturgeon 7.78977e-05
- // 5:hammerhead, hammerhead shark 6.44127e-055
- };
+ // Coverity fix: The following code may throw an exception of type std::length_error.
+ std::vector<ImageSet> imageSet =
+ {
+ {"Dog.jpg", 209},
+ // top five predictions in tensorflow:
+ // -----------------------------------
+ // 209:Labrador retriever 0.949995
+ // 160:Rhodesian ridgeback 0.0270182
+ // 208:golden retriever 0.0192866
+ // 853:tennis ball 0.000470382
+ // 239:Greater Swiss Mountain dog 0.000464451
+ {"Cat.jpg", 283},
+ // top five predictions in tensorflow:
+ // -----------------------------------
+ // 283:tiger cat 0.579016
+ // 286:Egyptian cat 0.319676
+ // 282:tabby, tabby cat 0.0873346
+ // 288:lynx, catamount 0.011163
+ // 289:leopard, Panthera pardus 0.000856755
+ {"shark.jpg", 3},
+ // top five predictions in tensorflow:
+ // -----------------------------------
+ // 3:great white shark, white shark, ... 0.996926
+ // 4:tiger shark, Galeocerdo cuvieri 0.00270528
+ // 149:killer whale, killer, orca, ... 0.000121848
+ // 395:sturgeon 7.78977e-05
+ // 5:hammerhead, hammerhead shark 6.44127e-055
+ };
- armnn::TensorShape inputTensorShape({ 1, 224, 224, 3 });
- return armnn::test::ClassifierInferenceTestMain<MobileNetDatabase, armnnTfParser::ITfParser>(
- argc, argv, "mobilenet_v1_1.0_224_fp32.pb", true, "input", "output", { 0, 1, 2 },
- [&imageSet](const char* dataDir) {
- return MobileNetDatabase(
- dataDir,
- 224,
- 224,
- imageSet);
- },
- &inputTensorShape);
+ armnn::TensorShape inputTensorShape({ 1, 224, 224, 3 });
+
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<MobileNetDatabase, armnnTfParser::ITfParser>(
+ argc, argv, "mobilenet_v1_1.0_224_fp32.pb", true, "input", "output", { 0, 1, 2 },
+ [&imageSet](const char* dataDir) {
+ return MobileNetDatabase(
+ dataDir,
+ 224,
+ 224,
+ imageSet);
+ },
+ &inputTensorShape);
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: TfMobileNet-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
}
diff --git a/tests/TfResNext_Quantized-Armnn/TfResNext_Quantized-Armnn.cpp b/tests/TfResNext_Quantized-Armnn/TfResNext_Quantized-Armnn.cpp
new file mode 100644
index 0000000000..1e1ede3e68
--- /dev/null
+++ b/tests/TfResNext_Quantized-Armnn/TfResNext_Quantized-Armnn.cpp
@@ -0,0 +1,39 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// See LICENSE file in the project root for full license information.
+//
+#include "../InferenceTest.hpp"
+#include "../ImageNetDatabase.hpp"
+#include "armnnTfParser/ITfParser.hpp"
+
+int main(int argc, char* argv[])
+{
+ int retVal = EXIT_FAILURE;
+ try
+ {
+ // Coverity fix: The following code may throw an exception of type std::length_error.
+ std::vector<ImageSet> imageSet =
+ {
+ {"ILSVRC2012_val_00000018.JPEG", 21 },
+ {"shark.jpg", 2}
+ };
+
+ armnn::TensorShape inputTensorShape({ 1, 3, 224, 224 });
+
+ // Coverity fix: ClassifierInferenceTestMain() may throw uncaught exceptions.
+ retVal = armnn::test::ClassifierInferenceTestMain<ImageNetDatabase, armnnTfParser::ITfParser>(
+ argc, argv, "resnext_TF_quantized_for_armnn_team.pb", true,
+ "inputs", "pool1", { 0, 1 },
+ [&imageSet](const char* dataDir) { return ImageNetDatabase(dataDir, 224, 224, imageSet); },
+ &inputTensorShape);
+ }
+ catch (const std::exception& e)
+ {
+ // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+ // exception of type std::length_error.
+ // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+ std::cerr << "WARNING: TfResNext_Quantized-Armnn: An error has occurred when running "
+ "the classifier inference tests: " << e.what() << std::endl;
+ }
+ return retVal;
+}