author     surmeh01 <surabhi.mehta@arm.com>    2018-05-18 16:31:43 +0100
committer  telsoa01 <telmo.soares@arm.com>     2018-05-23 13:09:07 +0100
commit     3537c2ca7ebf31c1673b9ec2bb0c17b0406bbae0 (patch)
tree       5950603ad78ec3fe56fb31ddc7f4d52a19f5bc60 /tests/CaffeYolo-Armnn
parent     bceff2fb3fc68bb0aa88b886900c34b77340c826 (diff)
download   armnn-3537c2ca7ebf31c1673b9ec2bb0c17b0406bbae0.tar.gz
Release 18.05
Diffstat (limited to 'tests/CaffeYolo-Armnn')
-rw-r--r--  tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp | 53
1 file changed, 34 insertions(+), 19 deletions(-)
diff --git a/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp b/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
index af60be95ec..ad79d49f0c 100644
--- a/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
+++ b/tests/CaffeYolo-Armnn/CaffeYolo-Armnn.cpp
@@ -13,27 +13,42 @@ int main(int argc, char* argv[])
     using YoloInferenceModel = InferenceModel<armnnCaffeParser::ICaffeParser,
         float>;
 
-    return InferenceTestMain(argc, argv, { 0 },
-        [&inputTensorShape]()
-        {
-            return make_unique<YoloTestCaseProvider<YoloInferenceModel>>(
-                [&]
-                (typename YoloInferenceModel::CommandLineOptions modelOptions)
-                {
-                    if (!ValidateDirectory(modelOptions.m_ModelDir))
+    int retVal = EXIT_FAILURE;
+    try
+    {
+        // Coverity fix: InferenceTestMain() may throw uncaught exceptions.
+        retVal = InferenceTestMain(argc, argv, { 0 },
+            [&inputTensorShape]()
+            {
+                return make_unique<YoloTestCaseProvider<YoloInferenceModel>>(
+                    [&]
+                    (typename YoloInferenceModel::CommandLineOptions modelOptions)
                     {
-                        return std::unique_ptr<YoloInferenceModel>();
-                    }
+                        if (!ValidateDirectory(modelOptions.m_ModelDir))
+                        {
+                            return std::unique_ptr<YoloInferenceModel>();
+                        }
 
-                    typename YoloInferenceModel::Params modelParams;
-                    modelParams.m_ModelPath = modelOptions.m_ModelDir + "yolov1_tiny_voc2007_model.caffemodel";
-                    modelParams.m_InputBinding = "data";
-                    modelParams.m_OutputBinding = "fc12";
-                    modelParams.m_InputTensorShape = &inputTensorShape;
-                    modelParams.m_IsModelBinary = true;
-                    modelParams.m_ComputeDevice = modelOptions.m_ComputeDevice;
+                        typename YoloInferenceModel::Params modelParams;
+                        modelParams.m_ModelPath = modelOptions.m_ModelDir + "yolov1_tiny_voc2007_model.caffemodel";
+                        modelParams.m_InputBinding = "data";
+                        modelParams.m_OutputBinding = "fc12";
+                        modelParams.m_InputTensorShape = &inputTensorShape;
+                        modelParams.m_IsModelBinary = true;
+                        modelParams.m_ComputeDevice = modelOptions.m_ComputeDevice;
+                        modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
 
-                    return std::make_unique<YoloInferenceModel>(modelParams);
+                        return std::make_unique<YoloInferenceModel>(modelParams);
+                    });
                 });
-        });
+    }
+    catch (const std::exception& e)
+    {
+        // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
+        // exception of type std::length_error.
+        // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
+        std::cerr << "WARNING: CaffeYolo-Armnn: An error has occurred when running "
+                     "the classifier inference tests: " << e.what() << std::endl;
+    }
+    return retVal;
 }
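
Note: the change follows the Coverity-driven pattern used in this release's test executables: run the test entry point inside a try/catch, report any exception on stderr (rather than through the logging framework, which can itself throw), and return an explicit exit code. Below is a minimal standalone sketch of that pattern; RunTests() is a hypothetical stand-in for InferenceTestMain() and is not part of the ArmNN API.

#include <cstdlib>
#include <exception>
#include <iostream>

// Hypothetical test entry point standing in for InferenceTestMain();
// it may throw on failure.
int RunTests()
{
    // ... build the model provider and run the test cases ...
    return EXIT_SUCCESS;
}

int main()
{
    int retVal = EXIT_FAILURE;
    try
    {
        retVal = RunTests();
    }
    catch (const std::exception& e)
    {
        // Report via stderr; a logging call here could throw again
        // (e.g. std::length_error), defeating the purpose of the guard.
        std::cerr << "WARNING: test run failed: " << e.what() << std::endl;
    }
    return retVal;
}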