From d089b74bebbcc8518fb0f4eacb7e6569ae170199 Mon Sep 17 00:00:00 2001
From: Aron Virginas-Tar
Date: Tue, 29 Jan 2019 11:09:51 +0000
Subject: IVGCVSW-2437 Inference test for TensorFlow Lite MobileNet SSD

Change-Id: If7ee1efa3ee79d9eca41c5a6219b3fc42e740efe
---
 .../TfLiteMobileNetSsd-Armnn.cpp                   | 76 ++++++++++++++++++++++
 1 file changed, 76 insertions(+)
 create mode 100644 tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp

diff --git a/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp b/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
new file mode 100644
index 0000000000..b1bc0f6120
--- /dev/null
+++ b/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
@@ -0,0 +1,76 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#include "../MobileNetSsdInferenceTest.hpp"
+
+#include "armnnTfLiteParser/ITfLiteParser.hpp"
+
+#include <algorithm>
+#include <iterator>
+
+using namespace armnnTfLiteParser;
+
+int main(int argc, char* argv[])
+{
+    int retVal = EXIT_FAILURE;
+    try
+    {
+        using DataType = float;
+        using Parser   = armnnTfLiteParser::ITfLiteParser;
+        using Model    = InferenceModel<Parser, DataType>;
+
+        armnn::TensorShape inputTensorShape({ 1, 300, 300, 3 });
+
+        std::vector<const char*> inputLayerNames  =
+        {
+            "normalized_input_image_tensor"
+        };
+
+        std::vector<const char*> outputLayerNames =
+        {
+            "TFLite_Detection_PostProcess",
+            "TFLite_Detection_PostProcess:1",
+            "TFLite_Detection_PostProcess:2",
+            "TFLite_Detection_PostProcess:3"
+        };
+
+        retVal = InferenceTestMain(argc, argv, { 0 },
+            [&inputTensorShape, inputLayerNames, outputLayerNames]()
+            {
+                return make_unique<MobileNetSsdTestCaseProvider<Model>>(
+                    [&]
+                    (typename Model::CommandLineOptions modelOptions)
+                    {
+                        if (!ValidateDirectory(modelOptions.m_ModelDir))
+                        {
+                            return std::unique_ptr<Model>();
+                        }
+
+                        typename Model::Params modelParams;
+                        modelParams.m_ModelPath =
+                            modelOptions.m_ModelDir + "ssd_mobilenet_v1.tflite";
+
+                        std::copy(inputLayerNames.begin(), inputLayerNames.end(),
+                                  std::back_inserter(modelParams.m_InputBindings));
+
+                        std::copy(outputLayerNames.begin(), outputLayerNames.end(),
+                                  std::back_inserter(modelParams.m_OutputBindings));
+
+                        modelParams.m_InputShapes                    = { inputTensorShape };
+                        modelParams.m_IsModelBinary                  = true;
+                        modelParams.m_ComputeDevice                  = modelOptions.m_ComputeDevice;
+                        modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
+                        modelParams.m_EnableFp16TurboMode            = modelOptions.m_EnableFp16TurboMode;
+
+                        return std::make_unique<Model>(modelParams);
+                    });
+            });
+    }
+    catch (const std::exception& e)
+    {
+        std::cerr << "WARNING: " << *argv << ": An error has occurred when running "
+                     "the classifier inference tests: " << e.what() << std::endl;
+    }
+    return retVal;
+}
-- 
cgit v1.2.1