path: root/tests/TfLiteMobileNetSsd-Armnn
author    Aron Virginas-Tar <Aron.Virginas-Tar@arm.com>  2019-01-29 11:09:51 +0000
committer Matteo Martincigh <matteo.martincigh@arm.com>  2019-01-30 13:05:58 +0000
commit    d089b74bebbcc8518fb0f4eacb7e6569ae170199 (patch)
tree      a86f9ec054d4daad3d20446ced27555768a84862 /tests/TfLiteMobileNetSsd-Armnn
parent    7cf0eaa26c1fb29ca9df97e4734ec7c1e10f81c4 (diff)
download  armnn-d089b74bebbcc8518fb0f4eacb7e6569ae170199.tar.gz
IVGCVSW-2437 Inference test for TensorFlow Lite MobileNet SSD
Change-Id: If7ee1efa3ee79d9eca41c5a6219b3fc42e740efe
Diffstat (limited to 'tests/TfLiteMobileNetSsd-Armnn')
-rw-r--r-- tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp | 76
1 file changed, 76 insertions, 0 deletions
diff --git a/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp b/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
new file mode 100644
index 0000000000..b1bc0f6120
--- /dev/null
+++ b/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
@@ -0,0 +1,76 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#include "../MobileNetSsdInferenceTest.hpp"
+
+#include "armnnTfLiteParser/ITfLiteParser.hpp"
+
+#include <algorithm>
+#include <iterator>
+
+using namespace armnnTfLiteParser;
+
+int main(int argc, char* argv[])
+{
+    int retVal = EXIT_FAILURE;
+    try
+    {
+        using DataType = float;
+        using Parser   = armnnTfLiteParser::ITfLiteParser;
+        using Model    = InferenceModel<Parser, DataType>;
+
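+        // The TfLite model takes a single normalized 300x300 RGB image,
+        // batch size 1, in NHWC layout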
+        armnn::TensorShape inputTensorShape({ 1, 300, 300, 3 });
+
+        std::vector<const char*> inputLayerNames =
+        {
+            "normalized_input_image_tensor"
+        };
+
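+        // TFLite_Detection_PostProcess emits four tensors: detection boxes,
+        // class indices, confidence scores and the number of valid detections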
+        std::vector<const char*> outputLayerNames =
+        {
+            "TFLite_Detection_PostProcess",
+            "TFLite_Detection_PostProcess:1",
+            "TFLite_Detection_PostProcess:2",
+            "TFLite_Detection_PostProcess:3"
+        };
+
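+        // Run test case 0 by default; the factory below builds the InferenceModel
+        // once the command line options have been parsed and validated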
+        retVal = InferenceTestMain(argc, argv, { 0 },
+            [&inputTensorShape, inputLayerNames, outputLayerNames]()
+            {
+                return make_unique<MobileNetSsdTestCaseProvider<Model>>(
+                    [&](typename Model::CommandLineOptions modelOptions)
+                    {
+                        if (!ValidateDirectory(modelOptions.m_ModelDir))
+                        {
+                            return std::unique_ptr<Model>();
+                        }
+
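+                        // Resolve the model file inside the user-supplied model directory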
+                        typename Model::Params modelParams;
+                        modelParams.m_ModelPath =
+                            modelOptions.m_ModelDir + "ssd_mobilenet_v1.tflite";
+
+                        std::copy(inputLayerNames.begin(), inputLayerNames.end(),
+                                  std::back_inserter(modelParams.m_InputBindings));
+
+                        std::copy(outputLayerNames.begin(), outputLayerNames.end(),
+                                  std::back_inserter(modelParams.m_OutputBindings));
+
+                        modelParams.m_InputShapes                    = { inputTensorShape };
+                        modelParams.m_IsModelBinary                  = true;
+                        modelParams.m_ComputeDevice                  = modelOptions.m_ComputeDevice;
+                        modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
+                        modelParams.m_EnableFp16TurboMode            = modelOptions.m_EnableFp16TurboMode;
+
+                        return std::make_unique<Model>(modelParams);
+                    });
+            });
+    }
+    catch (const std::exception& e)
+    {
+        std::cerr << "WARNING: " << *argv << ": An error has occurred when running "
+                     "the object detection inference tests: " << e.what() << std::endl;
+    }
+    return retVal;
+}
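
For context, the sketch below shows roughly how the same model could be loaded through armnnTfLiteParser directly, outside the InferenceTest harness. It is illustrative only and not part of this change; the model path ("ssd_mobilenet_v1.tflite" in the working directory) and the CpuRef backend choice are assumptions.

    #include "armnnTfLiteParser/ITfLiteParser.hpp"

    #include <armnn/ArmNN.hpp>

    #include <utility>

    int main()
    {
        using namespace armnnTfLiteParser;

        // Parse the TfLite flatbuffer into an armnn::INetwork
        ITfLiteParserPtr parser = ITfLiteParser::Create();
        armnn::INetworkPtr network =
            parser->CreateNetworkFromBinaryFile("ssd_mobilenet_v1.tflite"); // assumed path

        // Look up the same binding points the test registers above (subgraph 0)
        BindingPointInfo input = parser->GetNetworkInputBindingInfo(0, "normalized_input_image_tensor");
        BindingPointInfo boxes = parser->GetNetworkOutputBindingInfo(0, "TFLite_Detection_PostProcess");

        // Optimize for the reference backend and load the result into a runtime
        armnn::IRuntime::CreationOptions options;
        armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);
        armnn::IOptimizedNetworkPtr optNet =
            armnn::Optimize(*network, { armnn::Compute::CpuRef }, runtime->GetDeviceSpec());

        armnn::NetworkId networkId = 0;
        runtime->LoadNetwork(networkId, std::move(optNet));

        // Inference would then go through runtime->EnqueueWorkload(), with input and
        // output tensors built from the binding infos resolved above
        return 0;
    }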