From d089b74bebbcc8518fb0f4eacb7e6569ae170199 Mon Sep 17 00:00:00 2001
From: Aron Virginas-Tar <aron.virginas-tar@arm.com>
Date: Tue, 29 Jan 2019 11:09:51 +0000
Subject: IVGCVSW-2437 Inference test for TensorFlow Lite MobileNet SSD

Change-Id: If7ee1efa3ee79d9eca41c5a6219b3fc42e740efe
---
 tests/CMakeLists.txt                               |   7 +
 tests/MobileNetSsdDatabase.hpp                     | 105 +++++++++++
 tests/MobileNetSsdInferenceTest.hpp                | 202 +++++++++++++++++++++
 tests/ObjectDetectionCommon.hpp                    |  49 +++++
 .../TfLiteMobileNetSsd-Armnn.cpp                   |  76 ++++++++
 5 files changed, 439 insertions(+)
 create mode 100644 tests/MobileNetSsdDatabase.hpp
 create mode 100644 tests/MobileNetSsdInferenceTest.hpp
 create mode 100644 tests/ObjectDetectionCommon.hpp
 create mode 100644 tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp

diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 981553702e..e8f72eb4ee 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -164,6 +164,13 @@ if (BUILD_TF_LITE_PARSER)
         ImagePreprocessor.cpp)
     TfLiteParserTest(TfLiteMobilenetQuantized-Armnn "${TfLiteMobilenetQuantized-Armnn_sources}")
 
+    set(TfLiteMobileNetSsd-Armnn_sources
+        TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
+        MobileNetSsdDatabase.hpp
+        MobileNetSsdInferenceTest.hpp
+        ObjectDetectionCommon.hpp)
+    TfLiteParserTest(TfLiteMobileNetSsd-Armnn "${TfLiteMobileNetSsd-Armnn_sources}")
+
     set(TfLiteVGG16Quantized-Armnn_sources
         TfLiteVGG16Quantized-Armnn/TfLiteVGG16Quantized-Armnn.cpp
         ImagePreprocessor.hpp
diff --git a/tests/MobileNetSsdDatabase.hpp b/tests/MobileNetSsdDatabase.hpp
new file mode 100644
index 0000000000..e3a28d13bd
--- /dev/null
+++ b/tests/MobileNetSsdDatabase.hpp
@@ -0,0 +1,105 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+
+#include "ObjectDetectionCommon.hpp"
+
+#include <array>
+#include <string>
+#include <vector>
+
+#include <armnn/TypesUtils.hpp>
+
+#include <boost/log/trivial.hpp>
+#include <boost/numeric/conversion/cast.hpp>
+
+#include <memory>
+#include <utility>
+
+#include "InferenceTestImage.hpp"
+
+namespace
+{
+
+// Holds one test case: the preprocessed input tensor data and the
+// detections the model is expected to produce for it.
+struct MobileNetSsdTestCaseData
+{
+    MobileNetSsdTestCaseData(
+        std::vector<float> inputData,
+        std::vector<DetectedObject> expectedOutput)
+        : m_InputData(std::move(inputData))
+        , m_ExpectedOutput(std::move(expectedOutput))
+    {}
+
+    std::vector<float> m_InputData;
+    std::vector<DetectedObject> m_ExpectedOutput;
+};
+
+// Loads test images from a directory and pairs them with their
+// expected detection results.
+class MobileNetSsdDatabase
+{
+public:
+    explicit MobileNetSsdDatabase(const std::string& imageDir);
+
+    std::unique_ptr<MobileNetSsdTestCaseData> GetTestCaseData(unsigned int testCaseId);
+
+private:
+    std::string m_ImageDir;
+};
+
+constexpr unsigned int k_MobileNetSsdImageWidth  = 300u;
+constexpr unsigned int k_MobileNetSsdImageHeight = k_MobileNetSsdImageWidth;
+
+// Test cases
+const std::array<ObjectDetectionInput, 1> g_PerTestCaseInput =
+{
+    ObjectDetectionInput
+    {
+        "Cat.jpg",
+        DetectedObject(16, BoundingBox(0.21678525f, 0.0859828f, 0.9271242f, 0.9453231f), 0.79296875f)
+    }
+};
+
+MobileNetSsdDatabase::MobileNetSsdDatabase(const std::string& imageDir)
+    : m_ImageDir(imageDir)
+{}
+
+std::unique_ptr<MobileNetSsdTestCaseData> MobileNetSsdDatabase::GetTestCaseData(unsigned int testCaseId)
+{
+    // Wrap around so any testCaseId maps onto the available test cases
+    const unsigned int safeTestCaseId =
+        testCaseId % boost::numeric_cast<unsigned int>(g_PerTestCaseInput.size());
+    const ObjectDetectionInput& testCaseInput = g_PerTestCaseInput[safeTestCaseId];
+
+    // Load test case input
+    const std::string imagePath = m_ImageDir + testCaseInput.first;
+    std::vector<float> imageData;
+    try
+    {
+        InferenceTestImage image(imagePath.c_str());
+
+        // Resize image (if needed)
+        const unsigned int width  = image.GetWidth();
+        const unsigned int height = image.GetHeight();
+        if (width != k_MobileNetSsdImageWidth || height != k_MobileNetSsdImageHeight)
+        {
+            image.Resize(k_MobileNetSsdImageWidth, k_MobileNetSsdImageHeight, CHECK_LOCATION());
+        }
+
+        // Get image data as a vector of floats
+        imageData =
+            GetImageDataInArmNnLayoutAsNormalizedFloats(ImageChannelLayout::Rgb, image);
+    }
+    catch (const InferenceTestImageException& e)
+    {
+        BOOST_LOG_TRIVIAL(fatal) << "Failed to load image for test case " << testCaseId << ". Error: " << e.what();
+        return nullptr;
+    }
+
+    // Prepare test case expected output
+    std::vector<DetectedObject> expectedOutput;
+    expectedOutput.reserve(1);
+    expectedOutput.push_back(testCaseInput.second);
+
+    return std::make_unique<MobileNetSsdTestCaseData>(std::move(imageData), std::move(expectedOutput));
+}
+
+} // anonymous namespace
diff --git a/tests/MobileNetSsdInferenceTest.hpp b/tests/MobileNetSsdInferenceTest.hpp
new file mode 100644
index 0000000000..cf00966e4b
--- /dev/null
+++ b/tests/MobileNetSsdInferenceTest.hpp
@@ -0,0 +1,202 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+
+#include "InferenceTest.hpp"
+#include "MobileNetSsdDatabase.hpp"
+
+#include <boost/assert.hpp>
+#include <boost/log/trivial.hpp>
+#include <boost/numeric/conversion/cast.hpp>
+#include <boost/test/floating_point_comparison.hpp>
+
+#include <vector>
+
+namespace
+{
+
+// Test case that runs the MobileNet SSD model on one input and checks the
+// four post-processing outputs against the expected detections.
+template <typename Model>
+class MobileNetSsdTestCase : public InferenceModelTestCase<Model>
+{
+public:
+    MobileNetSsdTestCase(Model& model,
+                         unsigned int testCaseId,
+                         const MobileNetSsdTestCaseData& testCaseData)
+        : InferenceModelTestCase<Model>(model,
+                                        testCaseId,
+                                        { std::move(testCaseData.m_InputData) },
+                                        { k_OutputSize1, k_OutputSize2, k_OutputSize3, k_OutputSize4 })
+        , m_FloatComparer(boost::math::fpc::percent_tolerance(1.0f))
+        , m_DetectedObjects(testCaseData.m_ExpectedOutput)
+    {}
+
+    TestCaseResult ProcessResult(const InferenceTestOptions& options) override
+    {
+        const std::vector<float>& output1 = this->GetOutputs()[0]; // bounding boxes
+        BOOST_ASSERT(output1.size() == k_OutputSize1);
+
+        const std::vector<float>& output2 = this->GetOutputs()[1]; // classes
+        BOOST_ASSERT(output2.size() == k_OutputSize2);
+
+        const std::vector<float>& output3 = this->GetOutputs()[2]; // scores
+        BOOST_ASSERT(output3.size() == k_OutputSize3);
+
+        const std::vector<float>& output4 = this->GetOutputs()[3]; // number of valid detections
+        BOOST_ASSERT(output4.size() == k_OutputSize4);
+
+        // Extract detected objects from output data
+        std::vector<DetectedObject> detectedObjects;
+        const float* outputData = output1.data();
+        for (unsigned int i = 0u; i < k_NumDetections; i++)
+        {
+            // NOTE: Order of coordinates in output data is yMin, xMin, yMax, xMax
+            float yMin = *outputData++;
+            float xMin = *outputData++;
+            float yMax = *outputData++;
+            float xMax = *outputData++;
+
+            DetectedObject detectedObject(
+                static_cast<unsigned int>(output2.at(i)),
+                BoundingBox(xMin, yMin, xMax, yMax),
+                output3.at(i));
+
+            detectedObjects.push_back(detectedObject);
+        }
+
+        // Sort detected objects by confidence
+        std::sort(detectedObjects.begin(), detectedObjects.end(),
+            [](const DetectedObject& a, const DetectedObject& b)
+            {
+                return a.m_Confidence > b.m_Confidence ||
+                    (a.m_Confidence == b.m_Confidence && a.m_Class > b.m_Class);
+            });
+
+        // Check if number of valid detections matches expectations
+        const size_t numValidDetections = boost::numeric_cast<size_t>(output4[0]);
+        if (numValidDetections != m_DetectedObjects.size())
+        {
+            BOOST_LOG_TRIVIAL(error) << "Number of valid detections is incorrect: Expected (" <<
+                m_DetectedObjects.size() << ")" << " but got (" << numValidDetections << ")";
+            return TestCaseResult::Failed;
+        }
+
+        // Compare detected objects with expected results
+        std::vector<DetectedObject>::const_iterator it = detectedObjects.begin();
+        for (const DetectedObject& expectedDetection : m_DetectedObjects)
+        {
+            if (it == detectedObjects.end())
+            {
+                BOOST_LOG_TRIVIAL(info) << "No more detected objects to compare";
+                return TestCaseResult::Abort;
+            }
+
+            const DetectedObject& detectedObject = *it;
+            if (detectedObject.m_Class != expectedDetection.m_Class)
+            {
+                BOOST_LOG_TRIVIAL(error) << "Prediction for test case " << this->GetTestCaseId() <<
+                    " is incorrect: Expected (" << expectedDetection.m_Class << ")" <<
+                    " but predicted (" << detectedObject.m_Class << ")";
+                return TestCaseResult::Failed;
+            }
+
+            if(!m_FloatComparer(detectedObject.m_Confidence, expectedDetection.m_Confidence))
+            {
+                BOOST_LOG_TRIVIAL(error) << "Confidence of prediction for test case " << this->GetTestCaseId() <<
+                    " is incorrect: Expected (" << expectedDetection.m_Confidence << ") +- 1.0 pc" <<
+                    " but predicted (" << detectedObject.m_Confidence << ")";
+                return TestCaseResult::Failed;
+            }
+
+            if (!m_FloatComparer(detectedObject.m_BoundingBox.m_XMin, expectedDetection.m_BoundingBox.m_XMin) ||
+                !m_FloatComparer(detectedObject.m_BoundingBox.m_YMin, expectedDetection.m_BoundingBox.m_YMin) ||
+                !m_FloatComparer(detectedObject.m_BoundingBox.m_XMax, expectedDetection.m_BoundingBox.m_XMax) ||
+                !m_FloatComparer(detectedObject.m_BoundingBox.m_YMax, expectedDetection.m_BoundingBox.m_YMax))
+            {
+                BOOST_LOG_TRIVIAL(error) << "Detected bounding box for test case " << this->GetTestCaseId() <<
+                    " is incorrect";
+                return TestCaseResult::Failed;
+            }
+
+            ++it;
+        }
+
+        return TestCaseResult::Ok;
+    }
+
+private:
+    static constexpr unsigned int k_NumDetections = 10u;
+
+    static constexpr unsigned int k_OutputSize1 = k_NumDetections * 4u;
+    static constexpr unsigned int k_OutputSize2 = k_NumDetections;
+    static constexpr unsigned int k_OutputSize3 = k_NumDetections;
+    static constexpr unsigned int k_OutputSize4 = 1u;
+
+    boost::math::fpc::close_at_tolerance<float> m_FloatComparer;
+    std::vector<DetectedObject> m_DetectedObjects;
+};
+
+// Provides MobileNetSsdTestCase instances: parses command-line options,
+// constructs the model and loads the test database.
+template <typename Model>
+class MobileNetSsdTestCaseProvider : public IInferenceTestCaseProvider
+{
+public:
+    template <typename TConstructModelCallable>
+    explicit MobileNetSsdTestCaseProvider(TConstructModelCallable constructModel)
+        : m_ConstructModel(constructModel)
+    {}
+
+    virtual void AddCommandLineOptions(boost::program_options::options_description& options) override
+    {
+        namespace po = boost::program_options;
+
+        options.add_options()
+            ("data-dir,d", po::value<std::string>(&m_DataDir)->required(),
+             "Path to directory containing test data");
+
+        Model::AddCommandLineOptions(options, m_ModelCommandLineOptions);
+    }
+
+    virtual bool
+    ProcessCommandLineOptions() override
+    {
+        if (!ValidateDirectory(m_DataDir))
+        {
+            return false;
+        }
+
+        m_Model = m_ConstructModel(m_ModelCommandLineOptions);
+        if (!m_Model)
+        {
+            return false;
+        }
+
+        m_Database = std::make_unique<MobileNetSsdDatabase>(m_DataDir.c_str());
+        if (!m_Database)
+        {
+            return false;
+        }
+
+        return true;
+    }
+
+    std::unique_ptr<IInferenceTestCase> GetTestCase(unsigned int testCaseId) override
+    {
+        std::unique_ptr<MobileNetSsdTestCaseData> testCaseData = m_Database->GetTestCaseData(testCaseId);
+        if (!testCaseData)
+        {
+            return nullptr;
+        }
+
+        return std::make_unique<MobileNetSsdTestCase<Model>>(*m_Model, testCaseId, *testCaseData);
+    }
+
+private:
+    typename Model::CommandLineOptions m_ModelCommandLineOptions;
+    std::function<std::unique_ptr<Model>(typename Model::CommandLineOptions)> m_ConstructModel;
+    std::unique_ptr<Model> m_Model;
+
+    std::string m_DataDir;
+    std::unique_ptr<MobileNetSsdDatabase> m_Database;
+};
+
+} // anonymous namespace
\ No newline at end of file
diff --git a/tests/ObjectDetectionCommon.hpp b/tests/ObjectDetectionCommon.hpp
new file mode 100644
index 0000000000..85b54c255f
--- /dev/null
+++ b/tests/ObjectDetectionCommon.hpp
@@ -0,0 +1,49 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#pragma once
+
+#include <string>
+#include <utility>
+
+namespace
+{
+
+// Axis-aligned bounding box in normalized [0, 1] image coordinates.
+struct BoundingBox
+{
+    BoundingBox()
+        : BoundingBox(0.0f, 0.0f, 0.0f, 0.0f)
+    {}
+
+    BoundingBox(float xMin, float yMin, float xMax, float yMax)
+        : m_XMin(xMin)
+        , m_YMin(yMin)
+        , m_XMax(xMax)
+        , m_YMax(yMax)
+    {}
+
+    float m_XMin;
+    float m_YMin;
+    float m_XMax;
+    float m_YMax;
+};
+
+// One detection: class index, location and confidence score.
+struct DetectedObject
+{
+    DetectedObject(unsigned int detectedClass,
+                   const BoundingBox& boundingBox,
+                   float confidence)
+        : m_Class(detectedClass)
+        , m_BoundingBox(boundingBox)
+        , m_Confidence(confidence)
+    {}
+
+    unsigned int m_Class;
+    BoundingBox m_BoundingBox;
+    float m_Confidence;
+};
+
+using ObjectDetectionInput = std::pair<std::string, DetectedObject>;
+
+} // anonymous namespace
\ No newline at end of file
diff --git a/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp b/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
new file mode 100644
index 0000000000..b1bc0f6120
--- /dev/null
+++ b/tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp
@@ -0,0 +1,76 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+#include "../MobileNetSsdInferenceTest.hpp"
+
+#include "armnnTfLiteParser/ITfLiteParser.hpp"
+
+#include <algorithm>
+#include <iterator>
+
+using namespace armnnTfLiteParser;
+
+int main(int argc, char* argv[])
+{
+    int retVal = EXIT_FAILURE;
+    try
+    {
+        using DataType = float;
+        using Parser   = armnnTfLiteParser::ITfLiteParser;
+        using Model    = InferenceModel<Parser, DataType>;
+
+        armnn::TensorShape inputTensorShape({ 1, 300, 300, 3 });
+
+        std::vector<const char*> inputLayerNames  =
+        {
+            "normalized_input_image_tensor"
+        };
+
+        std::vector<const char*> outputLayerNames =
+        {
+            "TFLite_Detection_PostProcess",
+            "TFLite_Detection_PostProcess:1",
+            "TFLite_Detection_PostProcess:2",
+            "TFLite_Detection_PostProcess:3"
+        };
+
+        retVal = InferenceTestMain(argc, argv, { 0 },
+            [&inputTensorShape, inputLayerNames, outputLayerNames]()
+            {
+                return make_unique<MobileNetSsdTestCaseProvider<Model>>(
+                    [&]
+                    (typename Model::CommandLineOptions modelOptions)
+                    {
+                        if (!ValidateDirectory(modelOptions.m_ModelDir))
+                        {
+                            return std::unique_ptr<Model>();
+                        }
+
+                        typename Model::Params modelParams;
+                        modelParams.m_ModelPath =
+                            modelOptions.m_ModelDir + "ssd_mobilenet_v1.tflite";
+
+                        std::copy(inputLayerNames.begin(), inputLayerNames.end(),
+                                  std::back_inserter(modelParams.m_InputBindings));
+
+                        std::copy(outputLayerNames.begin(), outputLayerNames.end(),
+                                  std::back_inserter(modelParams.m_OutputBindings));
+
+                        modelParams.m_InputShapes                    = { inputTensorShape };
+                        modelParams.m_IsModelBinary                  = true;
+                        modelParams.m_ComputeDevice                  = modelOptions.m_ComputeDevice;
+                        modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
+                        modelParams.m_EnableFp16TurboMode            = modelOptions.m_EnableFp16TurboMode;
+
+                        return std::make_unique<Model>(modelParams);
+                    });
+            });
+    }
+    catch (const std::exception& e)
+    {
+        std::cerr << "WARNING: " << *argv << ": An error has occurred when running "
+                     "the classifier inference tests: " << e.what() << std::endl;
+    }
+    return retVal;
+}
-- 
cgit v1.2.1