//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "../MobileNetSsdInferenceTest.hpp"

#include "armnnTfLiteParser/ITfLiteParser.hpp"

#include <algorithm>
#include <iterator>

using namespace armnnTfLiteParser;

int main(int argc, char* argv[])
{
    int retVal = EXIT_FAILURE;
    try
    {
        using DataType = float;
        using Parser   = armnnTfLiteParser::ITfLiteParser;
        using Model    = InferenceModel<Parser, DataType>;

        // SSD MobileNet v1 expects a single 300x300 RGB image in NHWC layout.
        armnn::TensorShape inputTensorShape({ 1, 300, 300, 3 });

        std::vector<const char*> inputLayerNames =
        {
            "normalized_input_image_tensor"
        };

        // The TFLite detection post-process emits four tensors:
        // boxes, classes, scores and the number of valid detections.
        std::vector<const char*> outputLayerNames =
        {
            "TFLite_Detection_PostProcess",
            "TFLite_Detection_PostProcess:1",
            "TFLite_Detection_PostProcess:2",
            "TFLite_Detection_PostProcess:3"
        };

        retVal = InferenceTestMain(argc, argv, { 0 },
            [&inputTensorShape, inputLayerNames, outputLayerNames]()
            {
                return make_unique<MobileNetSsdTestCaseProvider<Model>>(
                    [&](const InferenceTestOptions& commonOptions,
                        typename Model::CommandLineOptions modelOptions)
                    {
                        if (!ValidateDirectory(modelOptions.m_ModelDir))
                        {
                            return std::unique_ptr<Model>();
                        }

                        typename Model::Params modelParams;
                        modelParams.m_ModelPath =
                            modelOptions.m_ModelDir + "ssd_mobilenet_v1.tflite";

                        std::copy(inputLayerNames.begin(), inputLayerNames.end(),
                                  std::back_inserter(modelParams.m_InputBindings));
                        std::copy(outputLayerNames.begin(), outputLayerNames.end(),
                                  std::back_inserter(modelParams.m_OutputBindings));

                        modelParams.m_InputShapes                    = { inputTensorShape };
                        modelParams.m_IsModelBinary                  = true;
                        modelParams.m_ComputeDevices                 = modelOptions.GetComputeDevicesAsBackendIds();
                        modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
                        modelParams.m_EnableFp16TurboMode            = modelOptions.m_EnableFp16TurboMode;

                        return std::make_unique<Model>(modelParams,
                                                       commonOptions.m_EnableProfiling,
                                                       commonOptions.m_DynamicBackendsPath);
                    });
            });
    }
    catch (const std::exception& e)
    {
        std::cerr << "WARNING: " << *argv << ": An error has occurred when running "
                     "the classifier inference tests: " << e.what() << std::endl;
    }
    return retVal;
}
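
// Example invocation (a sketch; the paths are hypothetical and depend on the
// local build and data layout, the option names come from the shared ArmNN
// InferenceTest command line). Note that --model-dir must end in a path
// separator, since "ssd_mobilenet_v1.tflite" is appended to it directly:
//
//     TfLiteMobileNetSsd-Armnn --model-dir=/path/to/models/ \
//                              --data-dir=/path/to/test/images/ \
//                              --compute=CpuAcc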