5 #include "../MobileNetSsdInferenceTest.hpp" 14 int main(
int argc,
char* argv[])
16 int retVal = EXIT_FAILURE;
25 std::vector<const char*> inputLayerNames =
27 "normalized_input_image_tensor" 30 std::vector<const char*> outputLayerNames =
32 "TFLite_Detection_PostProcess",
33 "TFLite_Detection_PostProcess:1",
34 "TFLite_Detection_PostProcess:2",
35 "TFLite_Detection_PostProcess:3" 39 [&inputTensorShape, inputLayerNames, outputLayerNames]()
41 return make_unique<MobileNetSsdTestCaseProvider<Model>>(
44 typename Model::CommandLineOptions modelOptions)
48 return std::unique_ptr<Model>();
51 typename Model::Params modelParams;
52 modelParams.m_ModelPath =
53 modelOptions.m_ModelDir +
"ssd_mobilenet_v1.tflite";
55 std::copy(inputLayerNames.begin(), inputLayerNames.end(),
56 std::back_inserter(modelParams.m_InputBindings));
58 std::copy(outputLayerNames.begin(), outputLayerNames.end(),
59 std::back_inserter(modelParams.m_OutputBindings));
61 modelParams.m_InputShapes = { inputTensorShape };
62 modelParams.m_IsModelBinary =
true;
63 modelParams.m_ComputeDevices = modelOptions.GetComputeDevicesAsBackendIds();
64 modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
65 modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;
67 return std::make_unique<Model>(modelParams,
68 commonOptions.m_EnableProfiling,
69 commonOptions.m_DynamicBackendsPath);
73 catch (
const std::exception& e)
75 std::cerr <<
"WARNING: " << *argv <<
": An error has occurred when running " 76 "the classifier inference tests: " << e.what() << std::endl;
// Related declarations used above (defined by the inference-test framework,
// see InferenceTest.hpp / MobileNetSsdInferenceTest.hpp):
//   int  main(int argc, char* argv[])
//   bool ValidateDirectory(std::string& dir)
//   int  InferenceTestMain(int argc, char* argv[],
//                          const std::vector<unsigned int>& defaultTestCaseIds,
//                          TConstructTestCaseProvider constructTestCaseProvider)