From fd627ffaec8fd8801d980b4c91ee7c0607ab6aaf Mon Sep 17 00:00:00 2001
From: Jan Eilers
Date: Thu, 25 Feb 2021 17:44:00 +0000
Subject: IVGCVSW-5687 Update Doxygen Docu

* Update Doxygen Documentation for 21.02 release

Signed-off-by: Jan Eilers
Change-Id: I9ed2f9caab038836ea99d7b378d7899fe431a4e5
---
 ...tf_lite_mobile_net_ssd-_armnn_8cpp_source.xhtml | 123 +++++++++++++++++++++
 1 file changed, 123 insertions(+)
 create mode 100644 21.02/_tf_lite_mobile_net_ssd-_armnn_8cpp_source.xhtml

(limited to '21.02/_tf_lite_mobile_net_ssd-_armnn_8cpp_source.xhtml')

diff --git a/21.02/_tf_lite_mobile_net_ssd-_armnn_8cpp_source.xhtml b/21.02/_tf_lite_mobile_net_ssd-_armnn_8cpp_source.xhtml
new file mode 100644
index 0000000000..0b1c0b7333
--- /dev/null
+++ b/21.02/_tf_lite_mobile_net_ssd-_armnn_8cpp_source.xhtml
@@ -0,0 +1,123 @@

ArmNN: tests/TfLiteMobileNetSsd-Armnn/TfLiteMobileNetSsd-Armnn.cpp Source File
TfLiteMobileNetSsd-Armnn.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "../MobileNetSsdInferenceTest.hpp"

#include "armnnTfLiteParser/ITfLiteParser.hpp"

#include <algorithm>
#include <iterator>

using namespace armnnTfLiteParser;

int main(int argc, char* argv[])
{
    int retVal = EXIT_FAILURE;
    try
    {
        using DataType = float;
        using Parser   = armnnTfLiteParser::ITfLiteParser;
        using Model    = InferenceModel<Parser, DataType>;

        // SSD MobileNet v1 takes a single 1x300x300x3 image tensor as input.
        armnn::TensorShape inputTensorShape({ 1, 300, 300, 3 });

        std::vector<const char*> inputLayerNames =
        {
            "normalized_input_image_tensor"
        };

        // The TFLite detection post-process op produces four outputs:
        // boxes, classes, scores and the number of detections.
        std::vector<const char*> outputLayerNames =
        {
            "TFLite_Detection_PostProcess",
            "TFLite_Detection_PostProcess:1",
            "TFLite_Detection_PostProcess:2",
            "TFLite_Detection_PostProcess:3"
        };

        retVal = InferenceTestMain(argc, argv, { 0 },
            [&inputTensorShape, inputLayerNames, outputLayerNames]()
            {
                return make_unique<MobileNetSsdTestCaseProvider<Model>>(
                    [&]
                    (const InferenceTestOptions& commonOptions,
                     typename Model::CommandLineOptions modelOptions)
                    {
                        if (!ValidateDirectory(modelOptions.m_ModelDir))
                        {
                            return std::unique_ptr<Model>();
                        }

                        typename Model::Params modelParams;
                        modelParams.m_ModelPath =
                            modelOptions.m_ModelDir + "ssd_mobilenet_v1.tflite";

                        // Bind the parser's input and output layer names for the inference model.
                        std::copy(inputLayerNames.begin(), inputLayerNames.end(),
                                  std::back_inserter(modelParams.m_InputBindings));

                        std::copy(outputLayerNames.begin(), outputLayerNames.end(),
                                  std::back_inserter(modelParams.m_OutputBindings));

                        modelParams.m_InputShapes                    = { inputTensorShape };
                        modelParams.m_IsModelBinary                  = true;
                        modelParams.m_ComputeDevices                 = modelOptions.GetComputeDevicesAsBackendIds();
                        modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
                        modelParams.m_EnableFp16TurboMode            = modelOptions.m_EnableFp16TurboMode;

                        return std::make_unique<Model>(modelParams,
                                                       commonOptions.m_EnableProfiling,
                                                       commonOptions.m_DynamicBackendsPath);
                    });
            });
    }
    catch (const std::exception& e)
    {
        std::cerr << "WARNING: " << *argv << ": An error has occurred when running "
                     "the classifier inference tests: " << e.what() << std::endl;
    }
    return retVal;
}
Declarations referenced in this file:
    DataType    (Definition: Types.hpp:32)
    int main(int argc, char *argv[])
    bool ValidateDirectory(std::string &dir)
    int InferenceTestMain(int argc, char *argv[], const std::vector<unsigned int> &defaultTestCaseIds, TConstructTestCaseProvider constructTestCaseProvider)
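As an aside, not part of the patch above: a minimal sketch of loading the same ssd_mobilenet_v1.tflite model directly with the public armnnTfLiteParser and armnn runtime APIs that this test exercises. The model path and the choice of the CpuRef backend are assumptions for illustration; in the test above these come from the harness's command-line options.

// Hypothetical standalone loader, assuming ArmNN's public API as of 21.02.
#include <armnn/ArmNN.hpp>
#include <armnnTfLiteParser/ITfLiteParser.hpp>

#include <iostream>
#include <utility>

int main()
{
    // Parse the TFLite flatbuffer into an ArmNN network (file path is an assumption).
    auto parser = armnnTfLiteParser::ITfLiteParser::Create();
    armnn::INetworkPtr network = parser->CreateNetworkFromBinaryFile("ssd_mobilenet_v1.tflite");

    // Create a runtime and optimise the network for the reference CPU backend.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime = armnn::IRuntime::Create(options);
    armnn::IOptimizedNetworkPtr optNet =
        armnn::Optimize(*network, { armnn::Compute::CpuRef }, runtime->GetDeviceSpec());

    // Load the optimised network; input/output tensors would then be bound via the
    // parser's GetNetworkInputBindingInfo / GetNetworkOutputBindingInfo before inference.
    armnn::NetworkId networkId;
    runtime->LoadNetwork(networkId, std::move(optNet));

    std::cout << "Network loaded" << std::endl;
    return 0;
}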
--
cgit v1.2.1