diff options
Diffstat (limited to 'tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp')
-rw-r--r-- | tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp | 12 |
1 files changed, 10 insertions, 2 deletions
diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp index 5c1337f769..87b38c5f78 100644 --- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp +++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp @@ -216,7 +216,8 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork", ("m,model-path", "Path to model file, e.g. .armnn, .tflite, .onnx. " - "DEPRECATED: .pb and .prototxt model files no longer load and are deprecated.", + "DEPRECATED: .pb and .prototxt model files are no longer loaded and are deprecated. " + "DEPRECATED: .onnx model files will no longer be loaded from 24.08 onwards.", cxxopts::value<std::string>(m_ExNetParams.m_ModelPath)); m_CxxOptions.add_options("b) Ordering") @@ -237,7 +238,8 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork", cxxopts::value<std::string>(m_RuntimeOptions.m_DynamicBackendsPath)) ("P, thread-pool-size", - "Run the network using the Arm NN thread pool with the number of threads provided. ", + "Run the network using the Arm NN thread pool with the number of threads provided. " + "DEPRECATED: The asynchronous execution interface will be removed in 24.08.", cxxopts::value<size_t>(m_ExNetParams.m_ThreadPoolSize)->default_value("0")) ("d,input-tensor-data", @@ -351,6 +353,12 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork", "Perform an per byte root mean square error calculation of the output of the inference with" " the tflite ref model.", cxxopts::value<bool>(m_ExNetParams.m_CompareWithTflite)->default_value("false") ->implicit_value("true")) ("serialize-to-armnn", "Serialize the loaded network to an .armnn file. This option will also serialize the optimized network" " in dot format. This option only works with both the TfLite parser and the Arm NN serializer" " enabled in the build. An inference will NOT be executed.", cxxopts::value<bool>(m_ExNetParams.m_SerializeToArmNN)->default_value("false") ->implicit_value("true")); m_CxxOptions.add_options("d) Optimization") |