From f806c4d075814a9dc9d206a4db123d3060ad7ebd Mon Sep 17 00:00:00 2001
From: Finn Williams
Date: Mon, 22 Feb 2021 15:13:12 +0000
Subject: IVGCVSW-5612 Fix tiny_wav2letter_relu_fixed_int8 delegate output

 * fix delegate perchannel quantization
 * change delegate to check reshape options before inputs
 * Add int8 "qsymms8" option to ExecuteNetwork
 * Add option to run ExecuteNetwork on tflite w/o delegate

!referencetests:301301
Signed-off-by: Finn Williams
Change-Id: If3e12599b17aff1199d7ab0a55e1c901e480083d
---
 .../ExecuteNetworkProgramOptions.cpp | 48 +++++++++++++++++++---
 1 file changed, 43 insertions(+), 5 deletions(-)

(limited to 'tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp')

diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
index ba28dd0173..62057eaef2 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
@@ -177,10 +177,6 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork",
                  "tensorflow-text.",
                  cxxopts::value<std::string>())
 
-                ("D,armnn-tflite-delegate",
-                 "enable Arm NN TfLite delegate",
-                 cxxopts::value<bool>(m_ExNetParams.m_EnableDelegate)->default_value("false")->implicit_value("true"))
-
                 ("m,model-path",
                  "Path to model file, e.g. .armnn, .caffemodel, .prototxt, .tflite, .onnx",
                  cxxopts::value<std::string>(m_ExNetParams.m_ModelPath))
@@ -271,7 +267,19 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork",
                  "The type of the output tensors in the network separated by comma. "
                  "If unset, defaults to \"float\" for all defined outputs. "
                  "Accepted values (float, int or qasymm8).",
-                 cxxopts::value<std::string>());
+                 cxxopts::value<std::string>())
+
+                ("T,tflite-executor",
+                 "Set the executor for the tflite model: parser, delegate, tflite"
+                 "parser is the ArmNNTfLiteParser, "
+                 "delegate is the ArmNNTfLiteDelegate, "
+                 "tflite is the TfliteInterpreter",
+                 cxxopts::value<std::string>()->default_value("parser"))
+
+                ("D,armnn-tflite-delegate",
+                 "Enable Arm NN TfLite delegate. "
+                 "This option is depreciated please use tflite-executor instead",
+                 cxxopts::value<bool>(m_ExNetParams.m_EnableDelegate)->default_value("false")->implicit_value("true"));
 
         m_CxxOptions.add_options("c) Optimization")
                 ("bf16-turbo-mode",
@@ -409,6 +417,36 @@ void ProgramOptions::ParseOptions(int ac, const char* av[])
                                                        m_ExNetParams.m_InputTensorDataFilePaths.empty();
 
     m_ExNetParams.m_DynamicBackendsPath = m_RuntimeOptions.m_DynamicBackendsPath;
+
+    std::string tfliteExecutor = GetOptionValue("tflite-executor", m_CxxResult);
+
+    if (tfliteExecutor.size() == 0 || tfliteExecutor == "parser")
+    {
+        m_ExNetParams.m_TfLiteExecutor = ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteParser;
+    }
+    else if (tfliteExecutor == "delegate")
+    {
+        m_ExNetParams.m_TfLiteExecutor = ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate;
+    }
+    else if (tfliteExecutor == "tflite")
+    {
+        m_ExNetParams.m_TfLiteExecutor = ExecuteNetworkParams::TfLiteExecutor::TfliteInterpreter;
+    }
+    else
+    {
+        ARMNN_LOG(info) << fmt::format("Invalid tflite-executor option '{}'.", tfliteExecutor);
+        throw armnn::InvalidArgumentException ("Invalid tflite-executor option");
+    }
+
+    if (m_ExNetParams.m_EnableDelegate)
+    {
+        m_ExNetParams.m_TfLiteExecutor = ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate;
+        ARMNN_LOG(info) << fmt::format("armnn-tflite-delegate option is being depreciated, "
+                                       "please use tflite-executor instead.");
+    }
+
+
+
     // Parse input tensor shape from the string we got from the command-line.
     std::vector<std::string> inputTensorShapesVector =
             ParseStringList(GetOptionValue("input-tensor-shape", m_CxxResult), ":");
--
cgit v1.2.1
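
For readers who want to try the selection logic outside of ExecuteNetwork, the following is a minimal standalone sketch of what the last hunk adds. It assumes only the cxxopts library; the TfLiteExecutor enum, the program name, and the printed messages are illustrative stand-ins, not ExecuteNetwork's actual types or strings.

// executor_demo.cpp - hypothetical standalone sketch, not part of Arm NN.
// Mirrors the precedence introduced by the patch: --tflite-executor picks the
// executor, and the deprecated -D/--armnn-tflite-delegate flag still forces
// the delegate when present.
#include <cxxopts.hpp>
#include <iostream>
#include <stdexcept>
#include <string>

// Local stand-in for ExecuteNetworkParams::TfLiteExecutor.
enum class TfLiteExecutor { ArmNNTfLiteParser, ArmNNTfLiteDelegate, TfliteInterpreter };

int main(int argc, char* argv[])
{
    cxxopts::Options options("executor_demo", "tflite-executor selection sketch");
    options.add_options()
        ("T,tflite-executor", "parser, delegate or tflite",
         cxxopts::value<std::string>()->default_value("parser"))
        ("D,armnn-tflite-delegate", "deprecated, forces the delegate",
         cxxopts::value<bool>()->default_value("false")->implicit_value("true"));

    auto result = options.parse(argc, argv);
    const std::string executor = result["tflite-executor"].as<std::string>();

    // Same mapping as the patch: empty/"parser" -> parser, then delegate, then tflite.
    TfLiteExecutor choice;
    if (executor.empty() || executor == "parser")
    {
        choice = TfLiteExecutor::ArmNNTfLiteParser;
    }
    else if (executor == "delegate")
    {
        choice = TfLiteExecutor::ArmNNTfLiteDelegate;
    }
    else if (executor == "tflite")
    {
        choice = TfLiteExecutor::TfliteInterpreter;
    }
    else
    {
        throw std::invalid_argument("Invalid tflite-executor option: " + executor);
    }

    // The deprecated flag wins, as in the patch, so old command lines keep working.
    if (result["armnn-tflite-delegate"].as<bool>())
    {
        choice = TfLiteExecutor::ArmNNTfLiteDelegate;
        std::cout << "armnn-tflite-delegate is deprecated, please use tflite-executor instead\n";
    }

    std::cout << "selected executor = " << static_cast<int>(choice) << std::endl;
    return 0;
}

Built against cxxopts, running the sketch with "-T tflite" would report the TfLite interpreter, while running it with "-D" would still force the delegate regardless of the -T value, which is the same precedence ExecuteNetwork itself gains from this change.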