diff options
Diffstat (limited to 'tests/ExecuteNetwork')
-rw-r--r--	tests/ExecuteNetwork/ExecuteNetwork.cpp	 1 +
-rw-r--r--	tests/ExecuteNetwork/ExecuteNetworkParams.cpp	21 +++++++++++++++++++++
-rw-r--r--	tests/ExecuteNetwork/ExecuteNetworkParams.hpp	 1 +
-rw-r--r--	tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp	 7 +++++++
4 files changed, 30 insertions, 0 deletions
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp index ddabf3c11f..f0a3d0821e 100644 --- a/tests/ExecuteNetwork/ExecuteNetwork.cpp +++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp @@ -389,6 +389,7 @@ int MainImpl(const ExecuteNetworkParams& params, // Creates an InferenceModel, which will parse the model and load it into an IRuntime. typename InferenceModel<TParser, TDataType>::Params inferenceModelParams; inferenceModelParams.m_ModelPath = params.m_ModelPath; + inferenceModelParams.m_AllowExpandedDims = params.m_AllowExpandedDims; inferenceModelParams.m_IsModelBinary = params.m_IsModelBinary; inferenceModelParams.m_ComputeDevices = params.m_ComputeDevices; inferenceModelParams.m_DynamicBackendsPath = params.m_DynamicBackendsPath; diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.cpp b/tests/ExecuteNetwork/ExecuteNetworkParams.cpp index b3d18cdfd1..cc75bb4323 100644 --- a/tests/ExecuteNetwork/ExecuteNetworkParams.cpp +++ b/tests/ExecuteNetwork/ExecuteNetworkParams.cpp @@ -232,6 +232,11 @@ void ExecuteNetworkParams::ValidateParams() { ARMNN_LOG(warning) << "No input files provided, input tensors will be filled with 0s."; } + + if (m_AllowExpandedDims && m_InferOutputShape) + { + throw armnn::InvalidArgumentException("infer-output-shape and allow-expanded-dims cannot be used together."); + } } #if defined(ARMNN_TFLITE_DELEGATE) @@ -277,6 +282,22 @@ armnnDelegate::DelegateOptions ExecuteNetworkParams::ToDelegateOptions() const options.m_ModelOptions.push_back(gpuAcc); options.m_ModelOptions.push_back(cpuAcc); + if (m_InferOutputShape) + { + armnn::BackendOptions networkOption("ShapeInferenceMethod", + { + {"InferAndValidate", true} + }); + options.m_ModelOptions.push_back(networkOption); + } + if (m_AllowExpandedDims) + { + armnn::BackendOptions networkOption("AllowExpandedDims", + { + {"AllowExpandedDims", true} + }); + options.m_ModelOptions.push_back(networkOption); + } delegateOptions.SetOptimizerOptions(options); // 
If v,visualize-optimized-model is enabled then construct a file name for the dot file. diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp index 04a073311d..5ef2b6ea7c 100644 --- a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp +++ b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp @@ -25,6 +25,7 @@ struct ExecuteNetworkParams TfliteInterpreter }; + bool m_AllowExpandedDims; std::string m_CachedNetworkFilePath; std::vector<armnn::BackendId> m_ComputeDevices; bool m_Concurrent; diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp index c84c79ea78..ad35092c1d 100644 --- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp +++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp @@ -228,6 +228,13 @@ ProgramOptions::ProgramOptions() : m_CxxOptions{"ExecuteNetwork", "parser)", cxxopts::value<bool>(m_ExNetParams.m_InferOutputShape)->default_value("false")->implicit_value("true")) + ("allow-expanded-dims", + "If true will disregard dimensions with a size of 1 when validating tensor shapes. Tensor sizes must " + "still match. This is an Experimental parameter that is incompatible with infer-output-shape. " + "This parameter may be removed in a later update. ", + cxxopts::value<bool>(m_ExNetParams.m_AllowExpandedDims)->default_value("false") + ->implicit_value("true")) + ("iterations", "Number of iterations to run the network for, default is set to 1. " "If you wish to run the model with different input data for every execution you can do so by " |