diff options
author | Jim Flynn <jim.flynn@arm.com> | 2022-10-14 11:20:07 +0100 |
---|---|---|
committer | TeresaARM <teresa.charlinreyes@arm.com> | 2022-10-14 14:58:27 +0000 |
commit | fcc72f53c56683fe697ac23662c49af09048a428 (patch) | |
tree | 92f903de70015a03e8748b0af160af34ceec4d22 /tests/ExecuteNetwork | |
parent | 16c76d5db629d3ef7e4cb143bfa7e1d717e1d492 (diff) | |
download | armnn-fcc72f53c56683fe697ac23662c49af09048a428.tar.gz |
IVGCVSW-7267 Make the AllowExpandedDims option work
Signed-off-by: Jim Flynn <jim.flynn@arm.com>
Change-Id: I3573078206272c3a72a2b3acf8781ab458ea6c90
Diffstat (limited to 'tests/ExecuteNetwork')
-rw-r--r-- | tests/ExecuteNetwork/ArmNNExecutor.cpp | 15 |
1 file changed, 15 insertions, 0 deletions
diff --git a/tests/ExecuteNetwork/ArmNNExecutor.cpp b/tests/ExecuteNetwork/ArmNNExecutor.cpp index 4d63b4890e..797c09a1b2 100644 --- a/tests/ExecuteNetwork/ArmNNExecutor.cpp +++ b/tests/ExecuteNetwork/ArmNNExecutor.cpp @@ -514,6 +514,7 @@ armnn::IOptimizedNetworkPtr ArmNNExecutor::OptimizeNetwork(armnn::INetwork* netw armnn::ShapeInferenceMethod::InferAndValidate : armnn::ShapeInferenceMethod::ValidateOnly; options.m_ProfilingEnabled = m_Params.m_EnableProfiling; + options.m_AllowExpandedDims = m_Params.m_AllowExpandedDims; armnn::BackendOptions gpuAcc("GpuAcc", { @@ -530,6 +531,19 @@ armnn::IOptimizedNetworkPtr ArmNNExecutor::OptimizeNetwork(armnn::INetwork* netw }); options.m_ModelOptions.push_back(gpuAcc); options.m_ModelOptions.push_back(cpuAcc); + // The shapeInferenceMethod and allowExpandedDims values have to be added to the model options + // because these are what are passed to the OptimizeSubgraphViews method and are used to create + // the new optimized INetwork that method uses + armnn::BackendOptions allowExDimOpt("AllowExpandedDims", + { + { "AllowExpandedDims", m_Params.m_AllowExpandedDims } + }); + options.m_ModelOptions.push_back(allowExDimOpt); + armnn::BackendOptions shapeInferOpt("ShapeInferenceMethod", + { + { "InferAndValidate", m_Params.m_InferOutputShape } + }); + options.m_ModelOptions.push_back(shapeInferOpt); const auto optimization_start_time = armnn::GetTimeNow(); optNet = armnn::Optimize(*network, m_Params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options); @@ -758,6 +772,7 @@ ArmNNExecutor::TfliteParser::TfliteParser(const ExecuteNetworkParams& params) armnnTfLiteParser::ITfLiteParser::TfLiteParserOptions options; options.m_StandInLayerForUnsupported = params.m_ParseUnsupported; options.m_InferAndValidate = params.m_InferOutputShape; + options.m_AllowExpandedDims = params.m_AllowExpandedDims; m_Parser = armnnTfLiteParser::ITfLiteParser::Create(options); } |