path: root/src/armnnTfLiteParser/TfLiteParser.cpp
author     Mike Kelly <mike.kelly@arm.com>    2022-05-16 23:10:42 +0100
committer  TeresaARM <teresa.charlinreyes@arm.com>    2022-05-18 14:19:19 +0000
commit     80512b0e1e98cb6782f5526fc8308ae6a571cd59 (patch)
tree       bad2f314defadd4b340343d99b6e157b46622039 /src/armnnTfLiteParser/TfLiteParser.cpp
parent     d0420cb62b71f9c015d2b0e889011899051e56c0 (diff)
download   armnn-80512b0e1e98cb6782f5526fc8308ae6a571cd59.tar.gz
IVGCVSW-6929 Support for models with implicit expanded dimensions

* Added allow-expanded-dims to TFLite parser and ArmNN delegate
* If true ArmNN will disregard dimensions with a size of 1 when validating tensor shapes. Tensor sizes must still match.
* This allows us to support models where tensors have expanded dimensions (i.e. extra dimensions with a size of 1).
* Fixed bug in Network where it assumed that only the first option could be ShapeInferenceMethod.
* Fixed bug where m_ShapeInferenceMethod was lost when copying or moving Graphs.
* Changed Delegate to pass "infer-output-shape", "allow-expanded-dims" and other BackendOptions through to the Network during construction.

Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: Ibe7c5ae6597796fc9164cb07bd372bd7f8f8cacf
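For context, a minimal sketch of how a caller could switch the new behaviour on through the parser's options struct. The member names m_InferAndValidate and m_AllowExpandedDims are taken from the diff below; the ITfLiteParser::Create and CreateNetworkFromBinaryFile calls and the model path are assumptions about the public parser API, not part of this commit.

// Sketch only: enabling shape inference and expanded-dims tolerance when
// using the TfLite parser. Option member names match the diff below; the
// Create/CreateNetworkFromBinaryFile calls are assumed public API.
#include <armnnTfLiteParser/ITfLiteParser.hpp>

int main()
{
    using namespace armnnTfLiteParser;

    ITfLiteParser::TfLiteParserOptions options;
    options.m_InferAndValidate  = true;   // becomes BackendOptions "ShapeInferenceMethod"
    options.m_AllowExpandedDims = true;   // becomes BackendOptions "AllowExpandedDims"

    ITfLiteParserPtr parser = ITfLiteParser::Create(options);

    // "model.tflite" is a placeholder path.
    armnn::INetworkPtr network = parser->CreateNetworkFromBinaryFile("model.tflite");
    return network ? 0 : 1;
}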
Diffstat (limited to 'src/armnnTfLiteParser/TfLiteParser.cpp')
-rw-r--r--  src/armnnTfLiteParser/TfLiteParser.cpp | 25
1 file changed, 18 insertions, 7 deletions
diff --git a/src/armnnTfLiteParser/TfLiteParser.cpp b/src/armnnTfLiteParser/TfLiteParser.cpp
index aa07f7b3f9..49f1f9f856 100644
--- a/src/armnnTfLiteParser/TfLiteParser.cpp
+++ b/src/armnnTfLiteParser/TfLiteParser.cpp
@@ -793,16 +793,27 @@ INetworkPtr TfLiteParserImpl::CreateNetworkFromModel()
     using NetworkOptions = std::vector<BackendOptions>;
     NetworkOptions networkOptions = {};
-    if (m_Options && m_Options.value().m_InferAndValidate)
+    if (m_Options)
     {
-        BackendOptions shapeInferenceMethodOption("ShapeInferenceMethod",
-                                                  {
-                                                      { "InferAndValidate", true }
-                                                  });
+        if (m_Options.value().m_InferAndValidate)
+        {
+            BackendOptions shapeInferenceMethodOption("ShapeInferenceMethod",
+                                                      {
+                                                          { "InferAndValidate", true }
+                                                      });
-        networkOptions.push_back(shapeInferenceMethodOption);
-    }
+            networkOptions.push_back(shapeInferenceMethodOption);
+        }
+        if (m_Options.value().m_AllowExpandedDims)
+        {
+            BackendOptions shapeInferenceMethodOption("AllowExpandedDims",
+                                                      {
+                                                          { "AllowExpandedDims", true }
+                                                      });
+            networkOptions.push_back(shapeInferenceMethodOption);
+        }
+    }
     m_Network = INetwork::Create(networkOptions);
     ARMNN_ASSERT(m_Model.get() != nullptr);
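For comparison, a minimal sketch of building the equivalent NetworkOptions by hand when creating an INetwork directly, mirroring what the parser now does in the hunk above. The option names and keys come from the diff; the includes and the wrapper function are illustrative assumptions.

// Sketch: hand-built equivalent of the NetworkOptions the parser assembles above.
// Option/key names come from the diff; includes and the wrapper function are assumed.
#include <armnn/BackendOptions.hpp>
#include <armnn/INetwork.hpp>
#include <vector>

armnn::INetworkPtr CreateNetworkAllowingExpandedDims()
{
    std::vector<armnn::BackendOptions> networkOptions;

    // Same option name/key the parser pushes when m_InferAndValidate is set.
    networkOptions.push_back(armnn::BackendOptions("ShapeInferenceMethod",
                                                   { { "InferAndValidate", true } }));

    // Same option name/key the parser pushes when m_AllowExpandedDims is set.
    networkOptions.push_back(armnn::BackendOptions("AllowExpandedDims",
                                                   { { "AllowExpandedDims", true } }));

    return armnn::INetwork::Create(networkOptions);
}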