path: root/tests/InferenceModel.hpp
author    Mike Kelly <mike.kelly@arm.com>    2022-05-16 23:10:42 +0100
committer Ryan OShea <ryan.oshea3@arm.com>   2022-05-19 11:06:34 +0100
commit    21fe06fad6760a0d453f2de9c8dd790983ae940c (patch)
tree      bad2f314defadd4b340343d99b6e157b46622039 /tests/InferenceModel.hpp
parent    b5e03cc39cdabc49bf117c119073f60e9d36a474 (diff)
download  armnn-21fe06fad6760a0d453f2de9c8dd790983ae940c.tar.gz
IVGCVSW-6929 Support for models with implicit expanded dimensions

* Added allow-expanded-dims to TFLite parser and ArmNN delegate
* If true, ArmNN will disregard dimensions with a size of 1 when validating
  tensor shapes. Tensor sizes must still match.
* This allows us to support models where tensors have expanded dimensions
  (i.e. extra dimensions with a size of 1).
* Fixed bug in Network where it assumed that only the first option could be
  ShapeInferenceMethod.
* Fixed bug where m_ShapeInferenceMethod was lost when copying or moving Graphs.
* Changed Delegate to pass "infer-output-shape", "allow-expanded-dims" and
  other BackendOptions through to the Network during construction.

Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: Ibe7c5ae6597796fc9164cb07bd372bd7f8f8cacf
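To make the new behaviour concrete, the following is a minimal sketch of the kind of shape comparison the commit message describes, where dimensions of size 1 are disregarded. It is an illustration only, not ArmNN's actual validation code; AreShapesCompatible is a hypothetical helper.

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Hypothetical helper: with allow-expanded-dims enabled, shapes such as
    // [1, 2, 3, 1] and [2, 3] compare as compatible because dimensions of
    // size 1 are dropped before comparing the remaining dimensions.
    // Stripping size-1 dimensions does not change the total element count,
    // so tensor sizes still have to match, as the commit message requires.
    bool AreShapesCompatible(std::vector<uint32_t> a, std::vector<uint32_t> b)
    {
        auto stripOnes = [](std::vector<uint32_t>& dims)
        {
            dims.erase(std::remove(dims.begin(), dims.end(), 1u), dims.end());
        };
        stripOnes(a);
        stripOnes(b);
        return a == b;
    }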
Diffstat (limited to 'tests/InferenceModel.hpp')
-rw-r--r--  tests/InferenceModel.hpp | 3
1 file changed, 3 insertions, 0 deletions
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index e2a1a97568..93716e1a6f 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -95,6 +95,7 @@ struct Params
std::vector<armnn::BackendId> m_ComputeDevices;
std::string m_DynamicBackendsPath;
size_t m_SubgraphId;
+ bool m_AllowExpandedDims;
bool m_IsModelBinary;
bool m_VisualizePostOptimizationModel;
bool m_EnableFp16TurboMode;
@@ -117,6 +118,7 @@ struct Params
Params()
: m_ComputeDevices{}
, m_SubgraphId(0)
+ , m_AllowExpandedDims(false)
, m_IsModelBinary(true)
, m_VisualizePostOptimizationModel(false)
, m_EnableFp16TurboMode(false)
@@ -268,6 +270,7 @@ public:
// Create a network from a file on disk
IParser::TfLiteParserOptions options;
+ options.m_AllowExpandedDims = params.m_AllowExpandedDims;
options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
options.m_InferAndValidate = params.m_InferOutputShape;
auto parser(IParser::Create(options));
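For reference, a caller such as ExecuteNetwork could populate the new field alongside the existing ones before constructing the model. This is a hedged sketch that uses only members visible in the hunks above; the backend choice and surrounding setup are assumptions for illustration.

    Params params;                               // namespace qualification omitted
    params.m_ComputeDevices    = { "CpuRef" };   // backend chosen for illustration
    params.m_IsModelBinary     = true;
    params.m_InferOutputShape  = true;           // forwarded as options.m_InferAndValidate
    params.m_AllowExpandedDims = true;           // new field: forwarded as options.m_AllowExpandedDims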