| author | Jim Flynn <jim.flynn@arm.com> | 2022-10-14 11:20:07 +0100 |
|---|---|---|
| committer | TeresaARM <teresa.charlinreyes@arm.com> | 2022-10-14 14:58:27 +0000 |
| commit | fcc72f53c56683fe697ac23662c49af09048a428 (patch) | |
| tree | 92f903de70015a03e8748b0af160af34ceec4d22 | |
| parent | 16c76d5db629d3ef7e4cb143bfa7e1d717e1d492 (diff) | |
| download | armnn-fcc72f53c56683fe697ac23662c49af09048a428.tar.gz | |
IVGCVSW-7267 Make the AllowExpandedDims option work
Signed-off-by: Jim Flynn <jim.flynn@arm.com>
Change-Id: I3573078206272c3a72a2b3acf8781ab458ea6c90
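
In practice the option means what the comment added to `OptimizerOptions` below says: when tensor shapes are compared, dimensions of size 1 are ignored. The following standalone sketch (illustrative only, not Arm NN code) shows that idea: `[1, 224, 224, 3]` and `[224, 224, 3]` compare as the same shape once the size-1 dimensions are dropped.

```cpp
// Illustrative sketch of the "allow expanded dims" idea: dimensions of
// size 1 are ignored when deciding whether two shapes match.
#include <algorithm>
#include <vector>

bool SameShapeIgnoringExpandedDims(std::vector<unsigned int> a,
                                   std::vector<unsigned int> b)
{
    // Drop every dimension whose size is exactly 1.
    auto strip = [](std::vector<unsigned int>& dims)
    {
        dims.erase(std::remove(dims.begin(), dims.end(), 1u), dims.end());
    };
    strip(a);
    strip(b);
    return a == b;   // e.g. {1, 224, 224, 3} matches {224, 224, 3}
}
```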
| -rw-r--r-- | include/armnn/INetwork.hpp | 14 |
|---|---|---|
| -rw-r--r-- | include/armnn/backends/OptimizationViews.hpp | 4 |
| -rw-r--r-- | src/armnn/Graph.hpp | 4 |
| -rw-r--r-- | src/armnn/Network.cpp | 6 |
| -rw-r--r-- | src/armnn/Network.hpp | 2 |
| -rw-r--r-- | tests/ExecuteNetwork/ArmNNExecutor.cpp | 15 |
6 files changed, 34 insertions, 11 deletions
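
Before the full diff, a minimal sketch of how application code might enable the new flag when optimizing a network. The member `m_AllowExpandedDims`, the `"AllowExpandedDims"` backend-option group, and the need to mirror the flag into `m_ModelOptions` all come from this change; the helper name, the `CpuRef` backend choice, and the surrounding setup are assumptions for illustration.

```cpp
// Hedged sketch: enabling AllowExpandedDims when calling armnn::Optimize,
// mirroring what tests/ExecuteNetwork/ArmNNExecutor.cpp does in this commit.
#include <armnn/BackendOptions.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/IRuntime.hpp>

#include <vector>

armnn::IOptimizedNetworkPtr OptimizeWithExpandedDims(const armnn::INetwork& network,
                                                     armnn::IRuntime& runtime)
{
    armnn::OptimizerOptions options;
    options.m_AllowExpandedDims = true;  // new flag introduced by this commit

    // The flag also has to travel via the model options so that
    // OptimizeSubgraphViews sees it when it builds its own INetwork.
    armnn::BackendOptions allowExDimOpt("AllowExpandedDims",
    {
        { "AllowExpandedDims", true }
    });
    options.m_ModelOptions.push_back(allowExDimOpt);

    std::vector<armnn::BackendId> backends = { "CpuRef" };  // assumed backend choice
    return armnn::Optimize(network, backends, runtime.GetDeviceSpec(), options);
}
```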
```diff
diff --git a/include/armnn/INetwork.hpp b/include/armnn/INetwork.hpp
index fefb2ebc2d..0289a90e71 100644
--- a/include/armnn/INetwork.hpp
+++ b/include/armnn/INetwork.hpp
@@ -135,6 +135,7 @@ struct OptimizerOptions
         , m_ModelOptions()
         , m_ProfilingEnabled(false)
         , m_ExportEnabled(false)
+        , m_AllowExpandedDims(false)
     {}
 
     OptimizerOptions(bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16, bool importEnabled,
@@ -147,6 +148,7 @@ struct OptimizerOptions
         , m_ModelOptions(modelOptions)
         , m_ProfilingEnabled(false)
         , m_ExportEnabled(exportEnabled)
+        , m_AllowExpandedDims(false)
     {
         if (m_ReduceFp32ToFp16 && m_ReduceFp32ToBf16)
         {
@@ -156,7 +158,8 @@ struct OptimizerOptions
 
     OptimizerOptions(bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16 = false,
                      ShapeInferenceMethod shapeInferenceMethod = armnn::ShapeInferenceMethod::ValidateOnly,
-                     bool importEnabled = false, ModelOptions modelOptions = {}, bool exportEnabled = false)
+                     bool importEnabled = false, ModelOptions modelOptions = {}, bool exportEnabled = false,
+                     bool allowExpandedDims = false)
         : m_ReduceFp32ToFp16(reduceFp32ToFp16)
         , m_Debug(debug)
         , m_ReduceFp32ToBf16(reduceFp32ToBf16)
@@ -165,6 +168,7 @@ struct OptimizerOptions
         , m_ModelOptions(modelOptions)
         , m_ProfilingEnabled(false)
         , m_ExportEnabled(exportEnabled)
+        , m_AllowExpandedDims(allowExpandedDims)
     {
         if (m_ReduceFp32ToFp16 && m_ReduceFp32ToBf16)
         {
@@ -184,6 +188,7 @@ struct OptimizerOptions
         stream << "\tImportEnabled: " << m_ImportEnabled << "\n";
         stream << "\tExportEnabled: " << m_ExportEnabled << "\n";
         stream << "\tProfilingEnabled: " << m_ProfilingEnabled << "\n";
+        stream << "\tAllowExpandedDims: " << m_AllowExpandedDims << "\n";
 
         stream << "\tModelOptions: \n";
         for (auto optionsGroup : m_ModelOptions)
@@ -231,6 +236,9 @@ struct OptimizerOptions
 
     // Enable Export
     bool m_ExportEnabled;
+
+    // When calculating tensor sizes dimensions of size == 1 will be ignored
+    bool m_AllowExpandedDims;
 };
 
 class IWorkloadFactory;
@@ -246,8 +254,8 @@ using CompiledBlobPtr = std::unique_ptr<void, CompiledBlobDeleter>;
 class INetwork
 {
 public:
-    static INetwork* CreateRaw(NetworkOptions networkOptions = {});
-    static INetworkPtr Create(NetworkOptions networkOptions = {});
+    static INetwork* CreateRaw(const NetworkOptions& networkOptions = {});
+    static INetworkPtr Create(const NetworkOptions& networkOptions = {});
     static void Destroy(INetwork* network);
 
     Status PrintGraph();
diff --git a/include/armnn/backends/OptimizationViews.hpp b/include/armnn/backends/OptimizationViews.hpp
index a7714ee0fa..110c5f7492 100644
--- a/include/armnn/backends/OptimizationViews.hpp
+++ b/include/armnn/backends/OptimizationViews.hpp
@@ -13,7 +13,7 @@ namespace armnn
 class OptimizationViews
 {
 public:
-    OptimizationViews(NetworkOptions networkOptions = {}) : m_INetwork(INetwork::Create(networkOptions)) {}
+    OptimizationViews(const NetworkOptions& networkOptions = {}) : m_INetwork(INetwork::Create(networkOptions)) {}
     OptimizationViews(const OptimizationViews&) = delete;
     OptimizationViews& operator=(const OptimizationViews&) = delete;
     OptimizationViews(OptimizationViews&&) = default;
@@ -72,7 +72,7 @@ private:
     /// INetworkPtr object used only as a container for any layer generated by the optimization process
     /// Also, can use to AddPrecompiledLayer to the SubstitutionPair
     /// Use in favour of m_Graph which depreciates in 23.08
-    INetworkPtr m_INetwork = INetwork::Create();;
+    INetworkPtr m_INetwork = INetwork::Create();
 };
 
 } //namespace armnn
diff --git a/src/armnn/Graph.hpp b/src/armnn/Graph.hpp
index 482d9277e8..1b87751e9b 100644
--- a/src/armnn/Graph.hpp
+++ b/src/armnn/Graph.hpp
@@ -119,12 +119,12 @@ public:
         m_LayersInOrder = std::move(other.m_LayersInOrder);
         m_Views = std::move(other.m_Views);
         m_Profiler = std::move(other.m_Profiler);
+        m_AllowExpandedDims = other.m_AllowExpandedDims;
+        m_ShapeInferenceMethod = other.m_ShapeInferenceMethod;
         other.ForEachLayer([this](Layer* otherLayer)
         {
             otherLayer->Reparent(*this, m_Layers.end());
         });
-        m_AllowExpandedDims = other.m_AllowExpandedDims;
-        m_ShapeInferenceMethod = other.m_ShapeInferenceMethod;
 
         ARMNN_ASSERT(other.m_PosInGraphMap.empty());
         ARMNN_ASSERT(other.m_Layers.empty());
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 5930805f11..1b1815f73d 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -442,12 +442,12 @@ void INetwork::ExecuteStrategy(IStrategy& strategy) const
     return pNetworkImpl->ExecuteStrategy(strategy);
 }
 
-armnn::INetwork* INetwork::CreateRaw(NetworkOptions networkOptions)
+armnn::INetwork* INetwork::CreateRaw(const NetworkOptions& networkOptions)
 {
     return new INetwork(networkOptions);
 }
 
-armnn::INetworkPtr INetwork::Create(NetworkOptions networkOptions)
+armnn::INetworkPtr INetwork::Create(const NetworkOptions& networkOptions)
 {
     return INetworkPtr(CreateRaw(networkOptions), &INetwork::Destroy);
 }
@@ -1879,7 +1879,7 @@ bool NetworkImpl::GetAllowExpandedDims()
     return allowExpandedDims;
 }
 
-NetworkImpl::NetworkImpl(NetworkOptions networkOptions)
+NetworkImpl::NetworkImpl(const NetworkOptions& networkOptions)
     : m_NetworkOptions(networkOptions),
       m_Graph(std::make_unique<Graph>(GetShapeInferenceMethod(), GetAllowExpandedDims()))
 {}
diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp
index 8bd56d367b..5ca16e2968 100644
--- a/src/armnn/Network.hpp
+++ b/src/armnn/Network.hpp
@@ -31,7 +31,7 @@ using NetworkImplPtr = std::unique_ptr<NetworkImpl, void (*)(NetworkImpl* networ
 class NetworkImpl
 {
 public:
-    NetworkImpl(NetworkOptions networkOptions = {});
+    NetworkImpl(const NetworkOptions& networkOptions = {});
     ~NetworkImpl();
 
     const Graph& GetGraph() const
diff --git a/tests/ExecuteNetwork/ArmNNExecutor.cpp b/tests/ExecuteNetwork/ArmNNExecutor.cpp
index 4d63b4890e..797c09a1b2 100644
--- a/tests/ExecuteNetwork/ArmNNExecutor.cpp
+++ b/tests/ExecuteNetwork/ArmNNExecutor.cpp
@@ -514,6 +514,7 @@ armnn::IOptimizedNetworkPtr ArmNNExecutor::OptimizeNetwork(armnn::INetwork* netw
                                        armnn::ShapeInferenceMethod::InferAndValidate :
                                        armnn::ShapeInferenceMethod::ValidateOnly;
     options.m_ProfilingEnabled = m_Params.m_EnableProfiling;
+    options.m_AllowExpandedDims = m_Params.m_AllowExpandedDims;
 
     armnn::BackendOptions gpuAcc("GpuAcc",
     {
@@ -530,6 +531,19 @@ armnn::IOptimizedNetworkPtr ArmNNExecutor::OptimizeNetwork(armnn::INetwork* netw
     });
     options.m_ModelOptions.push_back(gpuAcc);
     options.m_ModelOptions.push_back(cpuAcc);
+    // The shapeInferenceMethod and allowExpandedDims values have to be added to the model options
+    // because these are what are passed to the OptimizeSubgraphViews method and are used to create
+    // the new optimized INetwork that method uses
+    armnn::BackendOptions allowExDimOpt("AllowExpandedDims",
+    {
+        { "AllowExpandedDims", m_Params.m_AllowExpandedDims }
+    });
+    options.m_ModelOptions.push_back(allowExDimOpt);
+    armnn::BackendOptions shapeInferOpt("ShapeInferenceMethod",
+    {
+        { "InferAndValidate", m_Params.m_InferOutputShape }
+    });
+    options.m_ModelOptions.push_back(shapeInferOpt);
 
     const auto optimization_start_time = armnn::GetTimeNow();
     optNet = armnn::Optimize(*network, m_Params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);
@@ -758,6 +772,7 @@ ArmNNExecutor::TfliteParser::TfliteParser(const ExecuteNetworkParams& params)
     armnnTfLiteParser::ITfLiteParser::TfLiteParserOptions options;
     options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
     options.m_InferAndValidate = params.m_InferOutputShape;
+    options.m_AllowExpandedDims = params.m_AllowExpandedDims;
 
     m_Parser = armnnTfLiteParser::ITfLiteParser::Create(options);
 }
```
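
The same pair of option groups can also be supplied at network-creation time, since `INetwork::Create` now takes the `NetworkOptions` by const reference and `NetworkImpl::GetAllowExpandedDims` (referenced in the Network.cpp hunk above) reads them back when constructing the `Graph`. A minimal sketch follows; the option group and key names are the ones used in this change, while the helper function name and the choice to hard-code `true` are illustrative.

```cpp
// Hedged sketch: passing AllowExpandedDims and the shape inference method
// through NetworkOptions when creating an INetwork, matching the option
// names this commit adds to the executor's model options.
#include <armnn/BackendOptions.hpp>
#include <armnn/INetwork.hpp>

armnn::INetworkPtr CreateNetworkWithExpandedDims()
{
    armnn::BackendOptions allowExpandedDims("AllowExpandedDims",
    {
        { "AllowExpandedDims", true }
    });
    armnn::BackendOptions shapeInference("ShapeInferenceMethod",
    {
        { "InferAndValidate", true }
    });

    // NetworkOptions is a vector of BackendOptions; NetworkImpl parses these
    // to decide how its Graph is constructed.
    armnn::NetworkOptions networkOptions{ allowExpandedDims, shapeInference };
    return armnn::INetwork::Create(networkOptions);
}
```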