author     Mike Kelly <mike.kelly@arm.com>  2020-11-17 11:41:38 +0000
committer  TeresaARM <teresa.charlinreyes@arm.com>  2020-11-17 12:57:52 +0000
commit     1ac690a12d4952acb8c06b41f0492ed47524b1f1 (patch)
tree       2a9e5c8c6e68215e4a579b9e47aac132439a5e8e
parent     91c4171421633b3ff9764bd586f43137aef0ff1a (diff)
MLCE-278-IVGCVSW-5530 FusedActivation issues
* GetOverriddenDataType was returning incorrect quantization data
* Optimized CpuAcc and GpuAcc SubGraphs fail validation on debug versions of ArmNN

Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: Ie97935cc2af67bd9aeebc94b63dafa458bd1aa8c
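The first point concerns how the bias TensorInfo reaches the Arm Compute validate functions. Below is a minimal, self-contained sketch of how rebuilding a tensor description from a single scalar scale and offset (as the removed GetOverriddenDataType did) can drop quantization detail. MockTensorInfo, QuantInfo and RebuildInfo are hypothetical stand-ins, not ArmNN types, and the per-axis example is only one plausible way the returned data could be wrong; the commit message itself only states that the data was incorrect.

```cpp
// Hypothetical, simplified model; the real armnn::TensorInfo API is richer.
#include <cassert>
#include <vector>

struct QuantInfo
{
    std::vector<float> scales;   // one entry per tensor, or one entry per axis
    int offset = 0;
};

struct MockTensorInfo
{
    QuantInfo quant;
    float GetQuantizationScale()  const { return quant.scales.empty() ? 1.0f : quant.scales[0]; }
    int   GetQuantizationOffset() const { return quant.offset; }
};

// Mirrors the shape of the removed helper: rebuilding the info from one scalar
// scale and offset collapses any richer quantization description it held.
MockTensorInfo RebuildInfo(const MockTensorInfo& info)
{
    return MockTensorInfo{ QuantInfo{ { info.GetQuantizationScale() },
                                      info.GetQuantizationOffset() } };
}

int main()
{
    MockTensorInfo bias{ QuantInfo{ { 0.1f, 0.2f, 0.3f }, 0 } };  // e.g. per-axis scales
    MockTensorInfo rebuilt = RebuildInfo(bias);
    assert(rebuilt.quant.scales.size() == 1);  // detail lost in the rebuilt info
    return 0;
}
```

The fix in both backends is simply to forward baseLayer->m_Bias->GetTensorInfo() unchanged, as the hunks below show.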
-rw-r--r--  src/backends/aclCommon/ArmComputeSubgraphUtils.hpp  37
-rw-r--r--  src/backends/cl/ClBackend.cpp                       40
-rw-r--r--  src/backends/neon/NeonBackend.cpp                   37
3 files changed, 71 insertions, 43 deletions
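Most of the remaining changes repeat the same bookkeeping pattern in ClBackend::OptimizeSubgraphView and NeonBackend::OptimizeSubgraphView: record every layer of the sub-graph before attempting fusion, erase each pair of layers that is actually fused, and report whatever is left as untouched sub-graphs so that a partially substituted sub-graph still accounts for all of its layers. The sketch below models that pattern with hypothetical types (LayerId, Layer, the hard-coded fusion result); it is not the ArmNN API.

```cpp
// Self-contained sketch of the untouched-layer bookkeeping added by this patch.
#include <iostream>
#include <map>
#include <string>
#include <vector>

using LayerId = unsigned int;

struct Layer { LayerId guid; std::string name; };

int main()
{
    std::vector<Layer> subgraph = { {1, "Conv2d"}, {2, "Activation"}, {3, "Pooling2d"} };

    // 1. Before any fusion is attempted, record every layer of the sub-graph.
    std::map<LayerId, Layer*> untouched;
    for (Layer& layer : subgraph)
    {
        untouched.insert({layer.guid, &layer});
    }

    // 2. Whenever a base layer and its following activation are fused, drop both
    //    from the map (the diff does this with untouched.erase(...->GetGuid())).
    bool fusedConvIntoActivation = true;  // stands in for a successful workload validation
    if (fusedConvIntoActivation)
    {
        untouched.erase(1);
        untouched.erase(2);
    }

    // 3. If any substitutions were made, report every remaining layer as its own
    //    untouched sub-graph; otherwise the optimized sub-graph loses layers and
    //    debug-build validation fails.
    for (const auto& pair : untouched)
    {
        std::cout << "untouched: " << pair.second->name << "\n";  // prints "Pooling2d"
    }
    return 0;
}
```

In the diff, ClBackend fills the map in an extra loop and then resets the iterator back to subgraph.end(), while NeonBackend adds a second, identical while loop; the effect is the same, and both finish by calling the new ReportUntouchedLayers helper when any substitutions were recorded.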
diff --git a/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp b/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
index d969d5bc31..860d88df80 100644
--- a/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
+++ b/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
@@ -47,38 +47,17 @@ SubgraphView::OutputSlots CreateOutputsFrom(const std::vector<Layer*>& layers)
} // namespace
-inline const TensorInfo GetOverriddenDataType(const TensorInfo& info, Optional<DataType> type)
+inline void ReportUntouchedLayers(OptimizationViews& optimizationViews, std::map<LayerGuid, Layer*> untouched)
{
- if (!type)
+ std::vector<Layer*> untouchedVector;
+ for (const auto& pair : untouched)
{
- return info;
+ Layer* layer = pair.second;
+ SubgraphView subgraphView(CreateInputsFrom({layer}),
+ CreateOutputsFrom({layer}),
+ {layer});
+ optimizationViews.AddUntouchedSubgraph(std::move(subgraphView));
}
-
- return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
-}
-
-inline armnn::Optional<armnn::DataType> GetOptionalBiasTypeFromWeightsType(armnn::Optional<armnn::DataType> weightsType)
-{
- if (!weightsType)
- {
- return weightsType;
- }
-
- switch(weightsType.value())
- {
- case armnn::DataType::BFloat16:
- case armnn::DataType::Float16:
- case armnn::DataType::Float32:
- return weightsType;
- case armnn::DataType::QAsymmS8:
- case armnn::DataType::QAsymmU8:
- case armnn::DataType::QSymmS8:
- case armnn::DataType::QSymmS16:
- return armnn::DataType::Signed32;
- default:
- ARMNN_ASSERT_MSG(false, "GetBiasTypeFromWeightsType(): Unsupported data type.");
- }
- return armnn::EmptyOptional();
}
template<typename LayerType>
diff --git a/src/backends/cl/ClBackend.cpp b/src/backends/cl/ClBackend.cpp
index 57a5851650..80c7969e91 100644
--- a/src/backends/cl/ClBackend.cpp
+++ b/src/backends/cl/ClBackend.cpp
@@ -148,7 +148,16 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
auto it = subgraph.end();
bool isFastMathEnabled = false;
+ std::map<LayerGuid, Layer*> untouched;
+ while (it != subgraph.begin())
+ {
+ --it;
+ Layer& base = **it;
+ untouched.insert({base.GetGuid(), &base});
+ }
+
+ it = subgraph.end();
#if defined(ARMCOMPUTECL_ENABLED)
IBackendInternal::IBackendSpecificModelContextPtr modelContextPtr = CreateBackendSpecificModelContext(modelOptions);
@@ -161,7 +170,6 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
}
}
#endif
-
while (it != subgraph.begin())
{
--it;
@@ -199,9 +207,7 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (baseLayer->GetParameters().m_BiasEnabled)
{
- biases = GetOverriddenDataType(baseLayer->m_Bias->GetTensorInfo(),
- GetOptionalBiasTypeFromWeightsType(
- baseLayer->m_Weight->GetTensorInfo().GetDataType()));
+ biases = baseLayer->m_Bias->GetTensorInfo();
}
arm_compute::Status status = ClConvolution2dWorkloadValidate(
@@ -220,6 +226,8 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::DepthwiseConvolution2d)
@@ -231,9 +239,7 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
if (baseLayer->GetParameters().m_BiasEnabled)
{
- biases = GetOverriddenDataType(baseLayer->m_Bias->GetTensorInfo(),
- GetOptionalBiasTypeFromWeightsType(
- baseLayer->m_Weight->GetTensorInfo().GetDataType()));
+ biases = baseLayer->m_Bias->GetTensorInfo();
}
arm_compute::Status status = ClDepthwiseConvolutionWorkloadValidate(
@@ -251,6 +257,8 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::FullyConnected)
@@ -272,6 +280,8 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::BatchNormalization)
@@ -302,6 +312,8 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
replacementLayer->m_Gamma = std::move(baseLayer->m_Gamma);
replacementLayer->m_Mean = std::move(baseLayer->m_Mean);
replacementLayer->m_Variance = std::move(baseLayer->m_Variance);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::Addition)
@@ -321,6 +333,8 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::Division)
@@ -340,6 +354,8 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::Multiplication)
@@ -359,6 +375,8 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::Subtraction)
@@ -378,6 +396,8 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
}
@@ -386,11 +406,15 @@ OptimizationViews ClBackend::OptimizeSubgraphView(const SubgraphView& subgraph,
}
}
}
- // end each optimization
+
if (optimizationViews.GetSubstitutions().empty())
{
optimizationViews.AddUntouchedSubgraph(SubgraphView(subgraph));
}
+ else
+ {
+ ReportUntouchedLayers(optimizationViews, untouched);
+ }
return optimizationViews;
}
diff --git a/src/backends/neon/NeonBackend.cpp b/src/backends/neon/NeonBackend.cpp
index 150bc345db..240456f44b 100644
--- a/src/backends/neon/NeonBackend.cpp
+++ b/src/backends/neon/NeonBackend.cpp
@@ -136,11 +136,20 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
OptimizationViews optimizationViews;
auto it = subgraph.end();
+ std::map<LayerGuid, Layer*> untouched;
while (it != subgraph.begin())
{
--it;
Layer& base = **it;
+ untouched.insert({base.GetGuid(), &base});
+ }
+
+ it = subgraph.end();
+ while (it != subgraph.begin())
+ {
+ --it;
+ Layer& base = **it;
if ((base.GetType() == LayerType::DepthwiseConvolution2d || base.GetType() == LayerType::Convolution2d
|| base.GetType() == LayerType::BatchNormalization || base.GetType() == LayerType::FullyConnected
@@ -174,9 +183,7 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (baseLayer->GetParameters().m_BiasEnabled)
{
- biases = GetOverriddenDataType(baseLayer->m_Bias->GetTensorInfo(),
- GetOptionalBiasTypeFromWeightsType(
- baseLayer->m_Weight->GetTensorInfo().GetDataType()));
+ biases = baseLayer->m_Bias->GetTensorInfo();
}
arm_compute::Status status = NeonConvolution2dWorkloadValidate(
@@ -195,6 +202,8 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::DepthwiseConvolution2d)
@@ -206,9 +215,7 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
if (baseLayer->GetParameters().m_BiasEnabled)
{
- biases = GetOverriddenDataType(baseLayer->m_Bias->GetTensorInfo(),
- GetOptionalBiasTypeFromWeightsType(
- baseLayer->m_Weight->GetTensorInfo().GetDataType()));
+ biases = baseLayer->m_Bias->GetTensorInfo();
}
arm_compute::Status status = NeonDepthwiseConvolutionWorkloadValidate(
@@ -226,6 +233,8 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::FullyConnected)
@@ -247,6 +256,8 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::BatchNormalization)
@@ -278,6 +289,8 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
replacementLayer->m_Gamma = std::move(baseLayer->m_Gamma);
replacementLayer->m_Mean = std::move(baseLayer->m_Mean);
replacementLayer->m_Variance = std::move(baseLayer->m_Variance);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::Addition)
@@ -297,6 +310,8 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::Division)
@@ -316,6 +331,8 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::Multiplication)
@@ -335,6 +352,8 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
else if (base.GetType() == LayerType::Subtraction)
@@ -354,6 +373,8 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
activationLayer,
activationDesc,
name);
+ untouched.erase(baseLayer->GetGuid());
+ untouched.erase(activationLayer->GetGuid());
}
}
}
@@ -367,6 +388,10 @@ OptimizationViews NeonBackend::OptimizeSubgraphView(const SubgraphView& subgraph
{
optimizationViews.AddUntouchedSubgraph(SubgraphView(subgraph));
}
+ else
+ {
+ ReportUntouchedLayers(optimizationViews, untouched);
+ }
return optimizationViews;
}