author     Andre Ghattas <andre.ghattas@arm.com>          2019-08-07 12:18:38 +0100
committer  Matteo Martincigh <matteo.martincigh@arm.com>  2019-09-04 09:41:41 +0000
commit     23ae2eae1caefba4948e6afda154a66238b26c2a (patch)
tree       084b6e7b45add57a363826d1088c7821fe93e9e9 /src
parent     9bb51d7c3668f6b2715735f286ffd89b727d6805 (diff)
IVGCVBENCH-1337 Added additional layer parameters to dot file and -v option
* Generic layer parameters now show up in the dot file
* Convolution layer parameters have also been added to the dot file
* ExecuteNetwork has an additional -v flag which generates the dot file

Change-Id: I210bb19b45384eb3639b7e488c7a89049fa6f18d
Signed-off-by: Andre Ghattas <andre.ghattas@arm.com>
Signed-off-by: Szilard Papp <szilard.papp@arm.com>
Diffstat (limited to 'src')
-rw-r--r--  src/armnn/Layer.cpp                                          19
-rw-r--r--  src/armnn/Layer.hpp                                           2
-rw-r--r--  src/armnn/SerializeLayerParameters.cpp                        2
-rw-r--r--  src/armnn/layers/Convolution2dLayer.cpp                      23
-rw-r--r--  src/armnn/layers/Convolution2dLayer.hpp                       3
-rw-r--r--  src/armnn/layers/DepthwiseConvolution2dLayer.cpp             24
-rw-r--r--  src/armnn/layers/DepthwiseConvolution2dLayer.hpp              2
-rw-r--r--  src/armnn/layers/LayerWithParameters.hpp                      3
-rw-r--r--  src/backends/backendsCommon/test/OptimizedNetworkTests.cpp    6
-rw-r--r--  src/backends/reference/test/RefOptimizedNetworkTests.cpp      6
10 files changed, 80 insertions, 10 deletions
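
The patch routes each layer's name, type and backend through the existing ParameterStringifyFunction callback so they end up in the dot file. The snippet below is a minimal, self-contained sketch of that chaining pattern; it does not include the real armnn headers, and the Layer/ConvLayer classes and the callback alias are simplified stand-ins for the types touched by this diff.

#include <functional>
#include <iostream>
#include <string>

// Approximation of the callback type used throughout the diff.
using ParameterStringifyFunction =
    std::function<void(const std::string& name, const std::string& value)>;

// Base class emits the generic parameters, mirroring Layer::SerializeLayerParameters.
struct Layer
{
    std::string m_LayerName;
    std::string m_LayerType;
    std::string m_BackendId;

    virtual ~Layer() = default;

    virtual void SerializeLayerParameters(ParameterStringifyFunction& fn) const
    {
        if (!m_LayerName.empty()) { fn("LayerName", m_LayerName); }
        if (!m_LayerType.empty()) { fn("LayerType", m_LayerType); }
        if (!m_BackendId.empty()) { fn("BackendID", m_BackendId); }
    }
};

// A derived layer adds its own parameters first, then chains to the base class,
// the same order used by Convolution2dLayer in the patch.
struct ConvLayer : Layer
{
    unsigned int m_OutputChannels = 0;

    void SerializeLayerParameters(ParameterStringifyFunction& fn) const override
    {
        fn("OutputChannels", std::to_string(m_OutputChannels));
        Layer::SerializeLayerParameters(fn);
    }
};

int main()
{
    ConvLayer conv;
    conv.m_LayerName      = "conv1";
    conv.m_LayerType      = "Convolution2d";
    conv.m_BackendId      = "CpuRef";
    conv.m_OutputChannels = 16;

    // A dot writer would collect the pairs into a node label; print them here instead.
    ParameterStringifyFunction print = [](const std::string& name, const std::string& value)
    {
        std::cout << name << " : " << value << "\n";
    };
    conv.SerializeLayerParameters(print);
}
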
diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp
index 528020bab5..1e384336c9 100644
--- a/src/armnn/Layer.cpp
+++ b/src/armnn/Layer.cpp
@@ -400,4 +400,23 @@ std::vector<TensorShape> Layer::InferOutputShapes(const std::vector<TensorShape>
}
return inputShapes;
}
+
+void Layer::SerializeLayerParameters(ParameterStringifyFunction& fn) const
+{
+ std::string layerType = GetLayerTypeAsCString(m_Type);
+ std::string backendId = std::string(m_BackendId);
+ if(!(m_LayerName.compare("") == 0) && !m_LayerName.empty())
+ {
+ fn("LayerName",m_LayerName);
+ }
+ if(!(layerType.compare("") == 0) && !layerType.empty())
+ {
+ fn("LayerType",layerType);
+ }
+ if(!(backendId.compare("") == 0) && !backendId.empty())
+ {
+ fn("BackendID",backendId);
+ }
+}
+
} // namespace armnn
diff --git a/src/armnn/Layer.hpp b/src/armnn/Layer.hpp
index 5944ea83ed..c571e50a95 100644
--- a/src/armnn/Layer.hpp
+++ b/src/armnn/Layer.hpp
@@ -281,7 +281,7 @@ public:
/// Helper to serialize the layer parameters to string.
/// (currently used in DotSerializer and company).
- virtual void SerializeLayerParameters(ParameterStringifyFunction &) const {}
+ virtual void SerializeLayerParameters(ParameterStringifyFunction& fn) const;
// Free up the constant source data
virtual void ReleaseConstantData();
diff --git a/src/armnn/SerializeLayerParameters.cpp b/src/armnn/SerializeLayerParameters.cpp
index d416a28f4a..1b0ec0202b 100644
--- a/src/armnn/SerializeLayerParameters.cpp
+++ b/src/armnn/SerializeLayerParameters.cpp
@@ -68,6 +68,7 @@ StringifyLayerParameters<Convolution2dDescriptor>::Serialize(ParameterStringifyF
}
fn("BiasEnabled",(desc.m_BiasEnabled?"true":"false"));
+ fn("DataLayout",GetDataLayoutName(desc.m_DataLayout));
}
void
@@ -95,6 +96,7 @@ StringifyLayerParameters<DepthwiseConvolution2dDescriptor>::Serialize(ParameterS
}
fn("BiasEnabled",(desc.m_BiasEnabled?"true":"false"));
+ fn("DataLayout",std::to_string(int(desc.m_DataLayout)));
}
void
diff --git a/src/armnn/layers/Convolution2dLayer.cpp b/src/armnn/layers/Convolution2dLayer.cpp
index 2c7a570790..4300d55e1e 100644
--- a/src/armnn/layers/Convolution2dLayer.cpp
+++ b/src/armnn/layers/Convolution2dLayer.cpp
@@ -9,7 +9,7 @@
#include <armnn/TypesUtils.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/WorkloadFactory.hpp>
-
+#include <string>
#include <DataLayoutIndexed.hpp>
using namespace armnnUtils;
@@ -20,6 +20,27 @@ namespace armnn
Convolution2dLayer::Convolution2dLayer(const Convolution2dDescriptor& param, const char* name)
: LayerWithParameters(1, 1, LayerType::Convolution2d, param, name)
{
+
+}
+
+void Convolution2dLayer::SerializeLayerParameters(ParameterStringifyFunction& fn) const
+{
+ //using DescriptorType = Parameters;
+ const std::vector<TensorShape>& inputShapes =
+ {
+ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape(),
+ m_Weight->GetTensorInfo().GetShape()
+ };
+ const TensorShape filterShape = inputShapes[1];
+ DataLayoutIndexed dataLayoutIndex(m_Param.m_DataLayout);
+ unsigned int filterWidth = filterShape[dataLayoutIndex.GetWidthIndex()];
+ unsigned int filterHeight = filterShape[dataLayoutIndex.GetHeightIndex()];
+ unsigned int outChannels = filterShape[0];
+
+ fn("OutputChannels",std::to_string(outChannels));
+ fn("FilterWidth",std::to_string(filterWidth));
+ fn("FilterHeight",std::to_string(filterHeight));
+ LayerWithParameters<Convolution2dDescriptor>::SerializeLayerParameters(fn);
}
std::unique_ptr<IWorkload> Convolution2dLayer::CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const
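
The Convolution2dLayer hunk above derives OutputChannels, FilterWidth and FilterHeight from the weight tensor shape through armnnUtils::DataLayoutIndexed. The standalone sketch below mimics that index arithmetic; LayoutIndex is a hypothetical stand-in and the NCHW/NHWC index values are an assumption based on the usual convention, not taken from the armnn sources.

#include <array>
#include <cstdio>

enum class DataLayout { NCHW, NHWC };

// Minimal stand-in for armnnUtils::DataLayoutIndexed: maps a layout to the
// positions of the height/width dimensions in a 4D tensor shape.
struct LayoutIndex
{
    explicit LayoutIndex(DataLayout layout) : m_Layout(layout) {}
    unsigned int GetHeightIndex() const { return m_Layout == DataLayout::NCHW ? 2u : 1u; }
    unsigned int GetWidthIndex()  const { return m_Layout == DataLayout::NCHW ? 3u : 2u; }
    DataLayout m_Layout;
};

int main()
{
    // Hypothetical OIHW weight shape: 16 output channels, 3 input channels, 5x5 kernel.
    std::array<unsigned int, 4> filterShape = { 16u, 3u, 5u, 5u };

    LayoutIndex idx(DataLayout::NCHW);
    unsigned int outChannels  = filterShape[0];                    // index 0, as in the patch
    unsigned int filterHeight = filterShape[idx.GetHeightIndex()];
    unsigned int filterWidth  = filterShape[idx.GetWidthIndex()];

    std::printf("OutputChannels=%u FilterHeight=%u FilterWidth=%u\n",
                outChannels, filterHeight, filterWidth);
}
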
diff --git a/src/armnn/layers/Convolution2dLayer.hpp b/src/armnn/layers/Convolution2dLayer.hpp
index 05a26da82a..0e85b33355 100644
--- a/src/armnn/layers/Convolution2dLayer.hpp
+++ b/src/armnn/layers/Convolution2dLayer.hpp
@@ -15,6 +15,7 @@ class ScopedCpuTensorHandle;
class Convolution2dLayer : public LayerWithParameters<Convolution2dDescriptor>
{
public:
+
/// A unique pointer to store Weight values.
std::unique_ptr<ScopedCpuTensorHandle> m_Weight;
/// A unique pointer to store Bias values.
@@ -43,6 +44,8 @@ public:
void Accept(ILayerVisitor& visitor) const override;
+ void SerializeLayerParameters(ParameterStringifyFunction& fn) const override;
+
protected:
/// Constructor to create a Convolution2dLayer.
/// @param [in] param Convolution2dDescriptor to configure the convolution2d operation.
diff --git a/src/armnn/layers/DepthwiseConvolution2dLayer.cpp b/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
index e49c179eb1..a50a0f6310 100644
--- a/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
+++ b/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
@@ -10,7 +10,7 @@
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/WorkloadFactory.hpp>
-
+#include <string>
#include <DataLayoutIndexed.hpp>
using namespace armnnUtils;
@@ -24,6 +24,28 @@ DepthwiseConvolution2dLayer::DepthwiseConvolution2dLayer(const DepthwiseConvolut
{
}
+void DepthwiseConvolution2dLayer::SerializeLayerParameters(ParameterStringifyFunction& fn) const
+{
+ const std::vector<TensorShape>& inputShapes =
+ {
+ GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape(),
+ m_Weight->GetTensorInfo().GetShape()
+ };
+ const TensorShape filterShape = inputShapes[1];
+ DataLayoutIndexed dataLayoutIndex(m_Param.m_DataLayout);
+ unsigned int inputChannels = filterShape[1];
+ unsigned int filterWidth = filterShape[3];
+ unsigned int filterHeight = filterShape[2];
+ unsigned int depthMultiplier = filterShape[0];
+
+ fn("FilterWidth",std::to_string(filterWidth));
+ fn("FilterHeight",std::to_string(filterHeight));
+ fn("DepthMultiplier",std::to_string(depthMultiplier));
+ fn("InputChannels",std::to_string(inputChannels));
+
+ LayerWithParameters<DepthwiseConvolution2dDescriptor>::SerializeLayerParameters(fn);
+}
+
std::unique_ptr<IWorkload> DepthwiseConvolution2dLayer::CreateWorkload(const Graph& graph,
const IWorkloadFactory& factory) const
{
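
The depthwise hunk reads the weight shape with fixed indices, effectively treating the weights as [depthMultiplier, inputChannels, filterHeight, filterWidth]. A small self-contained sketch of that extraction, using a made-up example shape:

#include <array>
#include <cstdio>

int main()
{
    // Hypothetical depthwise weight shape laid out as [M, I, H, W],
    // matching the indices used in DepthwiseConvolution2dLayer above.
    std::array<unsigned int, 4> filterShape = { 2u, 32u, 3u, 3u };

    unsigned int depthMultiplier = filterShape[0];
    unsigned int inputChannels   = filterShape[1];
    unsigned int filterHeight    = filterShape[2];
    unsigned int filterWidth     = filterShape[3];

    std::printf("DepthMultiplier=%u InputChannels=%u FilterHeight=%u FilterWidth=%u\n",
                depthMultiplier, inputChannels, filterHeight, filterWidth);
}
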
diff --git a/src/armnn/layers/DepthwiseConvolution2dLayer.hpp b/src/armnn/layers/DepthwiseConvolution2dLayer.hpp
index 065ba6cec2..f57591097c 100644
--- a/src/armnn/layers/DepthwiseConvolution2dLayer.hpp
+++ b/src/armnn/layers/DepthwiseConvolution2dLayer.hpp
@@ -43,6 +43,8 @@ public:
void Accept(ILayerVisitor& visitor) const override;
+ void SerializeLayerParameters(ParameterStringifyFunction& fn) const override;
+
protected:
/// Constructor to create a DepthwiseConvolution2dLayer.
/// @param [in] param DepthwiseConvolution2dDescriptor to configure the depthwise convolution2d.
diff --git a/src/armnn/layers/LayerWithParameters.hpp b/src/armnn/layers/LayerWithParameters.hpp
index ba43d6f88d..cce9ca209f 100644
--- a/src/armnn/layers/LayerWithParameters.hpp
+++ b/src/armnn/layers/LayerWithParameters.hpp
@@ -19,9 +19,10 @@ public:
/// Helper to serialize the layer parameters to string
/// (currently used in DotSerializer and company).
- void SerializeLayerParameters(ParameterStringifyFunction & fn) const
+ void SerializeLayerParameters(ParameterStringifyFunction& fn) const override
{
StringifyLayerParameters<Parameters>::Serialize(fn, m_Param);
+ Layer::SerializeLayerParameters(fn);
}
protected:
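
With LayerWithParameters now chaining into Layer::SerializeLayerParameters, the dot writer can emit the extra key/value pairs into each node's record label, which is what the updated test expectations below check for. The following standalone sketch only reconstructs that label string; it is not the actual DotSerializer code.

#include <iostream>
#include <string>
#include <utility>
#include <vector>

int main()
{
    // Parameter pairs as they would be collected from SerializeLayerParameters.
    std::vector<std::pair<std::string, std::string>> params =
    {
        { "LayerType", "Addition" },
        { "BackendID", "CpuRef" }
    };

    // Build a dot record label: {Addition|LayerType : Addition\lBackendID : CpuRef\l}
    std::string label = "{Addition|";
    for (const auto& p : params)
    {
        label += p.first + " : " + p.second + "\\l";
    }
    label += "}";

    std::cout << label << std::endl;
}
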
diff --git a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
index 7b6135df71..cbe74b856f 100644
--- a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
@@ -51,9 +51,9 @@ BOOST_AUTO_TEST_CASE(SerializeToDot)
"digraph Optimized {\n"
" node [shape=\"record\"];\n"
" edge [fontsize=8 fontcolor=\"blue\" fontname=\"arial-bold\"];\n"
- " " << inputId << " [label=\"{Input}\"];\n"
- " " << addId << " [label=\"{Addition}\"];\n"
- " " << outputId << " [label=\"{Output}\"];\n"
+ " " << inputId << " [label=\"{Input|LayerType : Input\\lBackendID : CpuRef\\l}\"];\n"
+ " " << addId << " [label=\"{Addition|LayerType : Addition\\lBackendID : CpuRef\\l}\"];\n"
+ " " << outputId << " [label=\"{Output|LayerType : Output\\lBackendID : CpuRef\\l}\"];\n"
" " << inputId << " -> " << addId << " [label=< [4] >];\n"
" " << inputId << " -> " << addId << " [label=< [4] >];\n"
" " << addId << " -> " << outputId << " [label=< [4] >];\n"
diff --git a/src/backends/reference/test/RefOptimizedNetworkTests.cpp b/src/backends/reference/test/RefOptimizedNetworkTests.cpp
index 68617b9d4d..1a29e73af8 100644
--- a/src/backends/reference/test/RefOptimizedNetworkTests.cpp
+++ b/src/backends/reference/test/RefOptimizedNetworkTests.cpp
@@ -200,9 +200,9 @@ BOOST_AUTO_TEST_CASE(FP16TurboModeTestOnCpuRef)
"digraph Optimized {\n"
" node [shape=\"record\"];\n"
" edge [fontsize=8 fontcolor=\"blue\" fontname=\"arial-bold\"];\n"
- " " << inputId << " [label=\"{Input}\"];\n"
- " " << floorId << " [label=\"{Floor}\"];\n"
- " " << outputId << " [label=\"{Output}\"];\n"
+ " " << inputId << " [label=\"{Input|LayerType : Input\\lBackendID : CpuRef\\l}\"];\n"
+ " " << floorId << " [label=\"{Floor|LayerType : Floor\\lBackendID : CpuRef\\l}\"];\n"
+ " " << outputId << " [label=\"{Output|LayerType : Output\\lBackendID : CpuRef\\l}\"];\n"
" " << inputId << " -> " << floorId << " [label=< [4] >];\n"
" " << floorId << " -> " << outputId << " [label=< [4] >];\n"
"}\n";