aboutsummaryrefslogtreecommitdiff
path: root/src/armnn/test/ConstTensorLayerVisitor.hpp
diff options
context:
space:
mode:
authorJan Eilers <jan.eilers@arm.com>2021-09-24 15:45:46 +0100
committerJan Eilers <jan.eilers@arm.com>2021-10-02 16:27:39 +0100
commit1b2654fb799c3d25ffcef4d31b5d026d359e2f8f (patch)
tree0397fdf24f286715e26a0e63bddaa0502f64caf7 /src/armnn/test/ConstTensorLayerVisitor.hpp
parentb63a31170aee1d28267d83a4bc67b57708fb6b05 (diff)
downloadarmnn-1b2654fb799c3d25ffcef4d31b5d026d359e2f8f.tar.gz
IVGCVSW-5985 Remove deprecated code
* Removes deprecated AddLayer, IsLayerSupported functions * Marks the whole LayerVisitor class as deprecated, not just the constructor. This required wrapping all Accept functions in a no-deprecate macro because the LayerVisitor is used as a parameter in them * Removes usage of the deprecated LayerVisitor and replaces it with ExecuteStrategy. This required a few structural changes in the unit tests * Adds a default implementation for IStrategy called StrategyBase * Changes pyarmnn to use the non-deprecated constructor for INetworkProperties and adds a related unit test * Marks usage of deprecated code in pyarmnn as deprecated. This required extending INetworkProperties to allow backwards compatibility * Removes deprecated functions from the CpuAcc, GpuAcc and Ref backends Note: This patch breaks compatibility with backends that are not updated in this patch !android-nn-driver:6325 Signed-off-by: Jan Eilers <jan.eilers@arm.com> Change-Id: Id13b6f37a74d26eadeda2da1dc92915e725ed5a5
Diffstat (limited to 'src/armnn/test/ConstTensorLayerVisitor.hpp')
-rw-r--r--src/armnn/test/ConstTensorLayerVisitor.hpp358
1 file changed, 276 insertions, 82 deletions
diff --git a/src/armnn/test/ConstTensorLayerVisitor.hpp b/src/armnn/test/ConstTensorLayerVisitor.hpp
index 35e2e872f7..5538852b60 100644
--- a/src/armnn/test/ConstTensorLayerVisitor.hpp
+++ b/src/armnn/test/ConstTensorLayerVisitor.hpp
@@ -5,9 +5,14 @@
#pragma once
#include "TestLayerVisitor.hpp"
+#include "LayersFwd.hpp"
#include <armnn/Descriptors.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>
+#include <armnn/utility/PolymorphicDowncast.hpp>
+#include <backendsCommon/TensorHandle.hpp>
+
+#include <doctest/doctest.h>
namespace armnn
{
@@ -27,17 +32,33 @@ public:
virtual ~TestConvolution2dLayerVisitor() {}
- void VisitConvolution2dLayer(const IConnectableLayer* layer,
- const Convolution2dDescriptor& convolution2dDescriptor,
- const ConstTensor& weights,
- const Optional<ConstTensor>& biases,
- const char* name = nullptr) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- CheckLayerPointer(layer);
- CheckLayerName(name);
- CheckDescriptor(convolution2dDescriptor);
- CheckConstTensors(m_Weights, weights);
- CheckOptionalConstTensors(m_Biases, biases);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Convolution2d:
+ {
+ CheckLayerPointer(layer);
+ CheckLayerName(name);
+ CheckDescriptor(static_cast<const armnn::Convolution2dDescriptor&>(descriptor));
+ CheckConstTensors(m_Weights, constants[0]);
+ if (m_Biases.has_value())
+ {
+ CHECK(constants.size() == 2);
+ CheckConstTensors(m_Biases.value(), constants[1]);
+ }
+ break;
+ }
+ default:
+ {
+ m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
+ }
+ }
}
protected:
@@ -64,17 +85,33 @@ public:
virtual ~TestDepthwiseConvolution2dLayerVisitor() {}
- void VisitDepthwiseConvolution2dLayer(const IConnectableLayer* layer,
- const DepthwiseConvolution2dDescriptor& convolution2dDescriptor,
- const ConstTensor& weights,
- const Optional<ConstTensor>& biases,
- const char* name = nullptr) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- CheckLayerPointer(layer);
- CheckLayerName(name);
- CheckDescriptor(convolution2dDescriptor);
- CheckConstTensors(m_Weights, weights);
- CheckOptionalConstTensors(m_Biases, biases);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::DepthwiseConvolution2d:
+ {
+ CheckLayerPointer(layer);
+ CheckLayerName(name);
+ CheckDescriptor(static_cast<const armnn::DepthwiseConvolution2dDescriptor&>(descriptor));
+ CheckConstTensors(m_Weights, constants[0]);
+ if (m_Biases.has_value())
+ {
+ CHECK(constants.size() == 2);
+ CheckConstTensors(m_Biases.value(), constants[1]);
+ }
+ break;
+ }
+ default:
+ {
+ m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
+ }
+ }
}
protected:
@@ -97,13 +134,27 @@ public:
virtual ~TestFullyConnectedLayerVistor() {}
- void VisitFullyConnectedLayer(const IConnectableLayer* layer,
- const FullyConnectedDescriptor& fullyConnectedDescriptor,
- const char* name = nullptr) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- CheckLayerPointer(layer);
- CheckLayerName(name);
- CheckDescriptor(fullyConnectedDescriptor);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::FullyConnected:
+ {
+ CheckLayerPointer(layer);
+ CheckLayerName(name);
+ CheckDescriptor(static_cast<const armnn::FullyConnectedDescriptor&>(descriptor));
+ break;
+ }
+ default:
+ {
+ m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
+ }
+ }
}
protected:
@@ -129,21 +180,31 @@ public:
, m_Gamma(gamma)
{}
- void VisitBatchNormalizationLayer(const IConnectableLayer* layer,
- const BatchNormalizationDescriptor& descriptor,
- const ConstTensor& mean,
- const ConstTensor& variance,
- const ConstTensor& beta,
- const ConstTensor& gamma,
- const char* name = nullptr) override
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- CheckLayerPointer(layer);
- CheckLayerName(name);
- CheckDescriptor(descriptor);
- CheckConstTensors(m_Mean, mean);
- CheckConstTensors(m_Variance, variance);
- CheckConstTensors(m_Beta, beta);
- CheckConstTensors(m_Gamma, gamma);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::BatchNormalization:
+ {
+ CheckLayerPointer(layer);
+ CheckLayerName(name);
+ CheckDescriptor(static_cast<const armnn::BatchNormalizationDescriptor&>(descriptor));
+ CheckConstTensors(m_Mean, constants[0]);
+ CheckConstTensors(m_Variance, constants[1]);
+ CheckConstTensors(m_Beta, constants[2]);
+ CheckConstTensors(m_Gamma, constants[3]);
+ break;
+ }
+ default:
+ {
+ m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
+ }
+ }
}
protected:
@@ -166,81 +227,201 @@ public:
, m_Input(input)
{}
- void VisitConstantLayer(const IConnectableLayer* layer,
- const ConstTensor& input,
- const char* name = nullptr)
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- CheckLayerPointer(layer);
- CheckLayerName(name);
- CheckConstTensors(m_Input, input);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Constant:
+ {
+ CheckLayerPointer(layer);
+ CheckLayerName(name);
+ CheckConstTensors(m_Input, constants[0]);
+ break;
+ }
+ default:
+ {
+ m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
+ }
+ }
}
private:
ConstTensor m_Input;
};
-class TestLstmLayerVisitor : public TestLayerVisitor
+// Used to supply utility functions to the actual lstm test visitors
+class LstmVisitor : public TestLayerVisitor
+{
+public:
+ explicit LstmVisitor(const LstmInputParams& params,
+ const char* name = nullptr)
+ : TestLayerVisitor(name)
+ , m_InputParams(params) {}
+
+protected:
+ template<typename LayerType>
+ void CheckInputParameters(const LayerType* layer, const LstmInputParams& inputParams);
+
+ LstmInputParams m_InputParams;
+};
+
+template<typename LayerType>
+void LstmVisitor::CheckInputParameters(const LayerType* layer, const LstmInputParams& inputParams)
+{
+ CheckConstTensorPtrs("OutputGateBias",
+ inputParams.m_OutputGateBias,
+ layer->m_BasicParameters.m_OutputGateBias);
+ CheckConstTensorPtrs("InputToForgetWeights",
+ inputParams.m_InputToForgetWeights,
+ layer->m_BasicParameters.m_InputToForgetWeights);
+ CheckConstTensorPtrs("InputToCellWeights",
+ inputParams.m_InputToCellWeights,
+ layer->m_BasicParameters.m_InputToCellWeights);
+ CheckConstTensorPtrs("InputToOutputWeights",
+ inputParams.m_InputToOutputWeights,
+ layer->m_BasicParameters.m_InputToOutputWeights);
+ CheckConstTensorPtrs("RecurrentToForgetWeights",
+ inputParams.m_RecurrentToForgetWeights,
+ layer->m_BasicParameters.m_RecurrentToForgetWeights);
+ CheckConstTensorPtrs("RecurrentToCellWeights",
+ inputParams.m_RecurrentToCellWeights,
+ layer->m_BasicParameters.m_RecurrentToCellWeights);
+ CheckConstTensorPtrs("RecurrentToOutputWeights",
+ inputParams.m_RecurrentToOutputWeights,
+ layer->m_BasicParameters.m_RecurrentToOutputWeights);
+ CheckConstTensorPtrs("ForgetGateBias",
+ inputParams.m_ForgetGateBias,
+ layer->m_BasicParameters.m_ForgetGateBias);
+ CheckConstTensorPtrs("CellBias",
+ inputParams.m_CellBias,
+ layer->m_BasicParameters.m_CellBias);
+
+ CheckConstTensorPtrs("InputToInputWeights",
+ inputParams.m_InputToInputWeights,
+ layer->m_CifgParameters.m_InputToInputWeights);
+ CheckConstTensorPtrs("RecurrentToInputWeights",
+ inputParams.m_RecurrentToInputWeights,
+ layer->m_CifgParameters.m_RecurrentToInputWeights);
+ CheckConstTensorPtrs("InputGateBias",
+ inputParams.m_InputGateBias,
+ layer->m_CifgParameters.m_InputGateBias);
+
+ CheckConstTensorPtrs("ProjectionBias",
+ inputParams.m_ProjectionBias,
+ layer->m_ProjectionParameters.m_ProjectionBias);
+ CheckConstTensorPtrs("ProjectionWeights",
+ inputParams.m_ProjectionWeights,
+ layer->m_ProjectionParameters.m_ProjectionWeights);
+
+ CheckConstTensorPtrs("CellToInputWeights",
+ inputParams.m_CellToInputWeights,
+ layer->m_PeepholeParameters.m_CellToInputWeights);
+ CheckConstTensorPtrs("CellToForgetWeights",
+ inputParams.m_CellToForgetWeights,
+ layer->m_PeepholeParameters.m_CellToForgetWeights);
+ CheckConstTensorPtrs("CellToOutputWeights",
+ inputParams.m_CellToOutputWeights,
+ layer->m_PeepholeParameters.m_CellToOutputWeights);
+
+ CheckConstTensorPtrs("InputLayerNormWeights",
+ inputParams.m_InputLayerNormWeights,
+ layer->m_LayerNormParameters.m_InputLayerNormWeights);
+ CheckConstTensorPtrs("ForgetLayerNormWeights",
+ inputParams.m_ForgetLayerNormWeights,
+ layer->m_LayerNormParameters.m_ForgetLayerNormWeights);
+ CheckConstTensorPtrs("CellLayerNormWeights",
+ inputParams.m_CellLayerNormWeights,
+ layer->m_LayerNormParameters.m_CellLayerNormWeights);
+ CheckConstTensorPtrs("OutputLayerNormWeights",
+ inputParams.m_OutputLayerNormWeights,
+ layer->m_LayerNormParameters.m_OutputLayerNormWeights);
+}
+
+class TestLstmLayerVisitor : public LstmVisitor
{
public:
explicit TestLstmLayerVisitor(const LstmDescriptor& descriptor,
const LstmInputParams& params,
const char* name = nullptr)
- : TestLayerVisitor(name)
+ : LstmVisitor(params, name)
, m_Descriptor(descriptor)
- , m_InputParams(params)
{}
- void VisitLstmLayer(const IConnectableLayer* layer,
- const LstmDescriptor& descriptor,
- const LstmInputParams& params,
- const char* name = nullptr)
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- CheckLayerPointer(layer);
- CheckLayerName(name);
- CheckDescriptor(descriptor);
- CheckInputParameters(params);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::Lstm:
+ {
+ CheckLayerPointer(layer);
+ CheckLayerName(name);
+ CheckDescriptor(static_cast<const armnn::LstmDescriptor&>(descriptor));
+ CheckInputParameters<const LstmLayer>(PolymorphicDowncast<const LstmLayer*>(layer), m_InputParams);
+ break;
+ }
+ default:
+ {
+ m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
+ }
+ }
}
protected:
void CheckDescriptor(const LstmDescriptor& descriptor);
- void CheckInputParameters(const LstmInputParams& inputParams);
- void CheckConstTensorPtrs(const std::string& name, const ConstTensor* expected, const ConstTensor* actual);
private:
LstmDescriptor m_Descriptor;
- LstmInputParams m_InputParams;
};
-class TestQLstmLayerVisitor : public TestLayerVisitor
+class TestQLstmLayerVisitor : public LstmVisitor
{
public:
explicit TestQLstmLayerVisitor(const QLstmDescriptor& descriptor,
const LstmInputParams& params,
const char* name = nullptr)
- : TestLayerVisitor(name)
+ : LstmVisitor(params, name)
, m_Descriptor(descriptor)
- , m_InputParams(params)
{}
- void VisitQLstmLayer(const IConnectableLayer* layer,
- const QLstmDescriptor& descriptor,
- const LstmInputParams& params,
- const char* name = nullptr)
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- CheckLayerPointer(layer);
- CheckLayerName(name);
- CheckDescriptor(descriptor);
- CheckInputParameters(params);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::QLstm:
+ {
+ CheckLayerPointer(layer);
+ CheckLayerName(name);
+ CheckDescriptor(static_cast<const armnn::QLstmDescriptor&>(descriptor));
+ CheckInputParameters<const QLstmLayer>(PolymorphicDowncast<const QLstmLayer*>(layer), m_InputParams);
+ break;
+ }
+ default:
+ {
+ m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
+ }
+ }
}
protected:
void CheckDescriptor(const QLstmDescriptor& descriptor);
- void CheckInputParameters(const LstmInputParams& inputParams);
- void CheckConstTensorPtrs(const std::string& name, const ConstTensor* expected, const ConstTensor* actual);
private:
QLstmDescriptor m_Descriptor;
- LstmInputParams m_InputParams;
};
@@ -253,18 +434,31 @@ public:
, m_InputParams(params)
{}
- void VisitQuantizedLstmLayer(const IConnectableLayer* layer,
- const QuantizedLstmInputParams& params,
- const char* name = nullptr)
+ void ExecuteStrategy(const armnn::IConnectableLayer* layer,
+ const armnn::BaseDescriptor& descriptor,
+ const std::vector<armnn::ConstTensor>& constants,
+ const char* name,
+ const armnn::LayerBindingId id = 0) override
{
- CheckLayerPointer(layer);
- CheckLayerName(name);
- CheckInputParameters(params);
+ armnn::IgnoreUnused(descriptor, constants, id);
+ switch (layer->GetType())
+ {
+ case armnn::LayerType::QuantizedLstm:
+ {
+ CheckLayerPointer(layer);
+ CheckLayerName(name);
+ CheckInputParameters(m_InputParams);
+ break;
+ }
+ default:
+ {
+ m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
+ }
+ }
}
protected:
- void CheckInputParameters(const QuantizedLstmInputParams& inputParams);
- void CheckConstTensorPtrs(const std::string& name, const ConstTensor* expected, const ConstTensor* actual);
+ void CheckInputParameters(const QuantizedLstmInputParams& params);
private:
QuantizedLstmInputParams m_InputParams;