diff options
author | Jan Eilers <jan.eilers@arm.com> | 2021-09-24 15:45:46 +0100 |
---|---|---|
committer | Jan Eilers <jan.eilers@arm.com> | 2021-10-02 16:27:39 +0100 |
commit | 1b2654fb799c3d25ffcef4d31b5d026d359e2f8f (patch) | |
tree | 0397fdf24f286715e26a0e63bddaa0502f64caf7 /src/armnn/test/NetworkTests.cpp | |
parent | b63a31170aee1d28267d83a4bc67b57708fb6b05 (diff) | |
download | armnn-1b2654fb799c3d25ffcef4d31b5d026d359e2f8f.tar.gz |
IVGCVSW-5985 Remove deprecated code
* Removes deprecated AddLayer, IsLayerSupported functions
* Marks the whole LayerVisitor class as deprecated, not just the
  constructor. This required wrapping all Accept functions in a
  no-deprecate macro because the LayerVisitor is used as a parameter
  there
* Removes usage of deprecated LayerVisitor and replaces it
with ExecuteStrategy. This required a few structural changes
in the unit tests
* Adds a default implementation for IStrategy called StrategyBase
* Changes pyarmnn to use the non-deprecated constructor for
  INetworkProperties and adds a related unit test
* Marks usage of deprecated code in pyarmnn as deprecated. This
  required extending INetworkProperties to allow backwards compatibility
* Removes deprecated functions from CpuAcc, GpuAcc and Ref backends
Note: This patch breaks compatibility with backends that are not
updated by this patch
!android-nn-driver:6325
Signed-off-by: Jan Eilers <jan.eilers@arm.com>
Change-Id: Id13b6f37a74d26eadeda2da1dc92915e725ed5a5
Diffstat (limited to 'src/armnn/test/NetworkTests.cpp')
-rw-r--r-- | src/armnn/test/NetworkTests.cpp | 118 |
1 files changed, 77 insertions, 41 deletions
diff --git a/src/armnn/test/NetworkTests.cpp b/src/armnn/test/NetworkTests.cpp index 9acb60df4a..25dab596fd 100644 --- a/src/armnn/test/NetworkTests.cpp +++ b/src/armnn/test/NetworkTests.cpp @@ -398,26 +398,44 @@ TEST_CASE("NetworkModification_SplitterMultiplication") TEST_CASE("Network_AddQuantize") { - struct Test : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy> + struct Test : public armnn::IStrategy { - void VisitQuantizeLayer(const armnn::IConnectableLayer* layer, const char* name) override + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector<armnn::ConstTensor>& constants, + const char* name, + const armnn::LayerBindingId id = 0) override { - m_Visited = true; - - CHECK(layer); - - std::string expectedName = std::string("quantize"); - CHECK(std::string(layer->GetName()) == expectedName); - CHECK(std::string(name) == expectedName); - - CHECK(layer->GetNumInputSlots() == 1); - CHECK(layer->GetNumOutputSlots() == 1); - - const armnn::TensorInfo& infoIn = layer->GetInputSlot(0).GetConnection()->GetTensorInfo(); - CHECK((infoIn.GetDataType() == armnn::DataType::Float32)); - - const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo(); - CHECK((infoOut.GetDataType() == armnn::DataType::QAsymmU8)); + armnn::IgnoreUnused(descriptor, constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Quantize: + { + m_Visited = true; + + CHECK(layer); + + std::string expectedName = std::string("quantize"); + CHECK(std::string(layer->GetName()) == expectedName); + CHECK(std::string(name) == expectedName); + + CHECK(layer->GetNumInputSlots() == 1); + CHECK(layer->GetNumOutputSlots() == 1); + + const armnn::TensorInfo& infoIn = layer->GetInputSlot(0).GetConnection()->GetTensorInfo(); + CHECK((infoIn.GetDataType() == armnn::DataType::Float32)); + + const armnn::TensorInfo& infoOut = 
layer->GetOutputSlot(0).GetTensorInfo(); + CHECK((infoOut.GetDataType() == armnn::DataType::QAsymmU8)); + break; + } + default: + { + // nothing + } + } } bool m_Visited = false; @@ -440,7 +458,7 @@ TEST_CASE("Network_AddQuantize") quantize->GetOutputSlot(0).SetTensorInfo(infoOut); Test testQuantize; - graph->Accept(testQuantize); + graph->ExecuteStrategy(testQuantize); CHECK(testQuantize.m_Visited == true); @@ -448,29 +466,47 @@ TEST_CASE("Network_AddQuantize") TEST_CASE("Network_AddMerge") { - struct Test : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy> + struct Test : public armnn::IStrategy { - void VisitMergeLayer(const armnn::IConnectableLayer* layer, const char* name) override + void ExecuteStrategy(const armnn::IConnectableLayer* layer, + const armnn::BaseDescriptor& descriptor, + const std::vector<armnn::ConstTensor>& constants, + const char* name, + const armnn::LayerBindingId id = 0) override { - m_Visited = true; - - CHECK(layer); - - std::string expectedName = std::string("merge"); - CHECK(std::string(layer->GetName()) == expectedName); - CHECK(std::string(name) == expectedName); - - CHECK(layer->GetNumInputSlots() == 2); - CHECK(layer->GetNumOutputSlots() == 1); - - const armnn::TensorInfo& infoIn0 = layer->GetInputSlot(0).GetConnection()->GetTensorInfo(); - CHECK((infoIn0.GetDataType() == armnn::DataType::Float32)); - - const armnn::TensorInfo& infoIn1 = layer->GetInputSlot(1).GetConnection()->GetTensorInfo(); - CHECK((infoIn1.GetDataType() == armnn::DataType::Float32)); - - const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo(); - CHECK((infoOut.GetDataType() == armnn::DataType::Float32)); + armnn::IgnoreUnused(descriptor, constants, id); + switch (layer->GetType()) + { + case armnn::LayerType::Input: break; + case armnn::LayerType::Output: break; + case armnn::LayerType::Merge: + { + m_Visited = true; + + CHECK(layer); + + std::string expectedName = std::string("merge"); + CHECK(std::string(layer->GetName()) == 
expectedName); + CHECK(std::string(name) == expectedName); + + CHECK(layer->GetNumInputSlots() == 2); + CHECK(layer->GetNumOutputSlots() == 1); + + const armnn::TensorInfo& infoIn0 = layer->GetInputSlot(0).GetConnection()->GetTensorInfo(); + CHECK((infoIn0.GetDataType() == armnn::DataType::Float32)); + + const armnn::TensorInfo& infoIn1 = layer->GetInputSlot(1).GetConnection()->GetTensorInfo(); + CHECK((infoIn1.GetDataType() == armnn::DataType::Float32)); + + const armnn::TensorInfo& infoOut = layer->GetOutputSlot(0).GetTensorInfo(); + CHECK((infoOut.GetDataType() == armnn::DataType::Float32)); + break; + } + default: + { + // nothing + } + } } bool m_Visited = false; @@ -493,7 +529,7 @@ TEST_CASE("Network_AddMerge") merge->GetOutputSlot(0).SetTensorInfo(info); Test testMerge; - network->Accept(testMerge); + network->ExecuteStrategy(testMerge); CHECK(testMerge.m_Visited == true); } |