ArmNN 20.02
MoveTransposeUpTests.cpp File Reference
#include "../TestUtils.hpp"
#include <Optimizer.hpp>
#include <boost/test/unit_test.hpp>

Go to the source code of this file.

Functions

 BOOST_AUTO_TEST_CASE (MoveTransposeUpTest)
 

Function Documentation

◆ BOOST_AUTO_TEST_CASE()

BOOST_AUTO_TEST_CASE ( MoveTransposeUpTest )

Definition at line 15 of file MoveTransposeUpTests.cpp.

References BOOST_AUTO_TEST_SUITE_END(), CheckSequence(), armnn::Float32, Layer::GetInputSlot(), Layer::GetOutputHandler(), armnn::MakeOptimizations(), Optimizer::Pass(), and OutputHandler::SetTensorInfo().
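The two BOOST_TEST assertions in the body rely on the CheckSequence() helper declared in TestUtils.hpp, which walks the graph layer by layer and applies one predicate per layer. Below is a minimal sketch of such a helper; the variadic overload, the header paths, and the exact signatures are assumptions for illustration, not a copy of the TestUtils.hpp implementation.

#include <Graph.hpp>   // armnn::Graph (header path as used inside the ArmNN source tree; an assumption here)
#include <iterator>
#include <utility>

// Sketch of a CheckSequence-style helper (assumed shape, for illustration only).
// Base case: the sequence matches only if every layer has been consumed.
inline bool CheckSequence(const armnn::Graph::ConstIterator first,
                          const armnn::Graph::ConstIterator last)
{
    return first == last;
}

// Recursive case: the current layer must satisfy the first predicate, and the
// remaining layers must satisfy the remaining predicates, in order.
template <typename U, typename... Us>
bool CheckSequence(const armnn::Graph::ConstIterator first,
                   const armnn::Graph::ConstIterator last,
                   U&& u,
                   Us&&... us)
{
    return first != last && u(*first) && CheckSequence(std::next(first), last, std::forward<Us>(us)...);
}

In the listing below, each &IsLayerOfType<...> argument is one such predicate, so the assertions check the exact front-to-back ordering of the layers in the graph.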

16 {
17  const armnn::TensorInfo info({ 1, 5, 2, 3 }, armnn::DataType::Float32);
18  const armnn::TensorInfo transposed({ 1, 3, 5, 2 }, armnn::DataType::Float32);
19 
20  armnn::Graph graph;
21 
22  armnn::LayerBindingId inputId = 0;
23 
24  armnn::Layer* head = graph.AddLayer<armnn::OutputLayer>(0, "output");
25 
26  std::string transposeLayerName = "original_transpose";
27 
28  // Insert transpose
29  head = graph.InsertNewLayer<armnn::TransposeLayer>(head->GetInputSlot(0),
30  armnn::TransposeDescriptor({ 0, 3, 1, 2 }),
31  transposeLayerName.c_str());
32 
33  head->GetOutputHandler().SetTensorInfo(transposed);
34 
35  // Inserts layers that don't care about data format.
36  head = graph.InsertNewLayer<armnn::ActivationLayer>(head->GetInputSlot(0), armnn::ActivationDescriptor{}, "");
37  head->GetOutputHandler().SetTensorInfo(info);
38 
39  head = graph.InsertNewLayer<armnn::AdditionLayer>(head->GetInputSlot(0), "");
40  head->GetOutputHandler().SetTensorInfo(info);
41 
42  // Inserts input for 2nd input of Addition.
43  graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(1), inputId++, "")
44  ->GetOutputHandler()
45  .SetTensorInfo(info);
46 
47  head = graph.InsertNewLayer<armnn::FakeQuantizationLayer>(head->GetInputSlot(0),
48  armnn::FakeQuantizationDescriptor{}, "");
49  head->GetOutputHandler().SetTensorInfo(info);
50 
51  head = graph.InsertNewLayer<armnn::FloorLayer>(head->GetInputSlot(0), "");
52  head->GetOutputHandler().SetTensorInfo(info);
53 
54  head = graph.InsertNewLayer<armnn::MemCopyLayer>(head->GetInputSlot(0), "");
55  head->GetOutputHandler().SetTensorInfo(info);
56 
57  head = graph.InsertNewLayer<armnn::MultiplicationLayer>(head->GetInputSlot(0), "");
58  head->GetOutputHandler().SetTensorInfo(info);
59 
60  // Inserts input for 2nd input of Multiplication.
61  graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(1), inputId++, "")
62  ->GetOutputHandler()
63  .SetTensorInfo(info);
64 
65  // Inserts input.
66  graph.InsertNewLayer<armnn::InputLayer>(head->GetInputSlot(0), inputId++, "")
67  ->GetOutputHandler()
68  .SetTensorInfo(info);
69 
70  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<armnn::InputLayer>,
71  &IsLayerOfType<armnn::InputLayer>, &IsLayerOfType<armnn::InputLayer>,
72  &IsLayerOfType<armnn::MultiplicationLayer>, &IsLayerOfType<armnn::MemCopyLayer>,
73  &IsLayerOfType<armnn::FloorLayer>, &IsLayerOfType<armnn::FakeQuantizationLayer>,
74  &IsLayerOfType<armnn::AdditionLayer>, &IsLayerOfType<armnn::ActivationLayer>,
75  &IsLayerOfType<armnn::TransposeLayer>, &IsLayerOfType<armnn::OutputLayer>));
76 
77  armnn::Optimizer::Pass(graph, armnn::MakeOptimizations(MoveTransposeUp()));
78 
79  // The transpose is moved to the top. New transposes for layers with multiple inputs.
80  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(), &IsLayerOfType<armnn::InputLayer>,
81  &IsLayerOfType<armnn::InputLayer>, &IsLayerOfType<armnn::InputLayer>,
82  &IsLayerOfType<armnn::TransposeLayer>, &IsLayerOfType<armnn::TransposeLayer>,
83  &IsLayerOfType<armnn::TransposeLayer>, &IsLayerOfType<armnn::MultiplicationLayer>,
84  &IsLayerOfType<armnn::MemCopyLayer>, &IsLayerOfType<armnn::FloorLayer>,
85  &IsLayerOfType<armnn::FakeQuantizationLayer>, &IsLayerOfType<armnn::AdditionLayer>,
86  &IsLayerOfType<armnn::ActivationLayer>, &IsLayerOfType<armnn::OutputLayer>));
87 
88  std::list<std::string> testRelatedLayers = { transposeLayerName };
89 
90  BOOST_TEST(CheckRelatedLayers<armnn::TransposeLayer>(graph, testRelatedLayers));
91 }
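The two shapes at the top of the test illustrate how the transpose mapping is applied: with { 0, 3, 1, 2 }, output dimension i takes input dimension mapping[i], which turns the { 1, 5, 2, 3 } shape of info into the { 1, 3, 5, 2 } shape of transposed. The standalone snippet below (not part of the test) simply recomputes that mapping; the semantics stated here are inferred from the two shapes used in the test rather than quoted from the TransposeLayer documentation.

#include <array>
#include <cstdio>

int main()
{
    const std::array<unsigned int, 4> input   = { 1, 5, 2, 3 };  // shape of "info"
    const std::array<unsigned int, 4> mapping = { 0, 3, 1, 2 };  // mapping passed to TransposeDescriptor in the test
    std::array<unsigned int, 4> output{};

    // Output dimension i takes input dimension mapping[i].
    for (unsigned int i = 0; i < 4; ++i)
    {
        output[i] = input[mapping[i]];
    }

    // Prints { 1, 3, 5, 2 }, matching the "transposed" TensorInfo in the test.
    std::printf("{ %u, %u, %u, %u }\n", output[0], output[1], output[2], output[3]);
    return 0;
}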