ArmNN 21.05
AddBroadcastReshapeLayerTests.cpp File Reference
#include "../GraphUtils.hpp"
#include "../TestUtils.hpp"
#include <Optimizer.hpp>
#include <boost/test/unit_test.hpp>

Go to the source code of this file.

Functions

void AddBroadcastReshapeLayerOptimizerTest (const TensorInfo &info0, const TensorInfo &info1, const TensorInfo &outputInfo, const std::string &reshapeLayerName, const TensorShape &expectedReshapeShape, const DataType expectedDataType)
 
 BOOST_AUTO_TEST_CASE (AddBroadcastReshapeLayerSimpleTest)
 
 BOOST_AUTO_TEST_CASE (AddBroadcastReshapeLayer1DTest)
 
 BOOST_AUTO_TEST_CASE (AddBroadcastReshapeLayer2DTest)
 
 BOOST_AUTO_TEST_CASE (AddBroadcastReshapeLayer3DTest)
 
 BOOST_AUTO_TEST_CASE (AddBroadcastReshapeLayer3DMergedTest)
 
 BOOST_AUTO_TEST_CASE (AddBroadcastReshapeLayerSubtractionTest)
 
 BOOST_AUTO_TEST_CASE (AddBroadcastReshapeLayerDivisionTest)
 
 BOOST_AUTO_TEST_CASE (AddBroadcastReshapeLayerMultiplicationTest)
 
 BOOST_AUTO_TEST_CASE (AddNoBroadcastReshapeLayerTest)
 
 BOOST_AUTO_TEST_CASE (ReshapeParentConstLayerTest)
 
 BOOST_AUTO_TEST_CASE (ReshapeParentConstAddLayerMultipleConnectionsTest)
 

Function Documentation
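All of the test cases documented below follow the same basic pattern: build a small Graph containing an elementwise layer whose two inputs have different ranks, run the AddBroadcastReshapeLayer optimization over it, and then check the resulting layer sequence and the TensorInfo of the inserted reshape. The following condensed sketch of that pattern is illustrative only; it assumes the source file's using namespace armnn and using namespace armnn::optimizations directives, as in the listings below.

Graph graph;
auto input0 = graph.AddLayer<InputLayer>(0, "input0");
auto input1 = graph.AddLayer<InputLayer>(1, "input1");
auto add    = graph.AddLayer<AdditionLayer>("add");
auto output = graph.AddLayer<OutputLayer>(0, "output");

// The second input has a lower rank than the first, so it needs broadcasting.
input0->GetOutputSlot().SetTensorInfo(TensorInfo({ 1, 2, 3, 5 }, DataType::Float32));
input1->GetOutputSlot().SetTensorInfo(TensorInfo({ 5 }, DataType::Float32));
add->GetOutputSlot().SetTensorInfo(TensorInfo({ 1, 2, 3, 5 }, DataType::Float32));

input0->GetOutputSlot().Connect(add->GetInputSlot(0));
input1->GetOutputSlot().Connect(add->GetInputSlot(1));
add->GetOutputSlot().Connect(output->GetInputSlot(0));

// Run the optimization under test; it should insert a ReshapeLayer in front of the lower-rank input.
armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

// The inserted layer is named after the consuming layer and the index of the reshaped input slot.
Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:add-1");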

◆ AddBroadcastReshapeLayerOptimizerTest()

void AddBroadcastReshapeLayerOptimizerTest ( const TensorInfo &  info0,
                                             const TensorInfo &  info1,
                                             const TensorInfo &  outputInfo,
                                             const std::string &  reshapeLayerName,
                                             const TensorShape &  expectedReshapeShape,
                                             const DataType  expectedDataType
                                           )

Definition at line 18 of file AddBroadcastReshapeLayerTests.cpp.

References Graph::AddLayer(), Graph::cbegin(), Graph::cend(), CheckSequence(), OutputSlot::Connect(), GetFirstLayerWithName(), Layer::GetInputSlot(), Layer::GetOutputSlot(), OutputSlot::GetTensorInfo(), armnn::MakeOptimizations(), Optimizer::Pass(), and OutputSlot::SetTensorInfo().

Referenced by BOOST_AUTO_TEST_CASE().

24 {
25  Graph graph;
26 
27  auto input0 = graph.AddLayer<InputLayer>(0, "input0");
28  auto input1 = graph.AddLayer<InputLayer>(1, "input1");
29  auto add = graph.AddLayer<AdditionLayer>("add");
30  auto output = graph.AddLayer<OutputLayer>(0, "output");
31  input0->GetOutputSlot().SetTensorInfo(info0);
32  input1->GetOutputSlot().SetTensorInfo(info1);
33  add->GetOutputSlot().SetTensorInfo(outputInfo);
34 
35  input0->GetOutputSlot().Connect(add->GetInputSlot(0));
36  input1->GetOutputSlot().Connect(add->GetInputSlot(1));
37  add->GetOutputSlot().Connect(output->GetInputSlot(0));
38 
39  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
40  &IsLayerOfType<InputLayer>,
41  &IsLayerOfType<InputLayer>,
42  &IsLayerOfType<AdditionLayer>,
43  &IsLayerOfType<OutputLayer>));
44 
45  // Run optimizer
46  armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
47 
48  // Broadcast reshape layer has been added to the graph correctly
49  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
50  &IsLayerOfType<InputLayer>,
51  &IsLayerOfType<InputLayer>,
52  &IsLayerOfType<ReshapeLayer>,
53  &IsLayerOfType<AdditionLayer>,
54  &IsLayerOfType<OutputLayer>));
55 
56  Layer* const reshapeLayer = GetFirstLayerWithName(graph, reshapeLayerName);
57  BOOST_TEST(reshapeLayer);
58  auto addedReshapeTensorInfo = reshapeLayer->GetOutputSlot().GetTensorInfo();
59 
60  // Tensorshape and the data type are correct
61  BOOST_TEST((addedReshapeTensorInfo.GetShape() == expectedReshapeShape));
62  BOOST_TEST((addedReshapeTensorInfo.GetDataType() == expectedDataType));
63 }

◆ BOOST_AUTO_TEST_CASE() [1/11]

BOOST_AUTO_TEST_CASE ( AddBroadcastReshapeLayerSimpleTest  )

Definition at line 65 of file AddBroadcastReshapeLayerTests.cpp.

References AddBroadcastReshapeLayerOptimizerTest(), and armnn::Float32.

66 {
67  const TensorInfo info0({ 1, 2, 3, 5 }, DataType::Float32);
68  const TensorInfo info1({ 1 }, DataType::Float32);
69  AddBroadcastReshapeLayerOptimizerTest(info0, info1, info0, "Reshape_for:add-1",
70  TensorShape({ 1, 1, 1, 1 }),
71  DataType::Float32);
72 }
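As this and the following cases illustrate, the expected shape of the inserted reshape is the lower-rank input shape left-padded with 1s up to the rank of the other input: { 1 } becomes { 1, 1, 1, 1 } here, { 5 } becomes { 1, 1, 1, 5 } in the 1D test, and { 3, 5 } becomes { 1, 1, 3, 5 } in the 2D test, while the data type of the original input is preserved.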

◆ BOOST_AUTO_TEST_CASE() [2/11]

BOOST_AUTO_TEST_CASE ( AddBroadcastReshapeLayer1DTest  )

Definition at line 74 of file AddBroadcastReshapeLayerTests.cpp.

References AddBroadcastReshapeLayerOptimizerTest(), and armnn::Float32.

75 {
76  const TensorInfo info0({ 1, 2, 3, 5 }, DataType::Float32);
77  const TensorInfo info1({ 5 }, DataType::Float32);
78  const TensorInfo outputInfo({ 1, 1, 1, 5 }, DataType::Float32);
79  AddBroadcastReshapeLayerOptimizerTest(info0, info1, outputInfo, "Reshape_for:add-1",
80  TensorShape({ 1, 1, 1, 5 }),
81  DataType::Float32);
82 }

◆ BOOST_AUTO_TEST_CASE() [3/11]

BOOST_AUTO_TEST_CASE ( AddBroadcastReshapeLayer2DTest  )

Definition at line 84 of file AddBroadcastReshapeLayerTests.cpp.

References AddBroadcastReshapeLayerOptimizerTest(), and armnn::Float32.

85 {
86  const TensorInfo info0({ 1, 2, 3, 5 }, DataType::Float32);
87  const TensorInfo info1({ 3, 5 }, DataType::Float32);
88  const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::Float32);
89  AddBroadcastReshapeLayerOptimizerTest(info0, info1, outputInfo, "Reshape_for:add-1",
90  TensorShape({ 1, 1, 3, 5 }),
91  DataType::Float32);
92 }

◆ BOOST_AUTO_TEST_CASE() [4/11]

BOOST_AUTO_TEST_CASE ( AddBroadcastReshapeLayer3DTest  )

Definition at line 94 of file AddBroadcastReshapeLayerTests.cpp.

References AddBroadcastReshapeLayerOptimizerTest(), and armnn::Float32.

95 {
96  const TensorInfo info0({ 2, 1, 1, 1 }, DataType::Float32);
97  const TensorInfo info1({ 3, 4, 5 }, DataType::Float32);
98  const TensorInfo outputInfo({ 2, 3, 4, 5 }, DataType::Float32);
99  AddBroadcastReshapeLayerOptimizerTest(info0, info1, outputInfo, "Reshape_for:add-1",
100  TensorShape({ 1, 3, 4, 5 }),
101  DataType::Float32);
102 }

◆ BOOST_AUTO_TEST_CASE() [5/11]

BOOST_AUTO_TEST_CASE ( AddBroadcastReshapeLayer3DMergedTest  )

Definition at line 104 of file AddBroadcastReshapeLayerTests.cpp.

References AddBroadcastReshapeLayerOptimizerTest(), and armnn::Float32.

105 {
106  const TensorInfo info0({ 2, 3, 1, 1 }, DataType::Float32);
107  const TensorInfo info1({ 3, 4, 5 }, DataType::Float32);
108  const TensorInfo outputInfo({ 2, 3, 4, 5 }, DataType::Float32);
109  AddBroadcastReshapeLayerOptimizerTest(info0, info1, outputInfo, "Reshape_for:add-1",
110  TensorShape({ 1, 3, 4, 5 }),
111  DataType::Float32);
112 }

◆ BOOST_AUTO_TEST_CASE() [6/11]

BOOST_AUTO_TEST_CASE ( AddBroadcastReshapeLayerSubtractionTest  )

Definition at line 114 of file AddBroadcastReshapeLayerTests.cpp.

References Graph::AddLayer(), Graph::cbegin(), Graph::cend(), CheckSequence(), OutputSlot::Connect(), armnn::Float32, GetFirstLayerWithName(), Layer::GetInputSlot(), Layer::GetOutputSlot(), OutputSlot::GetTensorInfo(), armnn::MakeOptimizations(), Optimizer::Pass(), and OutputSlot::SetTensorInfo().

115 {
116  Graph graph;
117  const TensorInfo info0({ 5 }, DataType::Float32);
118  const TensorInfo info1({ 1, 2, 3, 5 }, DataType::Float32);
119  const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::Float32);
120 
121  auto input0 = graph.AddLayer<InputLayer>(0, "input0");
122  auto input1 = graph.AddLayer<InputLayer>(1, "input1");
123  auto sub = graph.AddLayer<SubtractionLayer>("sub");
124  auto output = graph.AddLayer<OutputLayer>(0, "output");
125  input0->GetOutputSlot().SetTensorInfo(info0);
126  input1->GetOutputSlot().SetTensorInfo(info1);
127  sub->GetOutputSlot().SetTensorInfo(outputInfo);
128 
129  input0->GetOutputSlot().Connect(sub->GetInputSlot(0));
130  input1->GetOutputSlot().Connect(sub->GetInputSlot(1));
131  sub->GetOutputSlot().Connect(output->GetInputSlot(0));
132 
133  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
134  &IsLayerOfType<InputLayer>,
135  &IsLayerOfType<InputLayer>,
136  &IsLayerOfType<SubtractionLayer>,
137  &IsLayerOfType<OutputLayer>));
138 
139  // Run optimizer
140  armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
141 
142  // Broadcast reshape layer has been added to the graph correctly
143  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
144  &IsLayerOfType<InputLayer>,
145  &IsLayerOfType<InputLayer>,
146  &IsLayerOfType<ReshapeLayer>,
147  &IsLayerOfType<SubtractionLayer>,
148  &IsLayerOfType<OutputLayer>));
149 
150  Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:sub-0");
151  BOOST_TEST(reshapeLayer);
152  auto addedReshapeTensorInfo = reshapeLayer->GetOutputSlot().GetTensorInfo();
153 
154  // Tensorshape and the data type are correct
155  BOOST_TEST((addedReshapeTensorInfo.GetShape() == TensorShape({ 1, 1, 1, 5 })));
156  BOOST_TEST((addedReshapeTensorInfo.GetDataType() == DataType::Float32));
157 }

◆ BOOST_AUTO_TEST_CASE() [7/11]

BOOST_AUTO_TEST_CASE ( AddBroadcastReshapeLayerDivisionTest  )

Definition at line 159 of file AddBroadcastReshapeLayerTests.cpp.

References Graph::AddLayer(), Graph::cbegin(), Graph::cend(), CheckSequence(), OutputSlot::Connect(), GetFirstLayerWithName(), Layer::GetInputSlot(), Layer::GetOutputSlot(), OutputSlot::GetTensorInfo(), armnn::MakeOptimizations(), Optimizer::Pass(), armnn::QAsymmS8, and OutputSlot::SetTensorInfo().

160 {
161  Graph graph;
162  const TensorInfo info0({ 1, 4, 5 }, DataType::QAsymmS8);
163  const TensorInfo info1({ 1, 2, 4, 5 }, DataType::QAsymmS8);
164  const TensorInfo outputInfo({ 1, 2, 4, 5 }, DataType::QAsymmS8);
165 
166  auto input0 = graph.AddLayer<InputLayer>(0, "input0");
167  auto input1 = graph.AddLayer<InputLayer>(1, "input1");
168  auto div = graph.AddLayer<DivisionLayer>("div");
169  auto output = graph.AddLayer<OutputLayer>(0, "output");
170  input0->GetOutputSlot().SetTensorInfo(info0);
171  input1->GetOutputSlot().SetTensorInfo(info1);
172  div->GetOutputSlot().SetTensorInfo(outputInfo);
173 
174  input0->GetOutputSlot().Connect(div->GetInputSlot(0));
175  input1->GetOutputSlot().Connect(div->GetInputSlot(1));
176  div->GetOutputSlot().Connect(output->GetInputSlot(0));
177 
178  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
179  &IsLayerOfType<InputLayer>,
180  &IsLayerOfType<InputLayer>,
181  &IsLayerOfType<DivisionLayer>,
182  &IsLayerOfType<OutputLayer>));
183 
184  // Run optimizer
185  armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
186 
187  // Broadcast reshape layer has been added to the graph correctly
188  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
189  &IsLayerOfType<InputLayer>,
190  &IsLayerOfType<InputLayer>,
191  &IsLayerOfType<ReshapeLayer>,
192  &IsLayerOfType<DivisionLayer>,
193  &IsLayerOfType<OutputLayer>));
194 
195  Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:div-0");
196  BOOST_TEST(reshapeLayer);
197  auto addedReshapeTensorInfo = reshapeLayer->GetOutputSlot().GetTensorInfo();
198 
199  // Tensorshape and the data type are correct
200  BOOST_TEST((addedReshapeTensorInfo.GetShape() == TensorShape({ 1, 1, 4, 5 })));
201  BOOST_TEST((addedReshapeTensorInfo.GetDataType() == DataType::QAsymmS8));
202 }

◆ BOOST_AUTO_TEST_CASE() [8/11]

BOOST_AUTO_TEST_CASE ( AddBroadcastReshapeLayerMultiplicationTest  )

Definition at line 204 of file AddBroadcastReshapeLayerTests.cpp.

References Graph::AddLayer(), Graph::cbegin(), Graph::cend(), CheckSequence(), OutputSlot::Connect(), GetFirstLayerWithName(), Layer::GetInputSlot(), Layer::GetOutputSlot(), OutputSlot::GetTensorInfo(), armnn::MakeOptimizations(), Optimizer::Pass(), armnn::QAsymmU8, and OutputSlot::SetTensorInfo().

205 {
206  Graph graph;
207  const TensorInfo info0({ 3, 5 }, DataType::QAsymmU8);
208  const TensorInfo info1({ 1, 2, 3, 5 }, DataType::QAsymmU8);
209  const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);
210 
211  auto input0 = graph.AddLayer<InputLayer>(0, "input0");
212  auto input1 = graph.AddLayer<InputLayer>(1, "input1");
213  auto mul = graph.AddLayer<MultiplicationLayer>("mul");
214  auto output = graph.AddLayer<OutputLayer>(0, "output");
215  input0->GetOutputSlot().SetTensorInfo(info0);
216  input1->GetOutputSlot().SetTensorInfo(info1);
217  mul->GetOutputSlot().SetTensorInfo(outputInfo);
218 
219  input0->GetOutputSlot().Connect(mul->GetInputSlot(0));
220  input1->GetOutputSlot().Connect(mul->GetInputSlot(1));
221  mul->GetOutputSlot().Connect(output->GetInputSlot(0));
222 
223  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
224  &IsLayerOfType<InputLayer>,
225  &IsLayerOfType<InputLayer>,
226  &IsLayerOfType<MultiplicationLayer>,
227  &IsLayerOfType<OutputLayer>));
228 
229  // Run optimizer
230  armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
231 
232  // Broadcast reshape layer has been added to the graph correctly
233  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
234  &IsLayerOfType<InputLayer>,
235  &IsLayerOfType<InputLayer>,
236  &IsLayerOfType<ReshapeLayer>,
237  &IsLayerOfType<MultiplicationLayer>,
238  &IsLayerOfType<OutputLayer>));
239 
240  Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0");
241  BOOST_TEST(reshapeLayer);
242  auto addedReshapeTensorInfo = reshapeLayer->GetOutputSlot().GetTensorInfo();
243 
244  // Tensorshape and the data type are correct
245  BOOST_TEST((addedReshapeTensorInfo.GetShape() == TensorShape({ 1, 1, 3, 5 })));
246  BOOST_TEST((addedReshapeTensorInfo.GetDataType() == DataType::QAsymmU8));
247 }

◆ BOOST_AUTO_TEST_CASE() [9/11]

BOOST_AUTO_TEST_CASE ( AddNoBroadcastReshapeLayerTest  )

Definition at line 249 of file AddBroadcastReshapeLayerTests.cpp.

References Graph::AddLayer(), Graph::cbegin(), Graph::cend(), CheckSequence(), OutputSlot::Connect(), GetFirstLayerWithName(), Layer::GetInputSlot(), Layer::GetOutputSlot(), armnn::MakeOptimizations(), Optimizer::Pass(), armnn::QAsymmU8, and OutputSlot::SetTensorInfo().

250 {
251  Graph graph;
252  const TensorInfo info0({ 1, 1, 1, 1 }, DataType::QAsymmU8);
253  const TensorInfo info1({ 1, 2, 3, 5 }, DataType::QAsymmU8);
254  const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);
255 
256  auto input0 = graph.AddLayer<InputLayer>(0, "input0");
257  auto input1 = graph.AddLayer<InputLayer>(1, "input1");
258  auto mul = graph.AddLayer<MultiplicationLayer>("mul");
259  auto output = graph.AddLayer<OutputLayer>(0, "output");
260  input0->GetOutputSlot().SetTensorInfo(info0);
261  input1->GetOutputSlot().SetTensorInfo(info1);
262  mul->GetOutputSlot().SetTensorInfo(outputInfo);
263 
264  input0->GetOutputSlot().Connect(mul->GetInputSlot(0));
265  input1->GetOutputSlot().Connect(mul->GetInputSlot(1));
266  mul->GetOutputSlot().Connect(output->GetInputSlot(0));
267 
268  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
269  &IsLayerOfType<InputLayer>,
270  &IsLayerOfType<InputLayer>,
271  &IsLayerOfType<MultiplicationLayer>,
272  &IsLayerOfType<OutputLayer>));
273 
274  // Run optimizer
275  armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
276 
277  // Broadcast reshape layer has not been added to the graph
278  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
279  &IsLayerOfType<InputLayer>,
280  &IsLayerOfType<InputLayer>,
281  &IsLayerOfType<MultiplicationLayer>,
282  &IsLayerOfType<OutputLayer>));
283 
284  Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0");
285  BOOST_TEST(!reshapeLayer);
286 }

◆ BOOST_AUTO_TEST_CASE() [10/11]

BOOST_AUTO_TEST_CASE ( ReshapeParentConstLayerTest  )

Definition at line 288 of file AddBroadcastReshapeLayerTests.cpp.

References Graph::AddLayer(), Graph::cbegin(), Graph::cend(), CheckSequence(), OutputSlot::Connect(), GetFirstLayerWithName(), Layer::GetInputSlot(), Layer::GetOutputSlot(), armnn::MakeOptimizations(), Optimizer::Pass(), armnn::QAsymmU8, and OutputSlot::SetTensorInfo().

289 {
290  Graph graph;
291  const TensorInfo info0({ 1, 2, 3, 5 }, DataType::QAsymmU8);
292  const TensorInfo info1({ 5 }, DataType::QAsymmU8);
293  const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);
294 
295  auto input = graph.AddLayer<InputLayer>(0, "input");
296  auto constant = graph.AddLayer<ConstantLayer>("constant");
297  auto mul = graph.AddLayer<MultiplicationLayer>("mul");
298  auto output = graph.AddLayer<OutputLayer>(0, "output");
299 
300  uint8_t tensor[] = { 1, 1, 1, 1, 1 };
301 
302  constant->m_LayerOutput = std::make_unique<ScopedTensorHandle>(ConstTensor(info1, &tensor));
303 
304  input->GetOutputSlot().SetTensorInfo(info0);
305  constant->GetOutputSlot().SetTensorInfo(info1);
306  mul->GetOutputSlot().SetTensorInfo(outputInfo);
307 
308  input->GetOutputSlot().Connect(mul->GetInputSlot(0));
309  constant->GetOutputSlot().Connect(mul->GetInputSlot(1));
310  mul->GetOutputSlot().Connect(output->GetInputSlot(0));
311 
312  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
313  &IsLayerOfType<InputLayer>,
314  &IsLayerOfType<ConstantLayer>,
315  &IsLayerOfType<MultiplicationLayer>,
316  &IsLayerOfType<OutputLayer>));
317 
318  // Run optimizer
319  armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
320 
321  // Broadcast reshape layer has not been added to the graph
322  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
323  &IsLayerOfType<InputLayer>,
324  &IsLayerOfType<ConstantLayer>,
325  &IsLayerOfType<MultiplicationLayer>,
326  &IsLayerOfType<OutputLayer>));
327 
328  TensorShape expectedShape = TensorShape{ 1, 1, 1, 5 };
329  BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetShape() == expectedShape);
330 
331  BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetNumDimensions() == info0.GetNumDimensions());
332 
333  Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0");
334  BOOST_TEST(!reshapeLayer);
335 }

◆ BOOST_AUTO_TEST_CASE() [11/11]

BOOST_AUTO_TEST_CASE ( ReshapeParentConstAddLayerMultipleConnectionsTest  )

Definition at line 337 of file AddBroadcastReshapeLayerTests.cpp.

References Graph::AddLayer(), BOOST_AUTO_TEST_SUITE_END(), Graph::cbegin(), Graph::cend(), CheckSequence(), OutputSlot::Connect(), armnn::Float32, GetFirstLayerWithName(), Layer::GetInputSlot(), Layer::GetOutputSlot(), armnn::MakeOptimizations(), Optimizer::Pass(), and OutputSlot::SetTensorInfo().

338 {
339  // In this test case we recreate the situation where an Addition layer has
340  // a constant second term, e.g. [1,512] + [1]. The AddBroadcastReshapeLayer
341  // should modify the constant tensor info to match the number of dimensions.
342  // However, if this constant term is being reused elsewhere then we shouldn't
343  // modify it. Instead we insert a resize layer.
344 
345  // What we'll do is have two sequential add layers both using the same const tensor.
346  Graph graph;
347  const TensorInfo inputInfo({ 1, 512 }, DataType::Float32);
348  const TensorInfo constantTermInfo({ 1 }, DataType::Float32);
349  const TensorInfo outputInfo({ 1, 512 }, DataType::Float32);
350 
351  auto input = graph.AddLayer<InputLayer>(0, "input");
352  auto constant = graph.AddLayer<ConstantLayer>("constant");
353  auto add1 = graph.AddLayer<AdditionLayer>("add1");
354  auto add2 = graph.AddLayer<AdditionLayer>("add2");
355  auto output = graph.AddLayer<OutputLayer>(0, "output");
356 
357  input->GetOutputSlot().SetTensorInfo(inputInfo);
358  constant->GetOutputSlot().SetTensorInfo(constantTermInfo);
359  float tensor[] = { 2.0f };
360  constant->m_LayerOutput = std::make_unique<ScopedTensorHandle>(ConstTensor(constantTermInfo, &tensor));
361  add1->GetOutputSlot().SetTensorInfo(outputInfo);
362 
363  input->GetOutputSlot().Connect(add1->GetInputSlot(0));
364  constant->GetOutputSlot().Connect(add1->GetInputSlot(1));
365  add1->GetOutputSlot().Connect(add2->GetInputSlot(0));
366  add2->GetOutputSlot().Connect(output->GetInputSlot(0));
367  // This second connection should prevent the modification of the const output tensor.
368  constant->GetOutputSlot().Connect(add2->GetInputSlot(1));
369 
370  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
371  &IsLayerOfType<InputLayer>,
372  &IsLayerOfType<ConstantLayer>,
373  &IsLayerOfType<AdditionLayer>,
374  &IsLayerOfType<AdditionLayer>,
375  &IsLayerOfType<OutputLayer>));
376 
377  // Run optimizer
378  armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
379 
380  // Broadcast reshape should have been added before each addition layer.
381  BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
382  &IsLayerOfType<InputLayer>,
383  &IsLayerOfType<ConstantLayer>,
384  &IsLayerOfType<ReshapeLayer>,
385  &IsLayerOfType<ReshapeLayer>,
386  &IsLayerOfType<AdditionLayer>,
387  &IsLayerOfType<AdditionLayer>,
388  &IsLayerOfType<OutputLayer>));
389 
390  // Ensure the output shape of the constant hasn't changed.
391  BOOST_TEST(constant->m_LayerOutput.get()->GetTensorInfo().GetShape() == constantTermInfo.GetShape());
392  // There should be two extra reshape layers with appropriate names.
393  Layer* const reshapeLayer1 = GetFirstLayerWithName(graph, "Reshape_for:add1-1");
394  Layer* const reshapeLayer2 = GetFirstLayerWithName(graph, "Reshape_for:add2-1");
395  BOOST_TEST(reshapeLayer1);
396  BOOST_TEST(reshapeLayer2);
397 }