ArmNN 21.11
AddBroadcastReshapeLayerTests.cpp
//
// Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "../GraphUtils.hpp"
#include "../TestUtils.hpp"

#include <Optimizer.hpp>

#include <doctest/doctest.h>

using namespace armnn;

TEST_SUITE("Optimizer")
{
using namespace optimizations;

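// Shared helper: builds the graph (input0, input1) -> Addition -> output, runs the
// AddBroadcastReshapeLayer optimization, and verifies that a ReshapeLayer with the
// expected name, output shape and data type has been inserted on the broadcast input.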
void AddBroadcastReshapeLayerOptimizerTest(const TensorInfo& info0,
                                           const TensorInfo& info1,
                                           const TensorInfo& outputInfo,
                                           const std::string& reshapeLayerName,
                                           const TensorShape& expectedReshapeShape,
                                           const DataType expectedDataType)
{
    Graph graph;

    auto input0 = graph.AddLayer<InputLayer>(0, "input0");
    auto input1 = graph.AddLayer<InputLayer>(1, "input1");
    auto add = graph.AddLayer<AdditionLayer>("add");
    auto output = graph.AddLayer<OutputLayer>(0, "output");
    input0->GetOutputSlot().SetTensorInfo(info0);
    input1->GetOutputSlot().SetTensorInfo(info1);
    add->GetOutputSlot().SetTensorInfo(outputInfo);

    input0->GetOutputSlot().Connect(add->GetInputSlot(0));
    input1->GetOutputSlot().Connect(add->GetInputSlot(1));
    add->GetOutputSlot().Connect(output->GetInputSlot(0));

    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<AdditionLayer>,
                        &IsLayerOfType<OutputLayer>));

    // Run optimizer
    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

    // Broadcast reshape layer has been added to the graph correctly
    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<ReshapeLayer>,
                        &IsLayerOfType<AdditionLayer>,
                        &IsLayerOfType<OutputLayer>));

    Layer* const reshapeLayer = GetFirstLayerWithName(graph, reshapeLayerName);
    CHECK(reshapeLayer);
    auto addedReshapeTensorInfo = reshapeLayer->GetOutputSlot().GetTensorInfo();

    // Tensor shape and data type are correct
    CHECK((addedReshapeTensorInfo.GetShape() == expectedReshapeShape));
    CHECK((addedReshapeTensorInfo.GetDataType() == expectedDataType));
}

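// Note: the optimization names each inserted layer "Reshape_for:<parent layer>-<input slot>",
// e.g. "Reshape_for:add-1" for a reshape feeding input slot 1 of the layer named "add";
// the expected names below rely on that convention.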
TEST_CASE("AddBroadcastReshapeLayerSimpleTest")
{
    const TensorInfo info0({ 1, 2, 3, 5 }, DataType::Float32);
    const TensorInfo info1({ 1 }, DataType::Float32);
    AddBroadcastReshapeLayerOptimizerTest(info0, info1, info0, "Reshape_for:add-1",
                                          TensorShape({ 1, 1, 1, 1 }),
                                          DataType::Float32);
}

TEST_CASE("AddBroadcastReshapeLayer1DTest")
{
    const TensorInfo info0({ 1, 2, 3, 5 }, DataType::Float32);
    const TensorInfo info1({ 5 }, DataType::Float32);
    const TensorInfo outputInfo({ 1, 1, 1, 5 }, DataType::Float32);
    AddBroadcastReshapeLayerOptimizerTest(info0, info1, outputInfo, "Reshape_for:add-1",
                                          TensorShape({ 1, 1, 1, 5 }),
                                          DataType::Float32);
}

TEST_CASE("AddBroadcastReshapeLayer2DTest")
{
    const TensorInfo info0({ 1, 2, 3, 5 }, DataType::Float32);
    const TensorInfo info1({ 3, 5 }, DataType::Float32);
    const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::Float32);
    AddBroadcastReshapeLayerOptimizerTest(info0, info1, outputInfo, "Reshape_for:add-1",
                                          TensorShape({ 1, 1, 3, 5 }),
                                          DataType::Float32);
}

TEST_CASE("AddBroadcastReshapeLayer3DTest")
{
    const TensorInfo info0({ 2, 1, 1, 1 }, DataType::Float32);
    const TensorInfo info1({ 3, 4, 5 }, DataType::Float32);
    const TensorInfo outputInfo({ 2, 3, 4, 5 }, DataType::Float32);
    AddBroadcastReshapeLayerOptimizerTest(info0, info1, outputInfo, "Reshape_for:add-1",
                                          TensorShape({ 1, 3, 4, 5 }),
                                          DataType::Float32);
}

TEST_CASE("AddBroadcastReshapeLayer3DMergedTest")
{
    const TensorInfo info0({ 2, 3, 1, 1 }, DataType::Float32);
    const TensorInfo info1({ 3, 4, 5 }, DataType::Float32);
    const TensorInfo outputInfo({ 2, 3, 4, 5 }, DataType::Float32);
    AddBroadcastReshapeLayerOptimizerTest(info0, info1, outputInfo, "Reshape_for:add-1",
                                          TensorShape({ 1, 3, 4, 5 }),
                                          DataType::Float32);
}

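// The tests above illustrate the reshape rule: the lower-rank input is expanded to the
// rank of the other input by prepending dimensions of 1 (e.g. { 3, 4, 5 } becomes
// { 1, 3, 4, 5 }, and { 5 } becomes { 1, 1, 1, 5 }).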
TEST_CASE("AddBroadcastReshapeLayerSubtractionTest")
{
    Graph graph;
    const TensorInfo info0({ 5 }, DataType::Float32);
    const TensorInfo info1({ 1, 2, 3, 5 }, DataType::Float32);
    const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::Float32);

    auto input0 = graph.AddLayer<InputLayer>(0, "input0");
    auto input1 = graph.AddLayer<InputLayer>(1, "input1");
    auto sub = graph.AddLayer<SubtractionLayer>("sub");
    auto output = graph.AddLayer<OutputLayer>(0, "output");
    input0->GetOutputSlot().SetTensorInfo(info0);
    input1->GetOutputSlot().SetTensorInfo(info1);
    sub->GetOutputSlot().SetTensorInfo(outputInfo);

    input0->GetOutputSlot().Connect(sub->GetInputSlot(0));
    input1->GetOutputSlot().Connect(sub->GetInputSlot(1));
    sub->GetOutputSlot().Connect(output->GetInputSlot(0));

    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<SubtractionLayer>,
                        &IsLayerOfType<OutputLayer>));

    // Run optimizer
    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

    // Broadcast reshape layer has been added to the graph correctly
    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<ReshapeLayer>,
                        &IsLayerOfType<SubtractionLayer>,
                        &IsLayerOfType<OutputLayer>));

    Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:sub-0");
    CHECK(reshapeLayer);
    auto addedReshapeTensorInfo = reshapeLayer->GetOutputSlot().GetTensorInfo();

    // Tensor shape and data type are correct
    CHECK((addedReshapeTensorInfo.GetShape() == TensorShape({ 1, 1, 1, 5 })));
    CHECK((addedReshapeTensorInfo.GetDataType() == DataType::Float32));
}

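// The inserted reshape must also preserve quantized data types; the next two tests
// repeat the check with QAsymmS8 and QAsymmU8 tensors.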
TEST_CASE("AddBroadcastReshapeLayerDivisionTest")
{
    Graph graph;
    const TensorInfo info0({ 1, 4, 5 }, DataType::QAsymmS8);
    const TensorInfo info1({ 1, 2, 4, 5 }, DataType::QAsymmS8);
    const TensorInfo outputInfo({ 1, 2, 4, 5 }, DataType::QAsymmS8);

    auto input0 = graph.AddLayer<InputLayer>(0, "input0");
    auto input1 = graph.AddLayer<InputLayer>(1, "input1");
    auto div = graph.AddLayer<DivisionLayer>("div");
    auto output = graph.AddLayer<OutputLayer>(0, "output");
    input0->GetOutputSlot().SetTensorInfo(info0);
    input1->GetOutputSlot().SetTensorInfo(info1);
    div->GetOutputSlot().SetTensorInfo(outputInfo);

    input0->GetOutputSlot().Connect(div->GetInputSlot(0));
    input1->GetOutputSlot().Connect(div->GetInputSlot(1));
    div->GetOutputSlot().Connect(output->GetInputSlot(0));

    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<DivisionLayer>,
                        &IsLayerOfType<OutputLayer>));

    // Run optimizer
    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

    // Broadcast reshape layer has been added to the graph correctly
    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<ReshapeLayer>,
                        &IsLayerOfType<DivisionLayer>,
                        &IsLayerOfType<OutputLayer>));

    Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:div-0");
    CHECK(reshapeLayer);
    auto addedReshapeTensorInfo = reshapeLayer->GetOutputSlot().GetTensorInfo();

    // Tensor shape and data type are correct
    CHECK((addedReshapeTensorInfo.GetShape() == TensorShape({ 1, 1, 4, 5 })));
    CHECK((addedReshapeTensorInfo.GetDataType() == DataType::QAsymmS8));
}

TEST_CASE("AddBroadcastReshapeLayerMultiplicationTest")
{
    Graph graph;
    const TensorInfo info0({ 3, 5 }, DataType::QAsymmU8);
    const TensorInfo info1({ 1, 2, 3, 5 }, DataType::QAsymmU8);
    const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);

    auto input0 = graph.AddLayer<InputLayer>(0, "input0");
    auto input1 = graph.AddLayer<InputLayer>(1, "input1");
    auto mul = graph.AddLayer<MultiplicationLayer>("mul");
    auto output = graph.AddLayer<OutputLayer>(0, "output");
    input0->GetOutputSlot().SetTensorInfo(info0);
    input1->GetOutputSlot().SetTensorInfo(info1);
    mul->GetOutputSlot().SetTensorInfo(outputInfo);

    input0->GetOutputSlot().Connect(mul->GetInputSlot(0));
    input1->GetOutputSlot().Connect(mul->GetInputSlot(1));
    mul->GetOutputSlot().Connect(output->GetInputSlot(0));

    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<MultiplicationLayer>,
                        &IsLayerOfType<OutputLayer>));

    // Run optimizer
    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

    // Broadcast reshape layer has been added to the graph correctly
    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<ReshapeLayer>,
                        &IsLayerOfType<MultiplicationLayer>,
                        &IsLayerOfType<OutputLayer>));

    Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0");
    CHECK(reshapeLayer);
    auto addedReshapeTensorInfo = reshapeLayer->GetOutputSlot().GetTensorInfo();

    // Tensor shape and data type are correct
    CHECK((addedReshapeTensorInfo.GetShape() == TensorShape({ 1, 1, 3, 5 })));
    CHECK((addedReshapeTensorInfo.GetDataType() == DataType::QAsymmU8));
}

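// When both inputs already have the same number of dimensions (here two 4D tensors),
// no broadcast reshape is needed and the graph must be left unchanged.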
TEST_CASE("AddNoBroadcastReshapeLayerTest")
{
    Graph graph;
    const TensorInfo info0({ 1, 1, 1, 1 }, DataType::QAsymmU8);
    const TensorInfo info1({ 1, 2, 3, 5 }, DataType::QAsymmU8);
    const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);

    auto input0 = graph.AddLayer<InputLayer>(0, "input0");
    auto input1 = graph.AddLayer<InputLayer>(1, "input1");
    auto mul = graph.AddLayer<MultiplicationLayer>("mul");
    auto output = graph.AddLayer<OutputLayer>(0, "output");
    input0->GetOutputSlot().SetTensorInfo(info0);
    input1->GetOutputSlot().SetTensorInfo(info1);
    mul->GetOutputSlot().SetTensorInfo(outputInfo);

    input0->GetOutputSlot().Connect(mul->GetInputSlot(0));
    input1->GetOutputSlot().Connect(mul->GetInputSlot(1));
    mul->GetOutputSlot().Connect(output->GetInputSlot(0));

    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<MultiplicationLayer>,
                        &IsLayerOfType<OutputLayer>));

    // Run optimizer
    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

    // Broadcast reshape layer has not been added to the graph
    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<MultiplicationLayer>,
                        &IsLayerOfType<OutputLayer>));

    Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0");
    CHECK(!reshapeLayer);
}

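// When the broadcast input is produced by a ConstantLayer, the optimization rewrites
// the constant's TensorInfo in place instead of inserting a ReshapeLayer. The two tests
// below cover the single-connection case and the shared-constant case.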
TEST_CASE("ReshapeParentConstLayerTest")
{
    Graph graph;
    const TensorInfo info0({ 1, 2, 3, 5 }, DataType::QAsymmU8);
    const TensorInfo info1({ 5 }, DataType::QAsymmU8, 0.0f, 0, true);
    const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);

    auto input = graph.AddLayer<InputLayer>(0, "input");
    auto constant = graph.AddLayer<ConstantLayer>("constant");
    auto mul = graph.AddLayer<MultiplicationLayer>("mul");
    auto output = graph.AddLayer<OutputLayer>(0, "output");

    uint8_t tensor[] = { 1, 1, 1, 1, 1 };

    constant->m_LayerOutput = std::make_unique<ScopedTensorHandle>(ConstTensor(info1, &tensor));

    input->GetOutputSlot().SetTensorInfo(info0);
    constant->GetOutputSlot().SetTensorInfo(info1);
    mul->GetOutputSlot().SetTensorInfo(outputInfo);

    input->GetOutputSlot().Connect(mul->GetInputSlot(0));
    constant->GetOutputSlot().Connect(mul->GetInputSlot(1));
    mul->GetOutputSlot().Connect(output->GetInputSlot(0));

    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<ConstantLayer>,
                        &IsLayerOfType<MultiplicationLayer>,
                        &IsLayerOfType<OutputLayer>));

    // Run optimizer
    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

    // Broadcast reshape layer has not been added to the graph
    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<ConstantLayer>,
                        &IsLayerOfType<MultiplicationLayer>,
                        &IsLayerOfType<OutputLayer>));

    TensorShape expectedShape = TensorShape{ 1, 1, 1, 5 };
    CHECK(constant->m_LayerOutput.get()->GetTensorInfo().GetShape() == expectedShape);

    CHECK(constant->m_LayerOutput.get()->GetTensorInfo().GetNumDimensions() == info0.GetNumDimensions());

    Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-0");
    CHECK(!reshapeLayer);
}

TEST_CASE("ReshapeParentConstAddLayerMultipleConnectionsTest")
{
    // In this test case we recreate the situation where an Addition layer has
    // a constant second term, e.g. [1,512] + [1]. The AddBroadcastReshapeLayer
    // optimization should modify the constant tensor info to match the number of
    // dimensions. However, if this constant term is reused elsewhere then we
    // shouldn't modify it. Instead we insert a reshape layer.

    // To exercise this, two sequential Addition layers share the same constant tensor.
    Graph graph;
    const TensorInfo inputInfo({ 1, 512 }, DataType::Float32);
    const TensorInfo constantTermInfo({ 1 }, DataType::Float32, 0.0f, 0, true);
    const TensorInfo outputInfo({ 1, 512 }, DataType::Float32);

    auto input = graph.AddLayer<InputLayer>(0, "input");
    auto constant = graph.AddLayer<ConstantLayer>("constant");
    auto add1 = graph.AddLayer<AdditionLayer>("add1");
    auto add2 = graph.AddLayer<AdditionLayer>("add2");
    auto output = graph.AddLayer<OutputLayer>(0, "output");

    input->GetOutputSlot().SetTensorInfo(inputInfo);
    constant->GetOutputSlot().SetTensorInfo(constantTermInfo);
    float tensor[] = { 2.0f };
    constant->m_LayerOutput = std::make_unique<ScopedTensorHandle>(ConstTensor(constantTermInfo, &tensor));
    add1->GetOutputSlot().SetTensorInfo(outputInfo);

    input->GetOutputSlot().Connect(add1->GetInputSlot(0));
    constant->GetOutputSlot().Connect(add1->GetInputSlot(1));
    add1->GetOutputSlot().Connect(add2->GetInputSlot(0));
    add2->GetOutputSlot().Connect(output->GetInputSlot(0));
    // This second connection should prevent the modification of the const output tensor.
    constant->GetOutputSlot().Connect(add2->GetInputSlot(1));

    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<ConstantLayer>,
                        &IsLayerOfType<AdditionLayer>,
                        &IsLayerOfType<AdditionLayer>,
                        &IsLayerOfType<OutputLayer>));

    // Run optimizer
    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

    // Broadcast reshape should have been added before each addition layer.
    CHECK(CheckSequence(graph.cbegin(), graph.cend(),
                        &IsLayerOfType<InputLayer>,
                        &IsLayerOfType<ConstantLayer>,
                        &IsLayerOfType<ReshapeLayer>,
                        &IsLayerOfType<ReshapeLayer>,
                        &IsLayerOfType<AdditionLayer>,
                        &IsLayerOfType<AdditionLayer>,
                        &IsLayerOfType<OutputLayer>));

    // Ensure the output shape of the constant hasn't changed.
    CHECK(constant->m_LayerOutput.get()->GetTensorInfo().GetShape() == constantTermInfo.GetShape());
    // There should be two extra reshape layers with appropriate names.
    Layer* const reshapeLayer1 = GetFirstLayerWithName(graph, "Reshape_for:add1-1");
    Layer* const reshapeLayer2 = GetFirstLayerWithName(graph, "Reshape_for:add2-1");
    CHECK(reshapeLayer1);
    CHECK(reshapeLayer2);
}

}
TEST_SUITE("TestConstTensorLayerVisitor")
A layer that the constant data can be bound to.
Optimizer::Optimizations MakeOptimizations(Args &&... args)
Definition: Optimizer.hpp:43
armnn::Layer * GetFirstLayerWithName(armnn::Graph &graph, const std::string &name)
Definition: GraphUtils.cpp:22
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:417
ConstIterator cbegin() const
Returns const iterator pointing to the beginning of the list. Lowercase for range-based for loops...
Definition: Graph.hpp:175
int Connect(InputSlot &destination)
Definition: Layer.cpp:83
static void Pass(Graph &graph, const Optimizations &optimizations)
Definition: Optimizer.cpp:16
Copyright (c) 2021 ARM Limited and Contributors.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:316
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
DataType
Definition: Types.hpp:35
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
This layer represents an addition operation.
This layer represents a subtraction operation.
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
bool CheckSequence(const armnn::Graph::ConstIterator first, const armnn::Graph::ConstIterator last)
Definition: TestUtils.hpp:21
void SetTensorInfo(const TensorInfo &tensorInfo) override
Definition: Layer.cpp:58
This layer represents a division operation.
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:318
ConstIterator cend() const
Returns const iterator pointing to the end of the list. Lowercase for range-based for loops...
Definition: Graph.hpp:177
OptimizeForType< Layer, AddBroadcastReshapeLayerImpl > AddBroadcastReshapeLayer
This layer represents a multiplication operation.
const TensorInfo & GetTensorInfo() const override
Definition: Layer.cpp:63
unsigned int GetNumDimensions() const
Definition: Tensor.hpp:195