ArmNN 20.02 - RefEndToEndTests.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <boost/test/unit_test.hpp>
#include <boost/test/execution_monitor.hpp>

BOOST_AUTO_TEST_SUITE(RefEndToEnd)

std::vector<armnn::BackendId> defaultBackends = {armnn::Compute::CpuRef};

// Abs
BOOST_AUTO_TEST_CASE(RefAbsEndToEndTestFloat32)
{
    std::vector<float> expectedOutput =
    {
        1.f, 1.f, 1.f, 1.f, 5.f, 5.f, 5.f, 5.f,
        3.f, 3.f, 3.f, 3.f, 4.f, 4.f, 4.f, 4.f
    };

    ElementwiseUnarySimpleEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                             UnaryOperation::Abs,
                                                             expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefAbsEndToEndTestUint8)
{
    // Note the expected output will be implicitly quantized by the below test function
    std::vector<float> expectedOutput =
    {
        1.f, 1.f, 1.f, 1.f, 5.f, 5.f, 5.f, 5.f,
        3.f, 3.f, 3.f, 3.f, 4.f, 4.f, 4.f, 4.f
    };

    ElementwiseUnarySimpleEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                              UnaryOperation::Abs,
                                                              expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefAbsEndToEndTestInt16)
{
    // Note the expected output will be implicitly quantized by the below test function
    std::vector<float> expectedOutput =
    {
        1.f, 1.f, 1.f, 1.f, 5.f, 5.f, 5.f, 5.f,
        3.f, 3.f, 3.f, 3.f, 4.f, 4.f, 4.f, 4.f
    };

    ElementwiseUnarySimpleEndToEnd<armnn::DataType::QSymmS16>(defaultBackends,
                                                              UnaryOperation::Abs,
                                                              expectedOutput);
}

// Constant
BOOST_AUTO_TEST_CASE(ConstantUsage_Ref_Float32)
{
    BOOST_TEST(ConstantUsageFloat32Test(defaultBackends));
}

BOOST_AUTO_TEST_CASE(ConstantUsage_Ref_Uint8)
{
    BOOST_TEST(ConstantUsageUint8Test(defaultBackends));
}

BOOST_AUTO_TEST_CASE(Unsigned8)
{
    using namespace armnn;

    // Create runtime in which test will run
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Builds up the structure of the network.
    armnn::INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0, "input");
    IConnectableLayer* softmax = net->AddSoftmaxLayer(SoftmaxDescriptor(), "softmax");
    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Sets the tensors in the network.
    TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    inputTensorInfo.SetQuantizationOffset(100);
    inputTensorInfo.SetQuantizationScale(10000.0f);
    input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);

    TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    outputTensorInfo.SetQuantizationOffset(0);
    outputTensorInfo.SetQuantizationScale(1.0f/255.0f);
    softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    // optimize the network
    IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());

    // Loads it into the runtime.
    NetworkId netId;
    auto error = runtime->LoadNetwork(netId, std::move(optNet));
    BOOST_TEST(error == Status::Success);

    // Creates structures for input & output.
    std::vector<uint8_t> inputData
    {
        1, 10, 3, 200, 5 // Some inputs - one of which is sufficiently larger than the others to saturate softmax.
    };
    std::vector<uint8_t> outputData(5);

    armnn::InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
    };
    armnn::OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
    };

    // Does the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

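    // Why index 3 saturates (a worked check, assuming the usual ArmNN dequantization real = scale * (q - offset)):
    // input[3] dequantizes to 10000 * (200 - 100) = 1,000,000, while every other element is at most
    // 10000 * (10 - 100) = -900,000, so softmax puts essentially all probability mass on index 3.
    // Re-quantizing 1.0 with scale 1/255 and offset 0 gives 255; the remaining outputs quantize to 0.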
    // Checks the results.
    BOOST_TEST(outputData[0] == 0);
    BOOST_TEST(outputData[1] == 0);
    BOOST_TEST(outputData[2] == 0);
    BOOST_TEST(outputData[3] == 255); // softmax has been saturated.
    BOOST_TEST(outputData[4] == 0);
}

BOOST_AUTO_TEST_CASE(TrivialAdd)
{
    // This test was designed to match "AddTwo" in android nn/runtime/test/TestTrivialModel.cpp.

    using namespace armnn;

    // Create runtime in which test will run
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Builds up the structure of the network.
    armnn::INetworkPtr net(INetwork::Create());

    IConnectableLayer* input1 = net->AddInputLayer(0);
    IConnectableLayer* input2 = net->AddInputLayer(1);
    IConnectableLayer* add = net->AddAdditionLayer();
    IConnectableLayer* output = net->AddOutputLayer(0);

    input1->GetOutputSlot(0).Connect(add->GetInputSlot(0));
    input2->GetOutputSlot(0).Connect(add->GetInputSlot(1));
    add->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Sets the tensors in the network.
    TensorInfo tensorInfo(TensorShape({3, 4}), DataType::Float32);
    input1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    input2->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    add->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    // optimize the network
    IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());

    // Loads it into the runtime.
    NetworkId netId;
    runtime->LoadNetwork(netId, std::move(optNet));

    // Creates structures for input & output - matching android nn test.
    std::vector<float> input1Data
    {
        1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f, 8.f, 9.f, 10.f, 11.f, 12.f
    };
    std::vector<float> input2Data
    {
        100.f, 200.f, 300.f, 400.f, 500.f, 600.f, 700.f, 800.f, 900.f, 1000.f, 1100.f, 1200.f
    };
    std::vector<float> outputData(12);

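    // Both inputs share the same shape and data type, so the tensor info bound to input 0 is reused
    // below when wrapping the data for input 1 as well.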
    InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input1Data.data())},
        {1, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input2Data.data())}
    };
    OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
    };

    // Does the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // Checks the results
    BOOST_TEST(outputData[0] == 101);
    BOOST_TEST(outputData[1] == 202);
    BOOST_TEST(outputData[2] == 303);
    BOOST_TEST(outputData[3] == 404);
    BOOST_TEST(outputData[4] == 505);
    BOOST_TEST(outputData[5] == 606);
    BOOST_TEST(outputData[6] == 707);
    BOOST_TEST(outputData[7] == 808);
    BOOST_TEST(outputData[8] == 909);
    BOOST_TEST(outputData[9] == 1010);
    BOOST_TEST(outputData[10] == 1111);
    BOOST_TEST(outputData[11] == 1212);
}

BOOST_AUTO_TEST_CASE(MultipleOutputs)
{
    using namespace armnn;

    // Create runtime in which test will run
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Builds up the structure of the network.
    armnn::INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

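    // BoundedReLu computes min(m_A, max(m_B, input)), with m_A as the upper bound and m_B as the lower
    // bound (m_B defaults to 0). The three descriptors below therefore clamp the input to [-1, 1],
    // [0, 6] and [2, 5] respectively, which matches the expected outputs checked at the end of the test.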
    // ReLu1
    ActivationDescriptor activation1Descriptor;
    activation1Descriptor.m_Function = ActivationFunction::BoundedReLu;
    activation1Descriptor.m_A = 1.f;
    activation1Descriptor.m_B = -1.f;
    IConnectableLayer* activation1 = net->AddActivationLayer(activation1Descriptor);

    // ReLu6
    ActivationDescriptor activation2Descriptor;
    activation2Descriptor.m_Function = ActivationFunction::BoundedReLu;
    activation2Descriptor.m_A = 6.0f;
    IConnectableLayer* activation2 = net->AddActivationLayer(activation2Descriptor);

    // BoundedReLu(min=2, max=5)
    ActivationDescriptor activation3Descriptor;
    activation3Descriptor.m_Function = ActivationFunction::BoundedReLu;
    activation3Descriptor.m_A = 5.0f;
    activation3Descriptor.m_B = 2.0f;
    IConnectableLayer* activation3 = net->AddActivationLayer(activation3Descriptor);

    IConnectableLayer* output1 = net->AddOutputLayer(0);
    IConnectableLayer* output2 = net->AddOutputLayer(1);
    IConnectableLayer* output3 = net->AddOutputLayer(2);

    input->GetOutputSlot(0).Connect(activation1->GetInputSlot(0));
    input->GetOutputSlot(0).Connect(activation2->GetInputSlot(0));
    input->GetOutputSlot(0).Connect(activation3->GetInputSlot(0));

    activation1->GetOutputSlot(0).Connect(output1->GetInputSlot(0));
    activation2->GetOutputSlot(0).Connect(output2->GetInputSlot(0));
    activation3->GetOutputSlot(0).Connect(output3->GetInputSlot(0));

    // Sets the tensors in the network.
    TensorInfo tensorInfo(TensorShape({ 10 }), DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    activation1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    activation2->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    activation3->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    // optimize the network
    IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());

    // Loads it into the runtime.
    NetworkId netId;
    runtime->LoadNetwork(netId, std::move(optNet));

    // Creates structures for input & output.
    const std::vector<float> inputData{ 3.f, 5.f, 2.f, 3.f, 7.f, 0.f, -2.f, -1.f, 3.f, 3.f };

    std::vector<float> output1Data(inputData.size());
    std::vector<float> output2Data(inputData.size());
    std::vector<float> output3Data(inputData.size());

    InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
    };
    OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), output1Data.data())},
        {1, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 1), output2Data.data())},
        {2, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 2), output3Data.data())}
    };

    // Does the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // Checks the results.
    BOOST_TEST(output1Data == std::vector<float>({ 1.f, 1.f, 1.f, 1.f, 1.f, 0.f, -1.f, -1.f, 1.f, 1.f })); // ReLu1
    BOOST_TEST(output2Data == std::vector<float>({ 3.f, 5.f, 2.f, 3.f, 6.f, 0.f, 0.f, 0.f, 3.f, 3.f }));   // ReLu6
    BOOST_TEST(output3Data == std::vector<float>({ 3.f, 5.f, 2.f, 3.f, 5.f, 2.f, 2.f, 2.f, 3.f, 3.f }));   // [2, 5]
}

BOOST_AUTO_TEST_CASE(TrivialMin)
{
    using namespace armnn;

    // Create runtime in which test will run
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Builds up the structure of the network.
    armnn::INetworkPtr net(INetwork::Create());

    IConnectableLayer* input1 = net->AddInputLayer(0);
    IConnectableLayer* input2 = net->AddInputLayer(1);
    IConnectableLayer* min = net->AddMinimumLayer();
    IConnectableLayer* output = net->AddOutputLayer(0);

    input1->GetOutputSlot(0).Connect(min->GetInputSlot(0));
    input2->GetOutputSlot(0).Connect(min->GetInputSlot(1));
    min->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Sets the tensors in the network.
    TensorInfo tensorInfo(TensorShape({1, 1, 1, 4}), DataType::Float32);
    input1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    input2->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    min->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    // optimize the network
    IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());

    // Loads it into the runtime.
    NetworkId netId;
    runtime->LoadNetwork(netId, std::move(optNet));

    // Creates structures for input & output - matching android nn test.
    std::vector<float> input1Data
    {
        1.0f, 2.0f, 3.0f, 4.0f
    };
    std::vector<float> input2Data
    {
        2.0f, 1.0f, 5.0f, 2.0f
    };
    std::vector<float> outputData(4);

    InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input1Data.data())},
        {1, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input2Data.data())}
    };
    OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
    };

    // Does the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // Checks the results
    BOOST_TEST(outputData[0] == 1);
    BOOST_TEST(outputData[1] == 1);
    BOOST_TEST(outputData[2] == 3);
    BOOST_TEST(outputData[3] == 2);
}

BOOST_AUTO_TEST_CASE(RefEqualSimpleEndToEndTest)
{
    const std::vector<uint8_t> expectedOutput({ 1, 1, 1, 1, 0, 0, 0, 0,
                                                0, 0, 0, 0, 1, 1, 1, 1 });

    ComparisonSimpleEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                       ComparisonOperation::Equal,
                                                       expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefGreaterSimpleEndToEndTest)
{
    const std::vector<uint8_t> expectedOutput({ 0, 0, 0, 0, 1, 1, 1, 1,
                                                0, 0, 0, 0, 0, 0, 0, 0 });

    ComparisonSimpleEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                       ComparisonOperation::Greater,
                                                       expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefEqualSimpleEndToEndUint8Test)
{
    const std::vector<uint8_t> expectedOutput({ 1, 1, 1, 1, 0, 0, 0, 0,
                                                0, 0, 0, 0, 1, 1, 1, 1 });

    ComparisonSimpleEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                        ComparisonOperation::Equal,
                                                        expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefGreaterSimpleEndToEndUint8Test)
{
    const std::vector<uint8_t> expectedOutput({ 0, 0, 0, 0, 1, 1, 1, 1,
                                                0, 0, 0, 0, 0, 0, 0, 0 });

    ComparisonSimpleEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                        ComparisonOperation::Greater,
                                                        expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefEqualBroadcastEndToEndTest)
{
    const std::vector<uint8_t> expectedOutput({ 1, 0, 1, 1, 0, 0,
                                                0, 0, 0, 0, 0, 0 });

    ComparisonBroadcastEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                          ComparisonOperation::Equal,
                                                          expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefGreaterBroadcastEndToEndTest)
{
    const std::vector<uint8_t> expectedOutput({ 0, 1, 0, 0, 0, 1,
                                                1, 1, 1, 1, 1, 1 });

    ComparisonBroadcastEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                          ComparisonOperation::Greater,
                                                          expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefEqualBroadcastEndToEndUint8Test)
{
    const std::vector<uint8_t> expectedOutput({ 1, 0, 1, 1, 0, 0,
                                                0, 0, 0, 0, 0, 0 });

    ComparisonBroadcastEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                           ComparisonOperation::Equal,
                                                           expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefGreaterBroadcastEndToEndUint8Test)
{
    const std::vector<uint8_t> expectedOutput({ 0, 1, 0, 0, 0, 1,
                                                1, 1, 1, 1, 1, 1 });

    ComparisonBroadcastEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                           ComparisonOperation::Greater,
                                                           expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndFloat32NHWCTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndUint8NHWCTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndQSymm16NHWCTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndFloat32NCHWTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndUint8NCHWTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndQSymm16NCHWTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexFloat32NHWCTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexUint8NHWCTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexQSymm16NHWCTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexFloat32NCHWTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexUint8NCHWTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexQSymm16NCHWTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim0Test)
{
    ConcatDim0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim0Uint8Test)
{
    ConcatDim0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim1Test)
{
    ConcatDim1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim1Uint8Test)
{
    ConcatDim1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim2Test)
{
    ConcatDim2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim2Uint8Test)
{
    ConcatDim2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim3Test)
{
    ConcatDim3EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim3Uint8Test)
{
    ConcatDim3EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherFloatTest)
{
    GatherEndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherUint8Test)
{
    GatherEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherInt16Test)
{
    GatherEndToEnd<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherMultiDimFloatTest)
{
    GatherMultiDimEndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherMultiDimUint8Test)
{
    GatherMultiDimEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherMultiDimInt16Test)
{
    GatherMultiDimEndToEnd<armnn::DataType::QSymmS16>(defaultBackends);
}

// DepthToSpace
BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNchwFloat32)
{
    DepthToSpaceEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNchwFloat16)
{
    DepthToSpaceEndToEnd<armnn::DataType::Float16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNchwUint8)
{
    DepthToSpaceEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNchwInt16)
{
    DepthToSpaceEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNhwcFloat32)
{
    DepthToSpaceEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNhwcFloat16)
{
    DepthToSpaceEndToEnd<armnn::DataType::Float16>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNhwcUint8)
{
    DepthToSpaceEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNhwcInt16)
{
    DepthToSpaceEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

// Dequantize
BOOST_AUTO_TEST_CASE(DequantizeEndToEndSimpleTest)
{
    DequantizeEndToEndSimple<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(DequantizeEndToEndOffsetTest)
{
    DequantizeEndToEndOffset<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(DequantizeEndToEndSimpleInt16Test)
{
    DequantizeEndToEndSimple<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(DequantizeEndToEndOffsetInt16Test)
{
    DequantizeEndToEndOffset<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefDetectionPostProcessRegularNmsTest)
{
    std::vector<float> boxEncodings({
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f
    });
    std::vector<float> scores({
        0.0f, 0.9f, 0.8f,
        0.0f, 0.75f, 0.72f,
        0.0f, 0.6f, 0.5f,
        0.0f, 0.93f, 0.95f,
        0.0f, 0.5f, 0.4f,
        0.0f, 0.3f, 0.2f
    });
    std::vector<float> anchors({
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 100.5f, 1.0f, 1.0f
    });
    DetectionPostProcessRegularNmsEndToEnd<armnn::DataType::Float32>(defaultBackends, boxEncodings, scores, anchors);
}

inline void QuantizeData(uint8_t* quant, const float* dequant, const TensorInfo& info)
{
    for (size_t i = 0; i < info.GetNumElements(); i++)
    {
        quant[i] = armnn::Quantize<uint8_t>(dequant[i], info.GetQuantizationScale(), info.GetQuantizationOffset());
    }
}
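// Note on the helper above: armnn::Quantize<uint8_t> maps a float value into quantized space as
// (approximately) clamp(round(value / scale) + offset, 0, 255), i.e. the inverse of the usual
// dequantization real = scale * (q - offset). The tests below rely on this to feed the same float
// test vectors through the QAsymmU8 path.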

BOOST_AUTO_TEST_CASE(RefDetectionPostProcessRegularNmsUint8Test)
{
    armnn::TensorInfo boxEncodingsInfo({ 1, 6, 4 }, armnn::DataType::Float32);
    armnn::TensorInfo scoresInfo({ 1, 6, 3 }, armnn::DataType::Float32);
    armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32);

    boxEncodingsInfo.SetQuantizationScale(1.0f);
    boxEncodingsInfo.SetQuantizationOffset(1);
    scoresInfo.SetQuantizationScale(0.01f);
    scoresInfo.SetQuantizationOffset(0);
    anchorsInfo.SetQuantizationScale(0.5f);
    anchorsInfo.SetQuantizationOffset(0);

    std::vector<float> boxEncodings({
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f
    });
    std::vector<float> scores({
        0.0f, 0.9f, 0.8f,
        0.0f, 0.75f, 0.72f,
        0.0f, 0.6f, 0.5f,
        0.0f, 0.93f, 0.95f,
        0.0f, 0.5f, 0.4f,
        0.0f, 0.3f, 0.2f
    });
    std::vector<float> anchors({
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 100.5f, 1.0f, 1.0f
    });

    std::vector<uint8_t> qBoxEncodings(boxEncodings.size(), 0);
    std::vector<uint8_t> qScores(scores.size(), 0);
    std::vector<uint8_t> qAnchors(anchors.size(), 0);
    QuantizeData(qBoxEncodings.data(), boxEncodings.data(), boxEncodingsInfo);
    QuantizeData(qScores.data(), scores.data(), scoresInfo);
    QuantizeData(qAnchors.data(), anchors.data(), anchorsInfo);
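    // The trailing scalar arguments below mirror the quantization parameters set on the TensorInfos
    // above - scale/offset for the box encodings (1.0f, 1), scores (0.01f, 0) and anchors (0.5f, 0) -
    // so the end-to-end helper can dequantize the data again internally (parameter order assumed from
    // those values).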
    DetectionPostProcessRegularNmsEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, qBoxEncodings,
                                                                      qScores, qAnchors,
                                                                      1.0f, 1, 0.01f, 0, 0.5f, 0);
}

BOOST_AUTO_TEST_CASE(RefDetectionPostProcessFastNmsTest)
{
    std::vector<float> boxEncodings({
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f
    });
    std::vector<float> scores({
        0.0f, 0.9f, 0.8f,
        0.0f, 0.75f, 0.72f,
        0.0f, 0.6f, 0.5f,
        0.0f, 0.93f, 0.95f,
        0.0f, 0.5f, 0.4f,
        0.0f, 0.3f, 0.2f
    });
    std::vector<float> anchors({
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 100.5f, 1.0f, 1.0f
    });
    DetectionPostProcessFastNmsEndToEnd<armnn::DataType::Float32>(defaultBackends, boxEncodings, scores, anchors);
}

BOOST_AUTO_TEST_CASE(RefDetectionPostProcessFastNmsUint8Test)
{
    armnn::TensorInfo boxEncodingsInfo({ 1, 6, 4 }, armnn::DataType::Float32);
    armnn::TensorInfo scoresInfo({ 1, 6, 3 }, armnn::DataType::Float32);
    armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32);

    boxEncodingsInfo.SetQuantizationScale(1.0f);
    boxEncodingsInfo.SetQuantizationOffset(1);
    scoresInfo.SetQuantizationScale(0.01f);
    scoresInfo.SetQuantizationOffset(0);
    anchorsInfo.SetQuantizationScale(0.5f);
    anchorsInfo.SetQuantizationOffset(0);

    std::vector<float> boxEncodings({
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f
    });
    std::vector<float> scores({
        0.0f, 0.9f, 0.8f,
        0.0f, 0.75f, 0.72f,
        0.0f, 0.6f, 0.5f,
        0.0f, 0.93f, 0.95f,
        0.0f, 0.5f, 0.4f,
        0.0f, 0.3f, 0.2f
    });
    std::vector<float> anchors({
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 100.5f, 1.0f, 1.0f
    });

    std::vector<uint8_t> qBoxEncodings(boxEncodings.size(), 0);
    std::vector<uint8_t> qScores(scores.size(), 0);
    std::vector<uint8_t> qAnchors(anchors.size(), 0);
    QuantizeData(qBoxEncodings.data(), boxEncodings.data(), boxEncodingsInfo);
    QuantizeData(qScores.data(), scores.data(), scoresInfo);
    QuantizeData(qAnchors.data(), anchors.data(), anchorsInfo);
    DetectionPostProcessFastNmsEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, qBoxEncodings,
                                                                   qScores, qAnchors,
                                                                   1.0f, 1, 0.01f, 0, 0.5f, 0);
}

// LogSoftmax
BOOST_AUTO_TEST_CASE(RefLogSoftmaxEndToEndTest)
{
    LogSoftmaxEndToEndTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefPreluEndToEndTestFloat32)
{
    PreluEndToEndNegativeTest<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefPreluEndToEndTestUint8)
{
    PreluEndToEndPositiveTest<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefPreluEndToEndTestQSymm16)
{
    PreluEndToEndPositiveTest<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSpaceToDepthNhwcEndToEndTest1)
{
    SpaceToDepthNhwcEndToEndTest1(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSpaceToDepthNchwEndToEndTest1)
{
    SpaceToDepthNchwEndToEndTest1(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSpaceToDepthNhwcEndToEndTest2)
{
    SpaceToDepthNhwcEndToEndTest2(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSpaceToDepthNchwEndToEndTest2)
{
    SpaceToDepthNchwEndToEndTest2(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter1dEndToEndTest)
{
    Splitter1dEndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter1dEndToEndUint8Test)
{
    Splitter1dEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter2dDim0EndToEndTest)
{
    Splitter2dDim0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter2dDim1EndToEndTest)
{
    Splitter2dDim1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter2dDim0EndToEndUint8Test)
{
    Splitter2dDim0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter2dDim1EndToEndUint8Test)
{
    Splitter2dDim1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim0EndToEndTest)
{
    Splitter3dDim0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim1EndToEndTest)
{
    Splitter3dDim1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim2EndToEndTest)
{
    Splitter3dDim2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim0EndToEndUint8Test)
{
    Splitter3dDim0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim1EndToEndUint8Test)
{
    Splitter3dDim1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim2EndToEndUint8Test)
{
    Splitter3dDim2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim0EndToEndTest)
{
    Splitter4dDim0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim1EndToEndTest)
{
    Splitter4dDim1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim2EndToEndTest)
{
    Splitter4dDim2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim3EndToEndTest)
{
    Splitter4dDim3EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim0EndToEndUint8Test)
{
    Splitter4dDim0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim1EndToEndUint8Test)
{
    Splitter4dDim1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim2EndToEndUint8Test)
{
    Splitter4dDim2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim3EndToEndUint8Test)
{
    Splitter4dDim3EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

// TransposeConvolution2d
BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndFloatNchwTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::Float32, armnn::DataType::Float32>(
        defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndUint8NchwTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
        defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndInt16NchwTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
        defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndFloatNhwcTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::Float32, armnn::DataType::Float32>(
        defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndUint8NhwcTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
        defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndInt16NhwcTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
        defaultBackends, armnn::DataLayout::NHWC);
}

// Resize Bilinear
BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndFloatNchwTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndUint8NchwTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndInt16NchwTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndFloatNhwcTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndUint8NhwcTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndInt16NhwcTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

// Resize NearestNeighbor
BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndFloatNchwTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndUint8NchwTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndInt16NchwTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndFloatNhwcTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndUint8NhwcTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndInt16NhwcTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

// InstanceNormalization
BOOST_AUTO_TEST_CASE(RefInstanceNormalizationNhwcEndToEndTest1)
{
    InstanceNormalizationNhwcEndToEndTest1(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefInstanceNormalizationNchwEndToEndTest1)
{
    InstanceNormalizationNchwEndToEndTest1(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefInstanceNormalizationNhwcEndToEndTest2)
{
    InstanceNormalizationNhwcEndToEndTest2(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefInstanceNormalizationNchwEndToEndTest2)
{
    InstanceNormalizationNchwEndToEndTest2(defaultBackends);
}

// ArgMinMax
BOOST_AUTO_TEST_CASE(RefArgMaxSimpleTest)
{
    ArgMaxEndToEndSimple<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxSimpleUint8Test)
{
    ArgMaxEndToEndSimple<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinSimpleTest)
{
    ArgMinEndToEndSimple<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinSimpleUint8Test)
{
    ArgMinEndToEndSimple<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis0Test)
{
    ArgMaxAxis0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis0Uint8Test)
{
    ArgMaxAxis0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis0Test)
{
    ArgMinAxis0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis0Uint8Test)
{
    ArgMinAxis0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis1Test)
{
    ArgMaxAxis1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis1Uint8Test)
{
    ArgMaxAxis1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis1Test)
{
    ArgMinAxis1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis1Uint8Test)
{
    ArgMinAxis1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis2Test)
{
    ArgMaxAxis2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis2Uint8Test)
{
    ArgMaxAxis2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis2Test)
{
    ArgMinAxis2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis2Uint8Test)
{
    ArgMinAxis2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis3Test)
{
    ArgMaxAxis3EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis3Uint8Test)
{
    ArgMaxAxis3EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis3Test)
{
    ArgMinAxis3EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis3Uint8Test)
{
    ArgMinAxis3EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

#if !defined(__ANDROID__)
// Only run these tests on non-Android platforms
BOOST_AUTO_TEST_CASE(RefImportNonAlignedPointerTest)
{
    ImportNonAlignedInputPointerTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefExportNonAlignedPointerTest)
{
    ExportNonAlignedOutputPointerTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefImportAlignedPointerTest)
{
    ImportAlignedPointerTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefImportOnlyWorkload)
{
    ImportOnlyWorkload(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefExportOnlyWorkload)
{
    ExportOnlyWorkload(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefImportAndExportWorkload)
{
    ImportAndExportWorkload(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefExportOutputWithSeveralOutputSlotConnectionsTest)
{
    ExportOutputWithSeveralOutputSlotConnectionsTest(defaultBackends);
}

#endif

BOOST_AUTO_TEST_SUITE_END()