ArmNN 20.05: RefEndToEndTests.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <boost/test/unit_test.hpp>
#include <boost/test/execution_monitor.hpp>

BOOST_AUTO_TEST_SUITE(RefEndToEnd)

std::vector<armnn::BackendId> defaultBackends = {armnn::Compute::CpuRef};

// Abs
BOOST_AUTO_TEST_CASE(RefAbsEndToEndTestFloat32)
{
    std::vector<float> expectedOutput =
    {
        1.f, 1.f, 1.f, 1.f, 5.f, 5.f, 5.f, 5.f,
        3.f, 3.f, 3.f, 3.f, 4.f, 4.f, 4.f, 4.f
    };

    ElementwiseUnarySimpleEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                             UnaryOperation::Abs,
                                                             expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefAbsEndToEndTestUint8)
{
    // Note the expected output will be implicitly quantized by the below test function
    std::vector<float> expectedOutput =
    {
        1.f, 1.f, 1.f, 1.f, 5.f, 5.f, 5.f, 5.f,
        3.f, 3.f, 3.f, 3.f, 4.f, 4.f, 4.f, 4.f
    };

    ElementwiseUnarySimpleEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                              UnaryOperation::Abs,
                                                              expectedOutput);
}
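
// Note on the quantized Abs variants: the float expectedOutput above is reused unchanged and,
// as the comments in these tests say, is implicitly quantized by the test helper using the
// output tensor's scale and offset. As a rough sketch of the usual affine mapping (illustrative
// only; the scale/offset values here are made up, not the helper's defaults):
//     quantized = clamp(round(value / scale) + offset)
// e.g. with scale 0.1f and offset 0, the expected value 5.f would map to 50.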

BOOST_AUTO_TEST_CASE(RefAbsEndToEndTestInt16)
{
    // Note the expected output will be implicitly quantized by the below test function
    std::vector<float> expectedOutput =
    {
        1.f, 1.f, 1.f, 1.f, 5.f, 5.f, 5.f, 5.f,
        3.f, 3.f, 3.f, 3.f, 4.f, 4.f, 4.f, 4.f
    };

    ElementwiseUnarySimpleEndToEnd<armnn::DataType::QSymmS16>(defaultBackends,
                                                              UnaryOperation::Abs,
                                                              expectedOutput);
}

// Constant
BOOST_AUTO_TEST_CASE(ConstantUsage_Ref_Float32)
{
    BOOST_TEST(ConstantUsageFloat32Test(defaultBackends));
}

BOOST_AUTO_TEST_CASE(ConstantUsage_Ref_Uint8)
{
    BOOST_TEST(ConstantUsageUint8Test(defaultBackends));
}

BOOST_AUTO_TEST_CASE(Unsigned8)
{
    using namespace armnn;

    // Create runtime in which test will run
    IRuntime::CreationOptions options;
    IRuntimePtr runtime(IRuntime::Create(options));

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0, "input");
    IConnectableLayer* softmax = net->AddSoftmaxLayer(SoftmaxDescriptor(), "softmax");
    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Sets the tensors in the network.
    TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    inputTensorInfo.SetQuantizationOffset(100);
    inputTensorInfo.SetQuantizationScale(10000.0f);
    input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);

    TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QAsymmU8);
    outputTensorInfo.SetQuantizationOffset(0);
    outputTensorInfo.SetQuantizationScale(1.0f/255.0f);
    softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    // Optimize the network.
    IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());

    // Loads it into the runtime.
    NetworkId netId;
    auto error = runtime->LoadNetwork(netId, std::move(optNet));
    BOOST_TEST(error == Status::Success);

    // Creates structures for input & output.
    std::vector<uint8_t> inputData
    {
        1, 10, 3, 200, 5 // Some inputs - one of which is sufficiently larger than the others to saturate softmax.
    };
    std::vector<uint8_t> outputData(5);

    armnn::InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
    };
    armnn::OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
    };

    // Does the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // Checks the results.
    BOOST_TEST(outputData[0] == 0);
    BOOST_TEST(outputData[1] == 0);
    BOOST_TEST(outputData[2] == 0);
    BOOST_TEST(outputData[3] == 255); // softmax has been saturated.
    BOOST_TEST(outputData[4] == 0);
}
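
// Why the saturation above is expected: with the input quantization set to scale 10000 and
// offset 100, the raw input 200 dequantizes to (200 - 100) * 10000 = 1,000,000, while the other
// inputs dequantize to large negative values. Softmax over such a spread is ~1 for the dominant
// element and ~0 elsewhere, which re-quantizes (scale 1/255, offset 0) to 255 and 0 respectively.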

BOOST_AUTO_TEST_CASE(TrivialAdd)
{
    // This test was designed to match "AddTwo" in android nn/runtime/test/TestTrivialModel.cpp.

    using namespace armnn;

    // Create runtime in which test will run
    IRuntime::CreationOptions options;
    IRuntimePtr runtime(IRuntime::Create(options));

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input1 = net->AddInputLayer(0);
    IConnectableLayer* input2 = net->AddInputLayer(1);
    IConnectableLayer* add = net->AddAdditionLayer();
    IConnectableLayer* output = net->AddOutputLayer(0);

    input1->GetOutputSlot(0).Connect(add->GetInputSlot(0));
    input2->GetOutputSlot(0).Connect(add->GetInputSlot(1));
    add->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Sets the tensors in the network.
    TensorInfo tensorInfo(TensorShape({3, 4}), DataType::Float32);
    input1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    input2->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    add->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    // Optimize the network.
    IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());

    // Loads it into the runtime.
    NetworkId netId;
    runtime->LoadNetwork(netId, std::move(optNet));

    // Creates structures for input & output - matching android nn test.
    std::vector<float> input1Data
    {
        1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f, 8.f, 9.f, 10.f, 11.f, 12.f
    };
    std::vector<float> input2Data
    {
        100.f, 200.f, 300.f, 400.f, 500.f, 600.f, 700.f, 800.f, 900.f, 1000.f, 1100.f, 1200.f
    };
    std::vector<float> outputData(12);

    InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input1Data.data())},
        {1, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input2Data.data())}
    };
    OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
    };

    // Does the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // Checks the results.
    BOOST_TEST(outputData[0] == 101);
    BOOST_TEST(outputData[1] == 202);
    BOOST_TEST(outputData[2] == 303);
    BOOST_TEST(outputData[3] == 404);
    BOOST_TEST(outputData[4] == 505);
    BOOST_TEST(outputData[5] == 606);
    BOOST_TEST(outputData[6] == 707);
    BOOST_TEST(outputData[7] == 808);
    BOOST_TEST(outputData[8] == 909);
    BOOST_TEST(outputData[9] == 1010);
    BOOST_TEST(outputData[10] == 1111);
    BOOST_TEST(outputData[11] == 1212);
}
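
// Note that both input bindings above reuse GetInputTensorInfo(netId, 0); that only works here
// because input 0 and input 1 were given identical TensorInfos ({3, 4}, Float32). In general each
// binding id should be queried for its own info, e.g. (sketch):
//     { 1, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 1), input2Data.data()) }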

BOOST_AUTO_TEST_CASE(MultipleOutputs)
{
    using namespace armnn;

    // Create runtime in which test will run
    IRuntime::CreationOptions options;
    IRuntimePtr runtime(IRuntime::Create(options));

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

    // ReLu1
    ActivationDescriptor activation1Descriptor;
    activation1Descriptor.m_Function = ActivationFunction::BoundedReLu;
    activation1Descriptor.m_A = 1.f;
    activation1Descriptor.m_B = -1.f;
    IConnectableLayer* activation1 = net->AddActivationLayer(activation1Descriptor);

    // ReLu6
    ActivationDescriptor activation2Descriptor;
    activation2Descriptor.m_Function = ActivationFunction::BoundedReLu;
    activation2Descriptor.m_A = 6.0f;
    IConnectableLayer* activation2 = net->AddActivationLayer(activation2Descriptor);

    // BoundedReLu(min=2, max=5)
    ActivationDescriptor activation3Descriptor;
    activation3Descriptor.m_Function = ActivationFunction::BoundedReLu;
    activation3Descriptor.m_A = 5.0f;
    activation3Descriptor.m_B = 2.0f;
    IConnectableLayer* activation3 = net->AddActivationLayer(activation3Descriptor);

    IConnectableLayer* output1 = net->AddOutputLayer(0);
    IConnectableLayer* output2 = net->AddOutputLayer(1);
    IConnectableLayer* output3 = net->AddOutputLayer(2);

    input->GetOutputSlot(0).Connect(activation1->GetInputSlot(0));
    input->GetOutputSlot(0).Connect(activation2->GetInputSlot(0));
    input->GetOutputSlot(0).Connect(activation3->GetInputSlot(0));

    activation1->GetOutputSlot(0).Connect(output1->GetInputSlot(0));
    activation2->GetOutputSlot(0).Connect(output2->GetInputSlot(0));
    activation3->GetOutputSlot(0).Connect(output3->GetInputSlot(0));

    // Sets the tensors in the network.
    TensorInfo tensorInfo(TensorShape({ 10 }), DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    activation1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    activation2->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    activation3->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    // Optimize the network.
    IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());

    // Loads it into the runtime.
    NetworkId netId;
    runtime->LoadNetwork(netId, std::move(optNet));

    // Creates structures for input & output.
    const std::vector<float> inputData{ 3.f, 5.f, 2.f, 3.f, 7.f, 0.f, -2.f, -1.f, 3.f, 3.f };

    std::vector<float> output1Data(inputData.size());
    std::vector<float> output2Data(inputData.size());
    std::vector<float> output3Data(inputData.size());

    InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
    };
    OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), output1Data.data())},
        {1, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 1), output2Data.data())},
        {2, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 2), output3Data.data())}
    };

    // Does the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // Checks the results.
    BOOST_TEST(output1Data == std::vector<float>({ 1.f, 1.f, 1.f, 1.f, 1.f, 0.f, -1.f, -1.f, 1.f, 1.f })); // ReLu1
    BOOST_TEST(output2Data == std::vector<float>({ 3.f, 5.f, 2.f, 3.f, 6.f, 0.f, 0.f, 0.f, 3.f, 3.f }));   // ReLu6
    BOOST_TEST(output3Data == std::vector<float>({ 3.f, 5.f, 2.f, 3.f, 5.f, 2.f, 2.f, 2.f, 3.f, 3.f }));   // [2, 5]
}
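
// For reference, BoundedReLu clamps as output = min(m_A, max(m_B, input)). m_B defaults to 0, so
// the three activations above act as ReLu1 (A=1, B=-1), ReLu6 (A=6, B=0) and a [2, 5] clamp
// (A=5, B=2), which is exactly what the three expected output vectors check.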

BOOST_AUTO_TEST_CASE(TrivialMin)
{
    using namespace armnn;

    // Create runtime in which test will run
    IRuntime::CreationOptions options;
    IRuntimePtr runtime(IRuntime::Create(options));

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input1 = net->AddInputLayer(0);
    IConnectableLayer* input2 = net->AddInputLayer(1);
    IConnectableLayer* min = net->AddMinimumLayer();
    IConnectableLayer* output = net->AddOutputLayer(0);

    input1->GetOutputSlot(0).Connect(min->GetInputSlot(0));
    input2->GetOutputSlot(0).Connect(min->GetInputSlot(1));
    min->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Sets the tensors in the network.
    TensorInfo tensorInfo(TensorShape({1, 1, 1, 4}), DataType::Float32);
    input1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    input2->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    min->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    // Optimize the network.
    IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());

    // Loads it into the runtime.
    NetworkId netId;
    runtime->LoadNetwork(netId, std::move(optNet));

    // Creates structures for input & output - matching android nn test.
    std::vector<float> input1Data
    {
        1.0f, 2.0f, 3.0f, 4.0f
    };
    std::vector<float> input2Data
    {
        2.0f, 1.0f, 5.0f, 2.0f
    };
    std::vector<float> outputData(4);

    InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input1Data.data())},
        {1, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input2Data.data())}
    };
    OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
    };

    // Does the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // Checks the results.
    BOOST_TEST(outputData[0] == 1);
    BOOST_TEST(outputData[1] == 1);
    BOOST_TEST(outputData[2] == 3);
    BOOST_TEST(outputData[3] == 2);
}

BOOST_AUTO_TEST_CASE(RefEqualSimpleEndToEndTest)
{
    const std::vector<uint8_t> expectedOutput({ 1, 1, 1, 1, 0, 0, 0, 0,
                                                0, 0, 0, 0, 1, 1, 1, 1 });

    ComparisonSimpleEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                       ComparisonOperation::Equal,
                                                       expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefGreaterSimpleEndToEndTest)
{
    const std::vector<uint8_t> expectedOutput({ 0, 0, 0, 0, 1, 1, 1, 1,
                                                0, 0, 0, 0, 0, 0, 0, 0 });

    ComparisonSimpleEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                       ComparisonOperation::Greater,
                                                       expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefEqualSimpleEndToEndUint8Test)
{
    const std::vector<uint8_t> expectedOutput({ 1, 1, 1, 1, 0, 0, 0, 0,
                                                0, 0, 0, 0, 1, 1, 1, 1 });

    ComparisonSimpleEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                        ComparisonOperation::Equal,
                                                        expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefGreaterSimpleEndToEndUint8Test)
{
    const std::vector<uint8_t> expectedOutput({ 0, 0, 0, 0, 1, 1, 1, 1,
                                                0, 0, 0, 0, 0, 0, 0, 0 });

    ComparisonSimpleEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                        ComparisonOperation::Greater,
                                                        expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefEqualBroadcastEndToEndTest)
{
    const std::vector<uint8_t> expectedOutput({ 1, 0, 1, 1, 0, 0,
                                                0, 0, 0, 0, 0, 0 });

    ComparisonBroadcastEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                          ComparisonOperation::Equal,
                                                          expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefGreaterBroadcastEndToEndTest)
{
    const std::vector<uint8_t> expectedOutput({ 0, 1, 0, 0, 0, 1,
                                                1, 1, 1, 1, 1, 1 });

    ComparisonBroadcastEndToEnd<armnn::DataType::Float32>(defaultBackends,
                                                          ComparisonOperation::Greater,
                                                          expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefEqualBroadcastEndToEndUint8Test)
{
    const std::vector<uint8_t> expectedOutput({ 1, 0, 1, 1, 0, 0,
                                                0, 0, 0, 0, 0, 0 });

    ComparisonBroadcastEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                           ComparisonOperation::Equal,
                                                           expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefGreaterBroadcastEndToEndUint8Test)
{
    const std::vector<uint8_t> expectedOutput({ 0, 1, 0, 0, 0, 1,
                                                1, 1, 1, 1, 1, 1 });

    ComparisonBroadcastEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends,
                                                           ComparisonOperation::Greater,
                                                           expectedOutput);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndFloat32NHWCTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndUint8NHWCTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndQSymm16NHWCTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndFloat32NCHWTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndUint8NCHWTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndQSymm16NCHWTest)
{
    BatchToSpaceNdEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexFloat32NHWCTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexUint8NHWCTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexQSymm16NHWCTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexFloat32NCHWTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexUint8NCHWTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefBatchToSpaceNdEndToEndComplexQSymm16NCHWTest)
{
    BatchToSpaceNdComplexEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim0Test)
{
    ConcatDim0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim0Uint8Test)
{
    ConcatDim0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim1Test)
{
    ConcatDim1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim1Uint8Test)
{
    ConcatDim1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim2Test)
{
    ConcatDim2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim2Uint8Test)
{
    ConcatDim2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim3Test)
{
    ConcatDim3EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefConcatEndToEndDim3Uint8Test)
{
    ConcatDim3EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefEluEndToEndTestFloat32)
{
    EluEndToEndTest<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefEluEndToEndTestFloat16)
{
    EluEndToEndTest<armnn::DataType::Float16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefEluEndToEndTestBFloat16)
{
    EluEndToEndTest<armnn::DataType::BFloat16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefEluEndToEndTestQAsymmS8)
{
    EluEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefEluEndToEndTestQAsymmU8)
{
    EluEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefEluEndToEndTestQSymmS16)
{
    EluEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherFloatTest)
{
    GatherEndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherUint8Test)
{
    GatherEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherInt16Test)
{
    GatherEndToEnd<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherMultiDimFloatTest)
{
    GatherMultiDimEndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherMultiDimUint8Test)
{
    GatherMultiDimEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefGatherMultiDimInt16Test)
{
    GatherMultiDimEndToEnd<armnn::DataType::QSymmS16>(defaultBackends);
}

// DepthToSpace
BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNchwFloat32)
{
    DepthToSpaceEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNchwFloat16)
{
    DepthToSpaceEndToEnd<armnn::DataType::Float16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNchwUint8)
{
    DepthToSpaceEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNchwInt16)
{
    DepthToSpaceEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNhwcFloat32)
{
    DepthToSpaceEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNhwcFloat16)
{
    DepthToSpaceEndToEnd<armnn::DataType::Float16>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNhwcUint8)
{
    DepthToSpaceEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(DephtToSpaceEndToEndNhwcInt16)
{
    DepthToSpaceEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

// Dequantize
BOOST_AUTO_TEST_CASE(DequantizeEndToEndSimpleTest)
{
    DequantizeEndToEndSimple<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(DequantizeEndToEndOffsetTest)
{
    DequantizeEndToEndOffset<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(DequantizeEndToEndSimpleInt16Test)
{
    DequantizeEndToEndSimple<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(DequantizeEndToEndOffsetInt16Test)
{
    DequantizeEndToEndOffset<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefDetectionPostProcessRegularNmsTest)
{
    std::vector<float> boxEncodings({
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f
    });
    std::vector<float> scores({
        0.0f, 0.9f, 0.8f,
        0.0f, 0.75f, 0.72f,
        0.0f, 0.6f, 0.5f,
        0.0f, 0.93f, 0.95f,
        0.0f, 0.5f, 0.4f,
        0.0f, 0.3f, 0.2f
    });
    std::vector<float> anchors({
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 100.5f, 1.0f, 1.0f
    });
    DetectionPostProcessRegularNmsEndToEnd<armnn::DataType::Float32>(defaultBackends, boxEncodings, scores, anchors);
}

inline void QuantizeData(uint8_t* quant, const float* dequant, const TensorInfo& info)
{
    for (size_t i = 0; i < info.GetNumElements(); i++)
    {
        quant[i] = armnn::Quantize<uint8_t>(dequant[i], info.GetQuantizationScale(), info.GetQuantizationOffset());
    }
}
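
// QuantizeData applies the usual per-element affine quantization, roughly:
//     quant[i] = clamp(round(dequant[i] / scale) + offset)
// For example, with the score quantization used in the tests below (scale 0.01f, offset 0) a
// score of 0.95f becomes 95, and with the box encoding quantization (scale 1.0f, offset 1) a
// value of -1.0f becomes 0.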

BOOST_AUTO_TEST_CASE(RefDetectionPostProcessRegularNmsUint8Test)
{
    armnn::TensorInfo boxEncodingsInfo({ 1, 6, 4 }, armnn::DataType::Float32);
    armnn::TensorInfo scoresInfo({ 1, 6, 3 }, armnn::DataType::Float32);
    armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32);

    boxEncodingsInfo.SetQuantizationScale(1.0f);
    boxEncodingsInfo.SetQuantizationOffset(1);
    scoresInfo.SetQuantizationScale(0.01f);
    scoresInfo.SetQuantizationOffset(0);
    anchorsInfo.SetQuantizationScale(0.5f);
    anchorsInfo.SetQuantizationOffset(0);

    std::vector<float> boxEncodings({
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f
    });
    std::vector<float> scores({
        0.0f, 0.9f, 0.8f,
        0.0f, 0.75f, 0.72f,
        0.0f, 0.6f, 0.5f,
        0.0f, 0.93f, 0.95f,
        0.0f, 0.5f, 0.4f,
        0.0f, 0.3f, 0.2f
    });
    std::vector<float> anchors({
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 100.5f, 1.0f, 1.0f
    });

    std::vector<uint8_t> qBoxEncodings(boxEncodings.size(), 0);
    std::vector<uint8_t> qScores(scores.size(), 0);
    std::vector<uint8_t> qAnchors(anchors.size(), 0);
    QuantizeData(qBoxEncodings.data(), boxEncodings.data(), boxEncodingsInfo);
    QuantizeData(qScores.data(), scores.data(), scoresInfo);
    QuantizeData(qAnchors.data(), anchors.data(), anchorsInfo);
    DetectionPostProcessRegularNmsEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, qBoxEncodings,
                                                                      qScores, qAnchors,
                                                                      1.0f, 1, 0.01f, 0, 0.5f, 0);
}

BOOST_AUTO_TEST_CASE(RefDetectionPostProcessFastNmsTest)
{
    std::vector<float> boxEncodings({
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f
    });
    std::vector<float> scores({
        0.0f, 0.9f, 0.8f,
        0.0f, 0.75f, 0.72f,
        0.0f, 0.6f, 0.5f,
        0.0f, 0.93f, 0.95f,
        0.0f, 0.5f, 0.4f,
        0.0f, 0.3f, 0.2f
    });
    std::vector<float> anchors({
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 100.5f, 1.0f, 1.0f
    });
    DetectionPostProcessFastNmsEndToEnd<armnn::DataType::Float32>(defaultBackends, boxEncodings, scores, anchors);
}

BOOST_AUTO_TEST_CASE(RefDetectionPostProcessFastNmsUint8Test)
{
    armnn::TensorInfo boxEncodingsInfo({ 1, 6, 4 }, armnn::DataType::Float32);
    armnn::TensorInfo scoresInfo({ 1, 6, 3 }, armnn::DataType::Float32);
    armnn::TensorInfo anchorsInfo({ 6, 4 }, armnn::DataType::Float32);

    boxEncodingsInfo.SetQuantizationScale(1.0f);
    boxEncodingsInfo.SetQuantizationOffset(1);
    scoresInfo.SetQuantizationScale(0.01f);
    scoresInfo.SetQuantizationOffset(0);
    anchorsInfo.SetQuantizationScale(0.5f);
    anchorsInfo.SetQuantizationOffset(0);

    std::vector<float> boxEncodings({
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, -1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f
    });
    std::vector<float> scores({
        0.0f, 0.9f, 0.8f,
        0.0f, 0.75f, 0.72f,
        0.0f, 0.6f, 0.5f,
        0.0f, 0.93f, 0.95f,
        0.0f, 0.5f, 0.4f,
        0.0f, 0.3f, 0.2f
    });
    std::vector<float> anchors({
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 10.5f, 1.0f, 1.0f,
        0.5f, 100.5f, 1.0f, 1.0f
    });

    std::vector<uint8_t> qBoxEncodings(boxEncodings.size(), 0);
    std::vector<uint8_t> qScores(scores.size(), 0);
    std::vector<uint8_t> qAnchors(anchors.size(), 0);
    QuantizeData(qBoxEncodings.data(), boxEncodings.data(), boxEncodingsInfo);
    QuantizeData(qScores.data(), scores.data(), scoresInfo);
    QuantizeData(qAnchors.data(), anchors.data(), anchorsInfo);
    DetectionPostProcessFastNmsEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, qBoxEncodings,
                                                                   qScores, qAnchors,
                                                                   1.0f, 1, 0.01f, 0, 0.5f, 0);
}

// HardSwish
BOOST_AUTO_TEST_CASE(RefHardSwishEndToEndTestFloat32)
{
    HardSwishEndToEndTest<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefHardSwishEndToEndTestFloat16)
{
    HardSwishEndToEndTest<armnn::DataType::Float16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefHardSwishEndToEndTestBFloat16)
{
    HardSwishEndToEndTest<armnn::DataType::BFloat16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefHardSwishEndToEndTestQAsymmS8)
{
    HardSwishEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefHardSwishEndToEndTestQAsymmU8)
{
    HardSwishEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefHardSwishEndToEndTestQSymmS16)
{
    HardSwishEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends);
}

// LogSoftmax
BOOST_AUTO_TEST_CASE(RefLogSoftmaxEndToEndTest)
{
    LogSoftmaxEndToEndTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefPreluEndToEndTestFloat32)
{
    PreluEndToEndNegativeTest<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefPreluEndToEndTestUint8)
{
    PreluEndToEndPositiveTest<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefPreluEndToEndTestQSymm16)
{
    PreluEndToEndPositiveTest<armnn::DataType::QSymmS16>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSpaceToDepthNhwcEndToEndTest1)
{
    SpaceToDepthNhwcEndToEndTest1(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSpaceToDepthNchwEndToEndTest1)
{
    SpaceToDepthNchwEndToEndTest1(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSpaceToDepthNhwcEndToEndTest2)
{
    SpaceToDepthNhwcEndToEndTest2(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSpaceToDepthNchwEndToEndTest2)
{
    SpaceToDepthNchwEndToEndTest2(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter1dEndToEndTest)
{
    Splitter1dEndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter1dEndToEndUint8Test)
{
    Splitter1dEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter2dDim0EndToEndTest)
{
    Splitter2dDim0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter2dDim1EndToEndTest)
{
    Splitter2dDim1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter2dDim0EndToEndUint8Test)
{
    Splitter2dDim0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter2dDim1EndToEndUint8Test)
{
    Splitter2dDim1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim0EndToEndTest)
{
    Splitter3dDim0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim1EndToEndTest)
{
    Splitter3dDim1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim2EndToEndTest)
{
    Splitter3dDim2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim0EndToEndUint8Test)
{
    Splitter3dDim0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim1EndToEndUint8Test)
{
    Splitter3dDim1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter3dDim2EndToEndUint8Test)
{
    Splitter3dDim2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim0EndToEndTest)
{
    Splitter4dDim0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim1EndToEndTest)
{
    Splitter4dDim1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim2EndToEndTest)
{
    Splitter4dDim2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim3EndToEndTest)
{
    Splitter4dDim3EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim0EndToEndUint8Test)
{
    Splitter4dDim0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim1EndToEndUint8Test)
{
    Splitter4dDim1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim2EndToEndUint8Test)
{
    Splitter4dDim2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefSplitter4dDim3EndToEndUint8Test)
{
    Splitter4dDim3EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

// TransposeConvolution2d
BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndFloatNchwTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::Float32, armnn::DataType::Float32>(
        defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndUint8NchwTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
        defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndInt16NchwTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
        defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndFloatNhwcTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::Float32, armnn::DataType::Float32>(
        defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndUint8NhwcTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::QAsymmU8, armnn::DataType::Signed32>(
        defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefTransposeConvolution2dEndToEndInt16NhwcTest)
{
    TransposeConvolution2dEndToEnd<armnn::DataType::QSymmS16, armnn::DataType::Signed32>(
        defaultBackends, armnn::DataLayout::NHWC);
}

// Resize Bilinear
BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndFloatNchwTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndUint8NchwTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndInt16NchwTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndFloatNhwcTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndUint8NhwcTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefResizeBilinearEndToEndInt16NhwcTest)
{
    ResizeBilinearEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

// Resize NearestNeighbor
BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndFloatNchwTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndUint8NchwTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndInt16NchwTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndFloatNhwcTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::Float32>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndUint8NhwcTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::QAsymmU8>(defaultBackends, armnn::DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(RefResizeNearestNeighborEndToEndInt16NhwcTest)
{
    ResizeNearestNeighborEndToEnd<armnn::DataType::QSymmS16>(defaultBackends, armnn::DataLayout::NHWC);
}

// InstanceNormalization
BOOST_AUTO_TEST_CASE(RefInstanceNormalizationNhwcEndToEndTest1)
{
    InstanceNormalizationNhwcEndToEndTest1(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefInstanceNormalizationNchwEndToEndTest1)
{
    InstanceNormalizationNchwEndToEndTest1(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefInstanceNormalizationNhwcEndToEndTest2)
{
    InstanceNormalizationNhwcEndToEndTest2(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefInstanceNormalizationNchwEndToEndTest2)
{
    InstanceNormalizationNchwEndToEndTest2(defaultBackends);
}

// ArgMinMax
BOOST_AUTO_TEST_CASE(RefArgMaxSimpleTest)
{
    ArgMaxEndToEndSimple<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxSimpleUint8Test)
{
    ArgMaxEndToEndSimple<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinSimpleTest)
{
    ArgMinEndToEndSimple<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinSimpleUint8Test)
{
    ArgMinEndToEndSimple<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis0Test)
{
    ArgMaxAxis0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis0Uint8Test)
{
    ArgMaxAxis0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis0Test)
{
    ArgMinAxis0EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis0Uint8Test)
{
    ArgMinAxis0EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis1Test)
{
    ArgMaxAxis1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis1Uint8Test)
{
    ArgMaxAxis1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis1Test)
{
    ArgMinAxis1EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis1Uint8Test)
{
    ArgMinAxis1EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis2Test)
{
    ArgMaxAxis2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis2Uint8Test)
{
    ArgMaxAxis2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis2Test)
{
    ArgMinAxis2EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis2Uint8Test)
{
    ArgMinAxis2EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis3Test)
{
    ArgMaxAxis3EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMaxAxis3Uint8Test)
{
    ArgMaxAxis3EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis3Test)
{
    ArgMinAxis3EndToEnd<armnn::DataType::Float32>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefArgMinAxis3Uint8Test)
{
    ArgMinAxis3EndToEnd<armnn::DataType::QAsymmU8>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefQLstmEndToEndTest)
{
    QLstmEndToEnd(defaultBackends);
}

#if !defined(__ANDROID__)
// Only run these tests on non Android platforms
BOOST_AUTO_TEST_CASE(RefImportNonAlignedPointerTest)
{
    ImportNonAlignedInputPointerTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefExportNonAlignedPointerTest)
{
    ExportNonAlignedOutputPointerTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefImportAlignedPointerTest)
{
    ImportAlignedPointerTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefImportOnlyWorkload)
{
    ImportOnlyWorkload(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefExportOnlyWorkload)
{
    ExportOnlyWorkload(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefImportAndExportWorkload)
{
    ImportAndExportWorkload(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefExportOutputWithSeveralOutputSlotConnectionsTest)
{
    ExportOutputWithSeveralOutputSlotConnectionsTest(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefStridedSliceInvalidSliceEndToEndTest)
{
    StridedSliceInvalidSliceEndToEndTest(defaultBackends);
}

#endif

BOOST_AUTO_TEST_SUITE_END()