ArmNN
 21.08
RuntimeTests.cpp File Reference
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <Processes.hpp>
#include <Runtime.hpp>
#include <armnn/TypesUtils.hpp>
#include <common/include/LabelsAndEventClasses.hpp>
#include <test/ProfilingTestUtils.hpp>
#include <HeapProfiling.hpp>
#include <LeakChecking.hpp>
#include <doctest/doctest.h>
#include "RuntimeTests.hpp"
#include "TestUtils.hpp"

Go to the source code of this file.

Namespaces

 armnn
 Copyright (c) 2021 ARM Limited and Contributors.
 

Functions

void RuntimeLoadedNetworksReserve (armnn::RuntimeImpl *runtime)
 
 TEST_SUITE ("Runtime")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ( "Runtime"  )

Definition at line 37 of file RuntimeTests.cpp.

References BackendOptions::AddOption(), ARMNN_BYTES_LEAKED_IN_SCOPE, ARMNN_LEAK_CHECKER_IS_ACTIVE, ARMNN_LOCAL_LEAK_CHECKING_ONLY, ARMNN_NO_LEAKS_IN_SCOPE, ARMNN_OBJECTS_LEAKED_IN_SCOPE, ARMNN_SCOPED_LEAK_CHECKER, IOutputSlot::Connect(), armnn::CpuAcc, armnn::CpuRef, INetwork::Create(), IRuntime::Create(), RuntimeImpl::EnqueueWorkload(), armnn::Failure, armnn::Float32, ProfilingServiceRuntimeHelper::ForceTransitionToState(), GetBackendId(), armnnUtils::Processes::GetCurrentId(), RuntimeImpl::GetDeviceSpec(), IConnectableLayer::GetGuid(), IConnectableLayer::GetInputSlot(), RuntimeImpl::GetInputTensorInfo(), IConnectableLayer::GetOutputSlot(), RuntimeImpl::GetOutputTensorInfo(), ProfilingServiceRuntimeHelper::GetProfilingBufferManager(), armnn::GetProfilingService(), BufferManager::GetReadableBuffer(), armnn::IgnoreUnused(), RuntimeImpl::LoadNetwork(), IRuntime::CreationOptions::m_BackendOptions, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_ProfilingOptions, IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled, armnn::Optimize(), armnn::QAsymmU8, ProfilingService::ResetExternalProfilingOptions(), armnn::RuntimeLoadedNetworksReserve(), IOutputSlot::SetTensorInfo(), armnn::Success, TEST_CASE_FIXTURE(), armnn::profiling::ThreadIdSize, RuntimeImpl::UnloadNetwork(), VerifyPostOptimisationStructureTestImpl(), VerifyTimelineEntityBinaryPacketData(), VerifyTimelineEventBinaryPacket(), VerifyTimelineHeaderBinary(), VerifyTimelineLabelBinaryPacketData(), and VerifyTimelineRelationshipBinaryPacketData().

38 {
39 TEST_CASE("RuntimeUnloadNetwork")
40 {
41  // build 2 mock-networks and load them into the runtime
44 
45  // Mock network 1.
46  armnn::NetworkId networkIdentifier1 = 1;
48  mockNetwork1->AddInputLayer(0, "test layer");
49  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
50  runtime->LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime->GetDeviceSpec()));
51 
52  // Mock network 2.
53  armnn::NetworkId networkIdentifier2 = 2;
55  mockNetwork2->AddInputLayer(0, "test layer");
56  runtime->LoadNetwork(networkIdentifier2, Optimize(*mockNetwork2, backends, runtime->GetDeviceSpec()));
57 
58  // Unloads one by its networkID.
59  CHECK(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Success);
60 
61  CHECK(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Failure);
62 }
63 
64 // Note: in the current builds we don't do valgrind and gperftools based leak checking at the same
65 // time, so in practice WITH_VALGRIND and ARMNN_LEAK_CHECKING_ENABLED are exclusive. The
66 // valgrind tests can stay for x86 builds, but on hikey Valgrind is just way too slow
67 // to be integrated into the CI system.
68 
69 #ifdef ARMNN_LEAK_CHECKING_ENABLED
70 
71 struct DisableGlobalLeakChecking
72 {
73  DisableGlobalLeakChecking()
74  {
76  }
77 };
78 
79 TEST_CASE_FIXTURE(DisableGlobalLeakChecking, "RuntimeHeapMemoryUsageSanityChecks")
80 {
82  {
83  ARMNN_SCOPED_LEAK_CHECKER("Sanity_Check_Outer");
84  {
85  ARMNN_SCOPED_LEAK_CHECKER("Sanity_Check_Inner");
86  CHECK(ARMNN_NO_LEAKS_IN_SCOPE() == true);
87  std::unique_ptr<char[]> dummyAllocation(new char[1000]);
88  // "A leak of 1000 bytes is expected here. "
89  // "Please make sure environment variable: HEAPCHECK=draconian is set!"
90  CHECK((ARMNN_NO_LEAKS_IN_SCOPE() == false));
91  CHECK(ARMNN_BYTES_LEAKED_IN_SCOPE() == 1000);
92  CHECK(ARMNN_OBJECTS_LEAKED_IN_SCOPE() == 1);
93  }
94  CHECK(ARMNN_NO_LEAKS_IN_SCOPE());
95  CHECK(ARMNN_BYTES_LEAKED_IN_SCOPE() == 0);
96  CHECK(ARMNN_OBJECTS_LEAKED_IN_SCOPE() == 0);
97  }
98 }
99 
100 #endif // ARMNN_LEAK_CHECKING_ENABLED
101 
102 // Note: this part of the code is due to be removed when we fully trust the gperftools based results.
103 #ifdef WITH_VALGRIND
104 // Run with the following command to get all the amazing output (in the devenv/build folder) :)
105 // valgrind --leak-check=full --show-leak-kinds=all --log-file=Valgrind_Memcheck_Leak_Report.txt armnn/test/UnitTests
106 TEST_CASE("RuntimeMemoryLeak")
107 {
108  // From documentation:
109 
110  // This means that no pointer to the block can be found. The block is classified as "lost",
111  // because the programmer could not possibly have freed it at program exit, since no pointer to it exists.
112  unsigned long leakedBefore = 0;
113  unsigned long leakedAfter = 0;
114 
115  // A start-pointer or chain of start-pointers to the block is found. Since the block is still pointed at,
116  // the programmer could, at least in principle, have freed it before program exit.
117  // We want to test this in case memory is not freed as early as it could have been.
118  unsigned long reachableBefore = 0;
119  unsigned long reachableAfter = 0;
120 
121  // Needed as out params but we don't test them.
122  unsigned long dubious = 0;
123  unsigned long suppressed = 0;
124 
125  armnn::NetworkId networkIdentifier1 = 1;
126 
127  // ensure that runtime is large enough before checking for memory leaks
128  // otherwise when loading the network it will automatically reserve memory that won't be released until destruction
130  armnn::RuntimeImpl runtime(options);
132 
133  {
134  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
135 
137  mockNetwork1->AddInputLayer(0, "test layer");
138 
139  // Warm-up load/unload pair to put the runtime in a stable state (memory-wise).
140  runtime.LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime.GetDeviceSpec()));
141  runtime.UnloadNetwork(networkIdentifier1);
142 
143  // Checks for leaks before we load the network and record them so that we can see the delta after unloading.
144  VALGRIND_DO_QUICK_LEAK_CHECK;
145  VALGRIND_COUNT_LEAKS(leakedBefore, dubious, reachableBefore, suppressed);
146 
147  // The actual test.
148  runtime.LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime.GetDeviceSpec()));
149  runtime.UnloadNetwork(networkIdentifier1);
150 
151  VALGRIND_DO_ADDED_LEAK_CHECK;
152  VALGRIND_COUNT_LEAKS(leakedAfter, dubious, reachableAfter, suppressed);
153  }
154 
155  // If we're not running under Valgrind, these vars will have been initialised to 0, so this will always pass.
156  CHECK(leakedBefore == leakedAfter);
157  CHECK(reachableBefore == reachableAfter);
158 
159  // These are needed because VALGRIND_COUNT_LEAKS is a macro that assigns to the parameters
160  // so they are assigned to, but still considered unused, causing a warning.
161  IgnoreUnused(dubious);
162  IgnoreUnused(suppressed);
163 }
164 #endif // WITH_VALGRIND
165 
166 TEST_CASE("RuntimeCpuRef")
167 {
168  using namespace armnn;
169 
170  // Create runtime in which test will run
173 
174  // build up the structure of the network
175  INetworkPtr net(INetwork::Create());
176 
177  IConnectableLayer* input = net->AddInputLayer(0);
178 
179  // This layer configuration isn't supported by CpuAcc, should be fall back to CpuRef.
180  NormalizationDescriptor descriptor;
181  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
182 
183  IConnectableLayer* output = net->AddOutputLayer(0);
184 
185  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
186  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
187 
188  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
189  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
190 
191  // optimize the network
192  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
193  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
194 
195  // Load it into the runtime. It should succeed.
196  armnn::NetworkId netId;
197  CHECK(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
198 }
199 
200 TEST_CASE("RuntimeFallbackToCpuRef")
201 {
202  using namespace armnn;
203 
204  // Create runtime in which test will run
207 
208  // build up the structure of the network
209  INetworkPtr net(INetwork::Create());
210 
211  IConnectableLayer* input = net->AddInputLayer(0);
212 
213  // This layer configuration isn't supported by CpuAcc, should be fall back to CpuRef.
214  NormalizationDescriptor descriptor;
215  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
216 
217  IConnectableLayer* output = net->AddOutputLayer(0);
218 
219  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
220  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
221 
222  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
223  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
224 
225  // Allow fallback to CpuRef.
226  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc, armnn::Compute::CpuRef };
227  // optimize the network
228  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
229 
230  // Load it into the runtime. It should succeed.
231  armnn::NetworkId netId;
232  CHECK(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
233 }
234 
235 TEST_CASE("IVGCVSW_1929_QuantizedSoftmaxIssue")
236 {
237  // Test for issue reported by Chris Nix in https://jira.arm.com/browse/IVGCVSW-1929
238  using namespace armnn;
239 
240  // Create runtime in which test will run
243 
244  // build up the structure of the network
245  INetworkPtr net(INetwork::Create());
246  armnn::IConnectableLayer* input = net->AddInputLayer(0,"input");
247  armnn::IConnectableLayer* softmax = net->AddSoftmaxLayer(armnn::SoftmaxDescriptor(), "softmax");
248  armnn::IConnectableLayer* output = net->AddOutputLayer(0, "output");
249 
250  input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
251  softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
252 
255  1.0f / 255,
256  0));
257 
260 
261  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
262  std::vector<std::string> errMessages;
263 
264  try
265  {
267  backends,
268  runtime->GetDeviceSpec(),
270  errMessages);
271  FAIL("An exception should have been thrown");
272  }
273  catch (const InvalidArgumentException&)
274  {
275  // Different exceptions are thrown on different backends
276  }
277  CHECK(errMessages.size() > 0);
278 }
279 
280 TEST_CASE("RuntimeBackendOptions")
281 {
282  using namespace armnn;
283 
284  IRuntime::CreationOptions creationOptions;
285  auto& backendOptions = creationOptions.m_BackendOptions;
286 
287 
288  // Define Options on explicit construction
289  BackendOptions options1("FakeBackend1",
290  {
291  { "Option1", 1.3f },
292  { "Option2", true }
293  });
294 
295  // Add an option after construction
296  options1.AddOption({ "Option3", "some_value" });
297 
298  // Add the options to CreationOptions struct
299  backendOptions.push_back(options1);
300 
301  // Add more Options via inplace explicit construction
302  backendOptions.emplace_back(BackendOptions{ "FakeBackend1",
303  {{ "Option4", 42 }}
304  });
305 
306 
307  // First group
308  CHECK(backendOptions[0].GetBackendId().Get() == "FakeBackend1");
309  CHECK(backendOptions[0].GetOption(0).GetName() == "Option1");
310  CHECK(backendOptions[0].GetOption(0).GetValue().IsFloat() == true);
311  CHECK(backendOptions[0].GetOption(0).GetValue().AsFloat() == 1.3f);
312 
313  CHECK(backendOptions[0].GetOption(1).GetName() == "Option2");
314  CHECK(backendOptions[0].GetOption(1).GetValue().IsBool() == true);
315  CHECK(backendOptions[0].GetOption(1).GetValue().AsBool() == true);
316 
317  CHECK(backendOptions[0].GetOption(2).GetName() == "Option3");
318  CHECK(backendOptions[0].GetOption(2).GetValue().IsString() == true);
319  CHECK(backendOptions[0].GetOption(2).GetValue().AsString() == "some_value");
320 
321  // Second group
322  CHECK(backendOptions[1].GetBackendId().Get() == "FakeBackend1");
323  CHECK(backendOptions[1].GetOption(0).GetName() == "Option4");
324  CHECK(backendOptions[1].GetOption(0).GetValue().IsInt() == true);
325  CHECK(backendOptions[1].GetOption(0).GetValue().AsInt() == 42);
326 }
327 
328 TEST_CASE("ProfilingDisable")
329 {
330  using namespace armnn;
331 
332  // Create runtime in which the test will run
334  armnn::RuntimeImpl runtime(options);
335 
336  // build up the structure of the network
337  INetworkPtr net(INetwork::Create());
338 
339  IConnectableLayer* input = net->AddInputLayer(0);
340 
341  // This layer configuration isn't supported by CpuAcc, should fall back to CpuRef.
342  NormalizationDescriptor descriptor;
343  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
344 
345  IConnectableLayer* output = net->AddOutputLayer(0);
346 
347  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
348  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
349 
350  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
351  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
352 
353  // optimize the network
354  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
355  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime.GetDeviceSpec());
356 
357  // Load it into the runtime. It should succeed.
358  armnn::NetworkId netId;
359  CHECK(runtime.LoadNetwork(netId, std::move(optNet)) == Status::Success);
360 
361  profiling::ProfilingServiceRuntimeHelper profilingServiceHelper(GetProfilingService(&runtime));
362  profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
363  auto readableBuffer = bufferManager.GetReadableBuffer();
364 
365  // Profiling is not enabled, the post-optimisation structure should not be created
366  CHECK(!readableBuffer);
367 }
368 
369 TEST_CASE("ProfilingEnableCpuRef")
370 {
371  using namespace armnn;
372  using namespace armnn::profiling;
373 
374  // Create runtime in which the test will run
376  options.m_ProfilingOptions.m_EnableProfiling = true;
377  options.m_ProfilingOptions.m_TimelineEnabled = true;
378 
379  armnn::RuntimeImpl runtime(options);
381 
382  profiling::ProfilingServiceRuntimeHelper profilingServiceHelper(GetProfilingService(&runtime));
383  profilingServiceHelper.ForceTransitionToState(ProfilingState::NotConnected);
384  profilingServiceHelper.ForceTransitionToState(ProfilingState::WaitingForAck);
385  profilingServiceHelper.ForceTransitionToState(ProfilingState::Active);
386 
387  // build up the structure of the network
388  INetworkPtr net(INetwork::Create());
389 
390  IConnectableLayer* input = net->AddInputLayer(0, "input");
391 
392  NormalizationDescriptor descriptor;
393  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor, "normalization");
394 
395  IConnectableLayer* output = net->AddOutputLayer(0, "output");
396 
397  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
398  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
399 
400  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
401  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
402 
403  // optimize the network
404  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
405  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime.GetDeviceSpec());
406 
407  ProfilingGuid optNetGuid = optNet->GetGuid();
408 
409  // Load it into the runtime. It should succeed.
410  armnn::NetworkId netId;
411  CHECK(runtime.LoadNetwork(netId, std::move(optNet)) == Status::Success);
412 
413  profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
414  auto readableBuffer = bufferManager.GetReadableBuffer();
415 
416  // Profiling is enabled, the post-optimisation structure should be created
417  CHECK(readableBuffer != nullptr);
418 
419  unsigned int size = readableBuffer->GetSize();
420 
421  const unsigned char* readableData = readableBuffer->GetReadableData();
422  CHECK(readableData != nullptr);
423 
424  unsigned int offset = 0;
425 
426  // Verify Header
427  VerifyTimelineHeaderBinary(readableData, offset, size - 8);
428 
429  // Post-optimisation network
430  // Network entity
431  VerifyTimelineEntityBinaryPacketData(optNetGuid, readableData, offset);
432 
433  // Entity - Type relationship
434  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
435  EmptyOptional(),
436  optNetGuid,
437  LabelsAndEventClasses::NETWORK_GUID,
438  LabelsAndEventClasses::TYPE_GUID,
439  readableData,
440  offset);
441 
442  // Network - START OF LIFE
443  ProfilingGuid networkSolEventGuid = VerifyTimelineEventBinaryPacket(EmptyOptional(),
444  EmptyOptional(),
445  EmptyOptional(),
446  readableData,
447  offset);
448 
449  // Network - START OF LIFE event relationship
450  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
451  EmptyOptional(),
452  optNetGuid,
453  networkSolEventGuid,
454  LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS,
455  readableData,
456  offset);
457 
458  // Process ID Label
459  int processID = armnnUtils::Processes::GetCurrentId();
460  std::stringstream ss;
461  ss << processID;
462  std::string processIdLabel = ss.str();
463  VerifyTimelineLabelBinaryPacketData(EmptyOptional(), processIdLabel, readableData, offset);
464 
465  // Entity - Process ID relationship
466  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
467  EmptyOptional(),
468  optNetGuid,
469  EmptyOptional(),
470  LabelsAndEventClasses::PROCESS_ID_GUID,
471  readableData,
472  offset);
473 
474  // Input layer
475  // Input layer entity
476  VerifyTimelineEntityBinaryPacketData(input->GetGuid(), readableData, offset);
477 
478  // Name Entity
479  ProfilingGuid inputLabelGuid = VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "input", readableData, offset);
480 
481  // Entity - Name relationship
482  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
483  EmptyOptional(),
484  input->GetGuid(),
485  inputLabelGuid,
486  LabelsAndEventClasses::NAME_GUID,
487  readableData,
488  offset);
489 
490  // Entity - Type relationship
491  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
492  EmptyOptional(),
493  input->GetGuid(),
494  LabelsAndEventClasses::LAYER_GUID,
495  LabelsAndEventClasses::TYPE_GUID,
496  readableData,
497  offset);
498 
499  // Network - Input layer relationship
500  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
501  EmptyOptional(),
502  optNetGuid,
503  input->GetGuid(),
504  LabelsAndEventClasses::CHILD_GUID,
505  readableData,
506  offset);
507 
508  // Normalization layer
509  // Normalization layer entity
510  VerifyTimelineEntityBinaryPacketData(normalize->GetGuid(), readableData, offset);
511 
512  // Name entity
513  ProfilingGuid normalizationLayerNameGuid = VerifyTimelineLabelBinaryPacketData(
514  EmptyOptional(), "normalization", readableData, offset);
515 
516  // Entity - Name relationship
517  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
518  EmptyOptional(),
519  normalize->GetGuid(),
520  normalizationLayerNameGuid,
521  LabelsAndEventClasses::NAME_GUID,
522  readableData,
523  offset);
524 
525  // Entity - Type relationship
526  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
527  EmptyOptional(),
528  normalize->GetGuid(),
529  LabelsAndEventClasses::LAYER_GUID,
530  LabelsAndEventClasses::TYPE_GUID,
531  readableData,
532  offset);
533 
534  // Network - Normalize layer relationship
535  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
536  EmptyOptional(),
537  optNetGuid,
538  normalize->GetGuid(),
539  LabelsAndEventClasses::CHILD_GUID,
540  readableData,
541  offset);
542 
543  // Input layer - Normalize layer relationship
544  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
545  EmptyOptional(),
546  input->GetGuid(),
547  normalize->GetGuid(),
548  LabelsAndEventClasses::CONNECTION_GUID,
549  readableData,
550  offset);
551 
552  // Normalization workload
553  // Normalization workload entity
554  ProfilingGuid normalizationWorkloadGuid = VerifyTimelineEntityBinaryPacketData(
555  EmptyOptional(), readableData, offset);
556 
557  // Entity - Type relationship
558  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
559  EmptyOptional(),
560  normalizationWorkloadGuid,
561  LabelsAndEventClasses::WORKLOAD_GUID,
562  LabelsAndEventClasses::TYPE_GUID,
563  readableData,
564  offset);
565 
566  // BackendId entity
567  ProfilingGuid cpuRefLabelGuid = VerifyTimelineLabelBinaryPacketData(
568  EmptyOptional(), "CpuRef", readableData, offset);
569 
570  // Entity - BackendId relationship
571  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
572  EmptyOptional(),
573  normalizationWorkloadGuid,
574  cpuRefLabelGuid,
575  LabelsAndEventClasses::BACKENDID_GUID,
576  readableData,
577  offset);
578 
579  // Normalize layer - Normalize workload relationship
580  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
581  EmptyOptional(),
582  normalize->GetGuid(),
583  normalizationWorkloadGuid,
584  LabelsAndEventClasses::CHILD_GUID,
585  readableData,
586  offset);
587 
588  // Output layer
589  // Output layer entity
590  VerifyTimelineEntityBinaryPacketData(output->GetGuid(), readableData, offset);
591 
592  // Name entity
593  ProfilingGuid outputLabelGuid = VerifyTimelineLabelBinaryPacketData(
594  EmptyOptional(), "output", readableData, offset);
595 
596  // Entity - Name relationship
597  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
598  EmptyOptional(),
599  output->GetGuid(),
600  outputLabelGuid,
601  LabelsAndEventClasses::NAME_GUID,
602  readableData,
603  offset);
604 
605  // Entity - Type relationship
606  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
607  EmptyOptional(),
608  output->GetGuid(),
609  LabelsAndEventClasses::LAYER_GUID,
610  LabelsAndEventClasses::TYPE_GUID,
611  readableData,
612  offset);
613 
614  // Network - Output layer relationship
615  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
616  EmptyOptional(),
617  optNetGuid,
618  output->GetGuid(),
619  LabelsAndEventClasses::CHILD_GUID,
620  readableData,
621  offset);
622 
623  // Normalize layer - Output layer relationship
624  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
625  EmptyOptional(),
626  normalize->GetGuid(),
627  output->GetGuid(),
628  LabelsAndEventClasses::CONNECTION_GUID,
629  readableData,
630  offset);
631 
632  bufferManager.MarkRead(readableBuffer);
633 
634  // Creates structures for input & output.
635  std::vector<float> inputData(16);
636  std::vector<float> outputData(16);
637 
638  InputTensors inputTensors
639  {
640  {0, ConstTensor(runtime.GetInputTensorInfo(netId, 0), inputData.data())}
641  };
642  OutputTensors outputTensors
643  {
644  {0, Tensor(runtime.GetOutputTensorInfo(netId, 0), outputData.data())}
645  };
646 
647  // Does the inference.
648  runtime.EnqueueWorkload(netId, inputTensors, outputTensors);
649 
650  // Get readable buffer for input workload
651  auto inputReadableBuffer = bufferManager.GetReadableBuffer();
652  CHECK(inputReadableBuffer != nullptr);
653 
654  // Get readable buffer for output workload
655  auto outputReadableBuffer = bufferManager.GetReadableBuffer();
656  CHECK(outputReadableBuffer != nullptr);
657 
658  // Get readable buffer for inference timeline
659  auto inferenceReadableBuffer = bufferManager.GetReadableBuffer();
660  CHECK(inferenceReadableBuffer != nullptr);
661 
662  // Validate input workload data
663  size = inputReadableBuffer->GetSize();
664  CHECK(size == 164);
665 
666  readableData = inputReadableBuffer->GetReadableData();
667  CHECK(readableData != nullptr);
668 
669  offset = 0;
670 
671  // Verify Header
672  VerifyTimelineHeaderBinary(readableData, offset, 156);
673 
674  // Input workload
675  // Input workload entity
676  ProfilingGuid inputWorkloadGuid = VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
677 
678  // Entity - Type relationship
679  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
680  EmptyOptional(),
681  inputWorkloadGuid,
682  LabelsAndEventClasses::WORKLOAD_GUID,
683  LabelsAndEventClasses::TYPE_GUID,
684  readableData,
685  offset);
686 
687  // BackendId entity
688  ProfilingGuid CpuRefLabelGuid = VerifyTimelineLabelBinaryPacketData(
689  EmptyOptional(), "CpuRef", readableData, offset);
690 
691  // Entity - BackendId relationship
692  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
693  EmptyOptional(),
694  inputWorkloadGuid,
695  CpuRefLabelGuid,
696  LabelsAndEventClasses::BACKENDID_GUID,
697  readableData,
698  offset);
699 
700  // Input layer - Input workload relationship
701  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
702  EmptyOptional(),
703  input->GetGuid(),
704  inputWorkloadGuid,
705  LabelsAndEventClasses::CHILD_GUID,
706  readableData,
707  offset);
708 
709  bufferManager.MarkRead(inputReadableBuffer);
710 
711  // Validate output workload data
712  size = outputReadableBuffer->GetSize();
713  CHECK(size == 164);
714 
715  readableData = outputReadableBuffer->GetReadableData();
716  CHECK(readableData != nullptr);
717 
718  offset = 0;
719 
720  // Verify Header
721  VerifyTimelineHeaderBinary(readableData, offset, 156);
722 
723  // Output workload
724  // Output workload entity
725  ProfilingGuid outputWorkloadGuid = VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
726 
727  // Entity - Type relationship
728  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
729  EmptyOptional(),
730  outputWorkloadGuid,
731  LabelsAndEventClasses::WORKLOAD_GUID,
732  LabelsAndEventClasses::TYPE_GUID,
733  readableData,
734  offset);
735 
736  // BackendId entity
737  VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "CpuRef", readableData, offset);
738 
739  // Entity - BackendId relationship
740  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
741  EmptyOptional(),
742  outputWorkloadGuid,
743  CpuRefLabelGuid,
744  LabelsAndEventClasses::BACKENDID_GUID,
745  readableData,
746  offset);
747 
748  // Output layer - Output workload relationship
749  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
750  EmptyOptional(),
751  output->GetGuid(),
752  outputWorkloadGuid,
753  LabelsAndEventClasses::CHILD_GUID,
754  readableData,
755  offset);
756 
757  bufferManager.MarkRead(outputReadableBuffer);
758 
759  // Validate inference data
760  size = inferenceReadableBuffer->GetSize();
761  CHECK(size == 976 + 8 * ThreadIdSize);
762 
763  readableData = inferenceReadableBuffer->GetReadableData();
764  CHECK(readableData != nullptr);
765 
766  offset = 0;
767 
768  // Verify Header
769  VerifyTimelineHeaderBinary(readableData, offset, 968 + 8 * ThreadIdSize);
770 
771  // Inference timeline trace
772  // Inference entity
773  ProfilingGuid inferenceGuid = VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
774 
775  // Entity - Type relationship
776  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
777  EmptyOptional(),
778  inferenceGuid,
779  LabelsAndEventClasses::INFERENCE_GUID,
780  LabelsAndEventClasses::TYPE_GUID,
781  readableData,
782  offset);
783 
784  // Network - Inference relationship
785  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
786  EmptyOptional(),
787  optNetGuid,
788  inferenceGuid,
789  LabelsAndEventClasses::EXECUTION_OF_GUID,
790  readableData,
791  offset);
792 
793  // Start Inference life
794  // Event packet - timeline, threadId, eventGuid
795  ProfilingGuid inferenceEventGuid = VerifyTimelineEventBinaryPacket(
796  EmptyOptional(), EmptyOptional(), EmptyOptional(), readableData, offset);
797 
798  // Inference - event relationship
799  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
800  EmptyOptional(),
801  inferenceGuid,
802  inferenceEventGuid,
803  LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS,
804  readableData,
805  offset);
806 
807  // Execution
808  // Input workload execution
809  // Input workload execution entity
810  ProfilingGuid inputWorkloadExecutionGuid = VerifyTimelineEntityBinaryPacketData(
811  EmptyOptional(), readableData, offset);
812 
813  // Entity - Type relationship
814  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
815  EmptyOptional(),
816  inputWorkloadExecutionGuid,
817  LabelsAndEventClasses::WORKLOAD_EXECUTION_GUID,
818  LabelsAndEventClasses::TYPE_GUID,
819  readableData,
820  offset);
821 
822  // Inference - Workload execution relationship
823  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
824  EmptyOptional(),
825  inferenceGuid,
826  inputWorkloadExecutionGuid,
827  LabelsAndEventClasses::CHILD_GUID,
828  readableData,
829  offset);
830 
831  // Workload - Workload execution relationship
832  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
833  EmptyOptional(),
834  inputWorkloadGuid,
835  inputWorkloadExecutionGuid,
836  LabelsAndEventClasses::EXECUTION_OF_GUID,
837  readableData,
838  offset);
839 
840  // Start Input workload execution life
841  // Event packet - timeline, threadId, eventGuid
842  ProfilingGuid inputWorkloadExecutionSOLEventId = VerifyTimelineEventBinaryPacket(
843  EmptyOptional(), EmptyOptional(), EmptyOptional(), readableData, offset);
844 
845  // Input workload execution - event relationship
846  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
847  EmptyOptional(),
848  inputWorkloadExecutionGuid,
849  inputWorkloadExecutionSOLEventId,
850  LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS,
851  readableData,
852  offset);
853 
854  // End of Input workload execution life
855  // Event packet - timeline, threadId, eventGuid
856  ProfilingGuid inputWorkloadExecutionEOLEventId = VerifyTimelineEventBinaryPacket(
857  EmptyOptional(), EmptyOptional(), EmptyOptional(), readableData, offset);
858 
859  // Input workload execution - event relationship
860  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
861  EmptyOptional(),
862  inputWorkloadExecutionGuid,
863  inputWorkloadExecutionEOLEventId,
864  LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS,
865  readableData,
866  offset);
867 
868  // Normalize workload execution
869  // Normalize workload execution entity
870  ProfilingGuid normalizeWorkloadExecutionGuid = VerifyTimelineEntityBinaryPacketData(
871  EmptyOptional(), readableData, offset);
872 
873  // Entity - Type relationship
874  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
875  EmptyOptional(),
876  normalizeWorkloadExecutionGuid,
877  LabelsAndEventClasses::WORKLOAD_EXECUTION_GUID,
878  LabelsAndEventClasses::TYPE_GUID,
879  readableData,
880  offset);
881 
882  // Inference - Workload execution relationship
883  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
884  EmptyOptional(),
885  inferenceGuid,
886  normalizeWorkloadExecutionGuid,
887  LabelsAndEventClasses::CHILD_GUID,
888  readableData,
889  offset);
890 
891  // Workload - Workload execution relationship
892  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
893  EmptyOptional(),
894  normalizationWorkloadGuid,
895  normalizeWorkloadExecutionGuid,
896  LabelsAndEventClasses::EXECUTION_OF_GUID,
897  readableData,
898  offset);
899 
900  // Start Normalize workload execution life
901  // Event packet - timeline, threadId, eventGuid
902  ProfilingGuid normalizationWorkloadExecutionSOLEventGuid = VerifyTimelineEventBinaryPacket(
903  EmptyOptional(), EmptyOptional(), EmptyOptional(), readableData, offset);
904 
905  // Normalize workload execution - event relationship
906  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
907  EmptyOptional(),
908  normalizeWorkloadExecutionGuid,
909  normalizationWorkloadExecutionSOLEventGuid,
910  LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS,
911  readableData,
912  offset);
913 
914  // End of Normalize workload execution life
915  // Event packet - timeline, threadId, eventGuid
916  ProfilingGuid normalizationWorkloadExecutionEOLEventGuid = VerifyTimelineEventBinaryPacket(
917  EmptyOptional(), EmptyOptional(), EmptyOptional(), readableData, offset);
918 
919  // Normalize workload execution - event relationship
920  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
921  EmptyOptional(),
922  normalizeWorkloadExecutionGuid,
923  normalizationWorkloadExecutionEOLEventGuid,
924  LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS,
925  readableData,
926  offset);
927 
928  // Output workload execution
929  // Output workload execution entity
930  ProfilingGuid outputWorkloadExecutionGuid = VerifyTimelineEntityBinaryPacketData(
931  EmptyOptional(), readableData, offset);
932 
933  // Entity - Type relationship
934  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::LabelLink,
935  EmptyOptional(),
936  outputWorkloadExecutionGuid,
937  LabelsAndEventClasses::WORKLOAD_EXECUTION_GUID,
938  LabelsAndEventClasses::TYPE_GUID,
939  readableData,
940  offset);
941 
942  // Inference - Workload execution relationship
943  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
944  EmptyOptional(),
945  inferenceGuid,
946  outputWorkloadExecutionGuid,
947  LabelsAndEventClasses::CHILD_GUID,
948  readableData,
949  offset);
950 
951  // Workload - Workload execution relationship
952  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::RetentionLink,
953  EmptyOptional(),
954  outputWorkloadGuid,
955  outputWorkloadExecutionGuid,
956  LabelsAndEventClasses::EXECUTION_OF_GUID,
957  readableData,
958  offset);
959 
960  // Start Output workload execution life
961  // Event packet - timeline, threadId, eventGuid
962  ProfilingGuid outputWorkloadExecutionSOLEventGuid = VerifyTimelineEventBinaryPacket(
963  EmptyOptional(), EmptyOptional(), EmptyOptional(), readableData, offset);
964 
965  // Output workload execution - event relationship
966  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
967  EmptyOptional(),
968  outputWorkloadExecutionGuid,
969  outputWorkloadExecutionSOLEventGuid,
970  LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS,
971  readableData,
972  offset);
973 
 974  // End of Output workload execution life
975  // Event packet - timeline, threadId, eventGuid
976  ProfilingGuid outputWorkloadExecutionEOLEventGuid = VerifyTimelineEventBinaryPacket(
977  EmptyOptional(), EmptyOptional(), EmptyOptional(), readableData, offset);
978 
979  // Output workload execution - event relationship
980  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
981  EmptyOptional(),
982  outputWorkloadExecutionGuid,
983  outputWorkloadExecutionEOLEventGuid,
984  LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS,
985  readableData,
986  offset);
987 
988  // End of Inference life
989  // Event packet - timeline, threadId, eventGuid
990  ProfilingGuid inferenceEOLEventGuid = VerifyTimelineEventBinaryPacket(
991  EmptyOptional(), EmptyOptional(), EmptyOptional(), readableData, offset);
992 
993  // Inference - event relationship
994  VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType::ExecutionLink,
995  EmptyOptional(),
996  inferenceGuid,
997  inferenceEOLEventGuid,
998  LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS,
999  readableData,
1000  offset);
1001 
1002  bufferManager.MarkRead(inferenceReadableBuffer);
1003 }
1004 
1005 TEST_CASE("ProfilingPostOptimisationStructureCpuRef")
1006 {
1007  VerifyPostOptimisationStructureTestImpl(armnn::Compute::CpuRef); // shared helper, parameterised on BackendId; run here for the CpuRef backend
1008 }
1009 
1010 }
#define ARMNN_SCOPED_LEAK_CHECKER(TAG)
profiling::ProfilingService & GetProfilingService(armnn::RuntimeImpl *runtime)
Definition: TestUtils.cpp:35
#define ARMNN_LOCAL_LEAK_CHECKING_ONLY()
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:39
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
ProfilingGuid VerifyTimelineEntityBinaryPacketData(Optional< ProfilingGuid > guid, const unsigned char *readableData, unsigned int &offset)
void VerifyPostOptimisationStructureTestImpl(armnn::BackendId backendId)
CPU Execution: Reference C++ kernels.
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:30
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:360
Copyright (c) 2021 ARM Limited and Contributors.
void IgnoreUnused(Ts &&...)
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
#define ARMNN_LEAK_CHECKER_IS_ACTIVE()
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:319
#define ARMNN_OBJECTS_LEAKED_IN_SCOPE()
TEST_CASE_FIXTURE(ClContextControlFixture, "CopyBetweenNeonAndGpu")
std::vector< BackendOptions > m_BackendOptions
Pass backend specific options.
Definition: IRuntime.hpp:192
const char * GetBackendId()
void RuntimeLoadedNetworksReserve(armnn::RuntimeImpl *runtime)
void AddOption(BackendOption &&option)
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1613
void VerifyTimelineRelationshipBinaryPacketData(ProfilingRelationshipType relationshipType, Optional< ProfilingGuid > relationshipGuid, Optional< ProfilingGuid > headGuid, Optional< ProfilingGuid > tailGuid, Optional< ProfilingGuid > attributeGuid, const unsigned char *readableData, unsigned int &offset)
virtual LayerGuid GetGuid() const =0
Returns the unique id of the layer.
int NetworkId
Definition: IRuntime.hpp:24
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:361
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:173
void ResetExternalProfilingOptions(const ExternalProfilingOptions &options, bool resetProfilingService=false)
IPacketBufferPtr GetReadableBuffer() override
constexpr unsigned int ThreadIdSize
void VerifyTimelineHeaderBinary(const unsigned char *readableData, unsigned int &offset, uint32_t packetDataLength)
Struct for the users to pass backend specific options.
EmptyOptional is used to initialize the Optional class in case we want to have default value for an O...
Definition: Optional.hpp:32
CPU Execution: NEON: ArmCompute.
#define ARMNN_NO_LEAKS_IN_SCOPE()
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:172
ProfilingGuid VerifyTimelineEventBinaryPacket(Optional< uint64_t > timestamp, Optional< int > threadId, Optional< ProfilingGuid > eventGuid, const unsigned char *readableData, unsigned int &offset)
virtual int Connect(IInputSlot &destination)=0
ProfilingGuid VerifyTimelineLabelBinaryPacketData(Optional< ProfilingGuid > guid, const std::string &label, const unsigned char *readableData, unsigned int &offset)
A NormalizationDescriptor for the NormalizationLayer.
ExternalProfilingOptions m_ProfilingOptions
Definition: IRuntime.hpp:160
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:530
A SoftmaxDescriptor for the SoftmaxLayer.
#define ARMNN_BYTES_LEAKED_IN_SCOPE()