ArmNN 20.02
RuntimeTests.cpp File Reference
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <Runtime.hpp>
#include <armnn/TypesUtils.hpp>
#include <LabelsAndEventClasses.hpp>
#include <test/ProfilingTestUtils.hpp>
#include <HeapProfiling.hpp>
#include <LeakChecking.hpp>
#include <boost/test/unit_test.hpp>
#include "RuntimeTests.hpp"

Go to the source code of this file.

Namespaces

 armnn
 Copyright (c) 2020 ARM Limited.
 

Functions

void RuntimeLoadedNetworksReserve (armnn::Runtime *runtime)
 
 BOOST_AUTO_TEST_CASE (RuntimeUnloadNetwork)
 
 BOOST_AUTO_TEST_CASE (RuntimeCpuRef)
 
 BOOST_AUTO_TEST_CASE (RuntimeFallbackToCpuRef)
 
 BOOST_AUTO_TEST_CASE (IVGCVSW_1929_QuantizedSoftmaxIssue)
 
 BOOST_AUTO_TEST_CASE (RuntimeBackendOptions)
 
 BOOST_AUTO_TEST_CASE (ProfilingDisable)
 
 BOOST_AUTO_TEST_CASE (ProfilingEnableCpuRef)
 
 BOOST_AUTO_TEST_CASE (ProfilingPostOptimisationStructureCpuRef)
 

Function Documentation

◆ BOOST_AUTO_TEST_CASE() [1/8]

BOOST_AUTO_TEST_CASE ( RuntimeUnloadNetwork  )

Definition at line 37 of file RuntimeTests.cpp.

References ARMNN_BYTES_LEAKED_IN_SCOPE, ARMNN_LEAK_CHECKER_IS_ACTIVE, ARMNN_LOCAL_LEAK_CHECKING_ONLY, ARMNN_NO_LEAKS_IN_SCOPE, ARMNN_OBJECTS_LEAKED_IN_SCOPE, ARMNN_SCOPED_LEAK_CHECKER, armnn::BOOST_AUTO_TEST_CASE(), BOOST_GLOBAL_FIXTURE(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Failure, Runtime::GetDeviceSpec(), armnn::IgnoreUnused(), Runtime::LoadNetwork(), armnn::Optimize(), options, armnn::RuntimeLoadedNetworksReserve(), armnn::Success, and Runtime::UnloadNetwork().

{
    // Build 2 mock-networks and load them into the runtime.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Mock network 1.
    armnn::NetworkId networkIdentifier1 = 1;
    armnn::INetworkPtr mockNetwork1(armnn::INetwork::Create());
    mockNetwork1->AddInputLayer(0, "test layer");
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    runtime->LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime->GetDeviceSpec()));

    // Mock network 2.
    armnn::NetworkId networkIdentifier2 = 2;
    armnn::INetworkPtr mockNetwork2(armnn::INetwork::Create());
    mockNetwork2->AddInputLayer(0, "test layer");
    runtime->LoadNetwork(networkIdentifier2, Optimize(*mockNetwork2, backends, runtime->GetDeviceSpec()));

    // Unload the first network by its networkID.
    BOOST_TEST(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Success);

    // A second unload of the same network must fail.
    BOOST_TEST(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Failure);
}

◆ BOOST_AUTO_TEST_CASE() [2/8]

BOOST_AUTO_TEST_CASE ( RuntimeCpuRef  )

Definition at line 166 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

{
    using namespace armnn;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc; it should fall back to CpuRef.
    NormalizationDescriptor descriptor;
    IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);

    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Optimize the network.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
}
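Once LoadNetwork() has returned Status::Success, the loaded network can be executed with IRuntime::EnqueueWorkload(), as the ProfilingEnableCpuRef case further down also does. A minimal follow-on sketch (not part of RuntimeTests.cpp), assuming the runtime, netId and the 1x1x4x4 Float32 network from the listing above are still in scope:

    // Run the loaded network once; buffers are sized for the 1*1*4*4 Float32 tensors above.
    std::vector<float> inputData(16);
    std::vector<float> outputData(16);

    armnn::InputTensors inputTensors
    {
        { 0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data()) }
    };
    armnn::OutputTensors outputTensors
    {
        { 0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data()) }
    };

    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);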

◆ BOOST_AUTO_TEST_CASE() [3/8]

BOOST_AUTO_TEST_CASE ( RuntimeFallbackToCpuRef  )

Definition at line 200 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuAcc, armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

{
    using namespace armnn;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc; it should fall back to CpuRef.
    NormalizationDescriptor descriptor;
    IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);

    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Allow fallback to CpuRef.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc, armnn::Compute::CpuRef };
    // Optimize the network.
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
}

◆ BOOST_AUTO_TEST_CASE() [4/8]

BOOST_AUTO_TEST_CASE ( IVGCVSW_1929_QuantizedSoftmaxIssue  )

Definition at line 235 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, armnn::QAsymmU8, and IOutputSlot::SetTensorInfo().

{
    // Test for issue reported by Chris Nix in https://jira.arm.com/browse/IVGCVSW-1929
    using namespace armnn;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    armnn::INetworkPtr net(armnn::INetwork::Create());
    armnn::IConnectableLayer* input = net->AddInputLayer(0, "input");
    armnn::IConnectableLayer* softmax = net->AddSoftmaxLayer(armnn::SoftmaxDescriptor(), "softmax");
    armnn::IConnectableLayer* output = net->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo(armnn::TensorShape({ 1, 5 }),
                                                            armnn::DataType::QAsymmU8,
                                                            1.0f / 255,
                                                            0));

    // The softmax output is quantized but deliberately left without a scale.
    softmax->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo(armnn::TensorShape({ 1, 5 }),
                                                              armnn::DataType::QAsymmU8));

    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    std::vector<std::string> errMessages;
    armnn::IOptimizedNetworkPtr optNet = Optimize(*net,
                                                  backends,
                                                  runtime->GetDeviceSpec(),
                                                  OptimizerOptions(),
                                                  errMessages);

    BOOST_TEST(errMessages.size() == 1);
    BOOST_TEST(errMessages[0] ==
               "ERROR: output 0 of layer Softmax (softmax) is of type "
               "Quantized 8 bit but its scale parameter has not been set");
    BOOST_TEST(!optNet);
}
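The failure above comes from the softmax output TensorInfo being created without quantization parameters. A minimal sketch of the corresponding fix (not part of the test), assuming the same softmax layer and output shape as in the listing above; the 1/256 scale is purely illustrative, any valid non-zero scale satisfies the optimizer:

    // Give the quantized softmax output an explicit scale and offset before optimizing.
    armnn::TensorInfo softmaxOutputInfo(armnn::TensorShape({ 1, 5 }), armnn::DataType::QAsymmU8);
    softmaxOutputInfo.SetQuantizationScale(1.0f / 256);   // illustrative value
    softmaxOutputInfo.SetQuantizationOffset(0);
    softmax->GetOutputSlot(0).SetTensorInfo(softmaxOutputInfo);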

◆ BOOST_AUTO_TEST_CASE() [5/8]

BOOST_AUTO_TEST_CASE ( RuntimeBackendOptions  )

Definition at line 276 of file RuntimeTests.cpp.

References BackendOptions::AddOption(), GetBackendId(), and IRuntime::CreationOptions::m_BackendOptions.

{
    using namespace armnn;

    IRuntime::CreationOptions creationOptions;
    auto& backendOptions = creationOptions.m_BackendOptions;

    // Define options on explicit construction.
    BackendOptions options1("FakeBackend1",
                            {
                                { "Option1", 1.3f },
                                { "Option2", true }
                            });

    // Add an option after construction.
    options1.AddOption({ "Option3", "some_value" });

    // Add the options to the CreationOptions struct.
    backendOptions.push_back(options1);

    // Add more options via in-place explicit construction.
    backendOptions.emplace_back(BackendOptions{ "FakeBackend1",
                                                {{ "Option4", 42 }}
                                              });

    // First group
    BOOST_TEST(backendOptions[0].GetBackendId().Get() == "FakeBackend1");
    BOOST_TEST(backendOptions[0].GetOption(0).GetName() == "Option1");
    BOOST_TEST(backendOptions[0].GetOption(0).GetValue().IsFloat() == true);
    BOOST_TEST(backendOptions[0].GetOption(0).GetValue().AsFloat() == 1.3f);

    BOOST_TEST(backendOptions[0].GetOption(1).GetName() == "Option2");
    BOOST_TEST(backendOptions[0].GetOption(1).GetValue().IsBool() == true);
    BOOST_TEST(backendOptions[0].GetOption(1).GetValue().AsBool() == true);

    BOOST_TEST(backendOptions[0].GetOption(2).GetName() == "Option3");
    BOOST_TEST(backendOptions[0].GetOption(2).GetValue().IsString() == true);
    BOOST_TEST(backendOptions[0].GetOption(2).GetValue().AsString() == "some_value");

    // Second group
    BOOST_TEST(backendOptions[1].GetBackendId().Get() == "FakeBackend1");
    BOOST_TEST(backendOptions[1].GetOption(0).GetName() == "Option4");
    BOOST_TEST(backendOptions[1].GetOption(0).GetValue().IsInt() == true);
    BOOST_TEST(backendOptions[1].GetOption(0).GetValue().AsInt() == 42);
}
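The options defined here only take effect once the CreationOptions carrying them are passed to IRuntime::Create(). A minimal sketch, assuming a hypothetical backend id "SampleBackend" and an option name ("FastMathEnabled") that the target backend actually understands:

    // Forward backend-specific options to the runtime at creation time.
    armnn::IRuntime::CreationOptions creationOptions;
    creationOptions.m_BackendOptions.emplace_back(
        armnn::BackendOptions{ "SampleBackend",                  // hypothetical backend id
                               {{ "FastMathEnabled", true }} }); // illustrative option name
    armnn::IRuntimePtr runtime = armnn::IRuntime::Create(creationOptions);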

◆ BOOST_AUTO_TEST_CASE() [6/8]

BOOST_AUTO_TEST_CASE ( ProfilingDisable  )

Definition at line 324 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), BufferManager::GetReadableBuffer(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

{
    using namespace armnn;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc; it should fall back to CpuRef.
    NormalizationDescriptor descriptor;
    IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);

    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Optimize the network.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);

    profiling::ProfilingServiceRuntimeHelper profilingServiceHelper;
    profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
    auto readableBuffer = bufferManager.GetReadableBuffer();

    // Profiling is not enabled, so the post-optimisation structure should not be created.
    BOOST_TEST(!readableBuffer);
}
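The null buffer here is the direct consequence of profiling being off by default in IRuntime::CreationOptions. A minimal sketch of the switch that the next test case flips, using only the members referenced on this page:

    // Turn on external profiling before the runtime is created.
    armnn::IRuntime::CreationOptions options;
    options.m_ProfilingOptions.m_EnableProfiling = true;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));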

◆ BOOST_AUTO_TEST_CASE() [7/8]

BOOST_AUTO_TEST_CASE ( ProfilingEnableCpuRef  )

Definition at line 365 of file RuntimeTests.cpp.

References LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS, LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS, LabelsAndEventClasses::BACKENDID_GUID, BOOST_CHECK(), IOutputSlot::Connect(), LabelsAndEventClasses::CONNECTION_GUID, armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::profiling::DataLink, armnn::profiling::ExecutionLink, armnn::Float32, IConnectableLayer::GetGuid(), IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), BufferManager::GetReadableBuffer(), LabelsAndEventClasses::INFERENCE_GUID, armnn::profiling::LabelLink, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_ProfilingOptions, BufferManager::MarkRead(), LabelsAndEventClasses::NAME_GUID, LabelsAndEventClasses::NETWORK_GUID, armnn::Optimize(), options, armnn::profiling::RetentionLink, IOutputSlot::SetTensorInfo(), armnn::Success, LabelsAndEventClasses::TYPE_GUID, VerifyTimelineEntityBinaryPacketData(), VerifyTimelineEventBinaryPacket(), VerifyTimelineHeaderBinary(), VerifyTimelineLabelBinaryPacketData(), VerifyTimelineRelationshipBinaryPacketData(), and LabelsAndEventClasses::WORKLOAD_EXECUTION_GUID.

{
    using namespace armnn;
    using namespace armnn::profiling;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    options.m_ProfilingOptions.m_EnableProfiling = true;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0, "input");

    NormalizationDescriptor descriptor;
    IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor, "normalization");

    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Optimize the network.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

    ProfilingGuid optNetGuid = optNet->GetGuid();

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);

    profiling::ProfilingServiceRuntimeHelper profilingServiceHelper;
    profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
    auto readableBuffer = bufferManager.GetReadableBuffer();

    // Profiling is enabled, so the post-optimisation structure should be created.
    BOOST_CHECK(readableBuffer != nullptr);

    unsigned int size = readableBuffer->GetSize();
    BOOST_CHECK(size == 1068);

    const unsigned char* readableData = readableBuffer->GetReadableData();
    BOOST_CHECK(readableData != nullptr);

    unsigned int offset = 0;

    // Verify header.
    VerifyTimelineHeaderBinary(readableData, offset, 1060);

    // Post-optimisation network.
    // The packet stream is walked with VerifyTimelineEntityBinaryPacketData,
    // VerifyTimelineLabelBinaryPacketData, VerifyTimelineRelationshipBinaryPacketData and
    // VerifyTimelineEventBinaryPacket; the relationship and event checks are summarised in
    // the comments below (see line 365 of RuntimeTests.cpp for the full argument lists).

    // Network entity
    VerifyTimelineEntityBinaryPacketData(optNetGuid, readableData, offset);

    // ... followed by the network's Entity - Type and Type label relationships.

    // Input layer
    // Input layer entity
    VerifyTimelineEntityBinaryPacketData(input->GetGuid(), readableData, offset);

    // Name entity
    VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "input", readableData, offset);

    // ... followed by the input layer's Entity - Name, Name label, Entity - Type and
    // Type label relationships, and the Network - Input layer relationship.

    // Normalization layer
    // Normalization layer entity
    VerifyTimelineEntityBinaryPacketData(normalize->GetGuid(), readableData, offset);

    // Name entity
    VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "normalization", readableData, offset);

    // ... followed by the normalization layer's Entity - Name, Name label, Entity - Type and
    // Type label relationships, the Network - Normalize layer relationship, the
    // Input layer - Normalize layer connection and that connection's Entity - Type and
    // Type label relationships.

    // Normalization workload
    // Normalization workload entity
    VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);

    // ... followed by the workload's Entity - Type and Type label relationships.

    // BackendId entity
    VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "CpuRef", readableData, offset);

    // ... followed by the Entity - BackendId and BackendId label relationships and the
    // Normalize layer - Normalize workload relationship.

    // Output layer
    // Output layer entity
    VerifyTimelineEntityBinaryPacketData(output->GetGuid(), readableData, offset);

    // Name entity
    VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "output", readableData, offset);

    // ... followed by the output layer's Entity - Name, Name label, Entity - Type and
    // Type label relationships, the Network - Output layer relationship, the
    // Normalize layer - Output layer connection and that connection's Entity - Type and
    // Type label relationships.

    bufferManager.MarkRead(readableBuffer);

    // Create structures for input & output.
    std::vector<float> inputData(16);
    std::vector<float> outputData(16);

    InputTensors inputTensors
    {
        { 0, ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data()) }
    };
    OutputTensors outputTensors
    {
        { 0, Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data()) }
    };

    // Do the inference.
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // Get readable buffer for inference timeline.
    auto inferenceReadableBuffer = bufferManager.GetReadableBuffer();
    BOOST_CHECK(inferenceReadableBuffer != nullptr);

    // Get readable buffer for output workload.
    auto outputReadableBuffer = bufferManager.GetReadableBuffer();
    BOOST_CHECK(outputReadableBuffer != nullptr);

    // Get readable buffer for input workload.
    auto inputReadableBuffer = bufferManager.GetReadableBuffer();
    BOOST_CHECK(inputReadableBuffer != nullptr);

    // Validate input workload data.
    size = inputReadableBuffer->GetSize();
    BOOST_CHECK(size == 204);

    readableData = inputReadableBuffer->GetReadableData();
    BOOST_CHECK(readableData != nullptr);

    offset = 0;

    // Verify header.
    VerifyTimelineHeaderBinary(readableData, offset, 196);

    // Input workload
    // Input workload entity
    VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);

    // ... followed by the input workload's Entity - Type and Type label relationships.

    // BackendId entity
    VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "CpuRef", readableData, offset);

    // ... followed by the Entity - BackendId and BackendId label relationships and the
    // Input layer - Input workload relationship.

    bufferManager.MarkRead(inputReadableBuffer);

    // Validate output workload data.
    size = outputReadableBuffer->GetSize();
    BOOST_CHECK(size == 204);

    readableData = outputReadableBuffer->GetReadableData();
    BOOST_CHECK(readableData != nullptr);

    offset = 0;

    // Verify header.
    VerifyTimelineHeaderBinary(readableData, offset, 196);

    // Output workload
    // Output workload entity
    VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);

    // ... followed by the output workload's Entity - Type and Type label relationships.

    // BackendId entity
    VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "CpuRef", readableData, offset);

    // ... followed by the Entity - BackendId and BackendId label relationships and the
    // Output layer - Output workload relationship.

    bufferManager.MarkRead(outputReadableBuffer);

    // Validate inference data.
    size = inferenceReadableBuffer->GetSize();
    BOOST_CHECK(size == 1272);

    readableData = inferenceReadableBuffer->GetReadableData();
    BOOST_CHECK(readableData != nullptr);

    offset = 0;

    // Verify header.
    VerifyTimelineHeaderBinary(readableData, offset, 1264);

    // Inference timeline trace
    // Inference entity
    VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);

    // ... followed by the inference's Entity - Type and Type label relationships and the
    // Network - Inference relationship (involving optNetGuid).

    // Start of Inference life: an event packet (timestamp, threadId, eventGuid), the
    // Inference - event relationship and the Event - event class (SOL) relationship.

    // Execution
    // Input workload execution
    // Input workload execution entity
    VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);

    // ... followed by its Entity - Type and Type label relationships, the
    // Inference - Workload execution and Workload - Workload execution relationships, and
    // start/end-of-life event packets, each with an execution - event relationship and an
    // Event - event class (SOL/EOL) relationship.

    // Normalize workload execution
    // Normalize workload execution entity
    VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);

    // ... followed by the same set of relationship and start/end-of-life event checks as
    // for the input workload execution.

    // Output workload execution
    // Output workload execution entity
    VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);

    // ... followed by the same set of relationship and start/end-of-life event checks as
    // for the input workload execution.

    // End of Inference life: an event packet, the Inference - event relationship and the
    // Event - event class (EOL) relationship.

    bufferManager.MarkRead(inferenceReadableBuffer);
}

◆ BOOST_AUTO_TEST_CASE() [8/8]

BOOST_AUTO_TEST_CASE ( ProfilingPostOptimisationStructureCpuRef  )

Definition at line 1140 of file RuntimeTests.cpp.

References BOOST_AUTO_TEST_SUITE_END(), armnn::CpuRef, and VerifyPostOptimisationStructureTestImpl().

{
    VerifyPostOptimisationStructureTestImpl(armnn::Compute::CpuRef);
}