ArmNN  NotReleased
RuntimeTests.cpp File Reference
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <Runtime.hpp>
#include <armnn/TypesUtils.hpp>
#include <LabelsAndEventClasses.hpp>
#include <test/ProfilingTestUtils.hpp>
#include <HeapProfiling.hpp>
#include <LeakChecking.hpp>
#include <boost/test/unit_test.hpp>
#include "RuntimeTests.hpp"

Go to the source code of this file.

Namespaces

 armnn
 

Functions

void RuntimeLoadedNetworksReserve (armnn::Runtime *runtime)
 
 BOOST_AUTO_TEST_CASE (RuntimeUnloadNetwork)
 
 BOOST_AUTO_TEST_CASE (RuntimeCpuRef)
 
 BOOST_AUTO_TEST_CASE (RuntimeFallbackToCpuRef)
 
 BOOST_AUTO_TEST_CASE (IVGCVSW_1929_QuantizedSoftmaxIssue)
 
 BOOST_AUTO_TEST_CASE (RuntimeBackendOptions)
 
 BOOST_AUTO_TEST_CASE (ProfilingDisable)
 
 BOOST_AUTO_TEST_CASE (ProfilingEnableCpuRef)
 
 BOOST_AUTO_TEST_CASE (ProfilingPostOptimisationStructureCpuRef)
 

Function Documentation

◆ BOOST_AUTO_TEST_CASE() [1/8]

BOOST_AUTO_TEST_CASE ( RuntimeUnloadNetwork  )

Definition at line 37 of file RuntimeTests.cpp.

References ARMNN_BYTES_LEAKED_IN_SCOPE, ARMNN_LEAK_CHECKER_IS_ACTIVE, ARMNN_LOCAL_LEAK_CHECKING_ONLY, ARMNN_NO_LEAKS_IN_SCOPE, ARMNN_OBJECTS_LEAKED_IN_SCOPE, ARMNN_SCOPED_LEAK_CHECKER, armnn::BOOST_AUTO_TEST_CASE(), BOOST_GLOBAL_FIXTURE(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Failure, Runtime::GetDeviceSpec(), Runtime::LoadNetwork(), armnn::Optimize(), options, armnn::RuntimeLoadedNetworksReserve(), armnn::Success, and Runtime::UnloadNetwork().

38 {
39  // build 2 mock-networks and load them into the runtime
42 
43  // Mock network 1.
44  armnn::NetworkId networkIdentifier1 = 1;
46  mockNetwork1->AddInputLayer(0, "test layer");
47  std::vector<armnn::BackendId> backends = {armnn::Compute::CpuRef};
48  runtime->LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime->GetDeviceSpec()));
49 
50  // Mock network 2.
51  armnn::NetworkId networkIdentifier2 = 2;
53  mockNetwork2->AddInputLayer(0, "test layer");
54  runtime->LoadNetwork(networkIdentifier2, Optimize(*mockNetwork2, backends, runtime->GetDeviceSpec()));
55 
56  // Unloads one by its networkID.
57  BOOST_TEST(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Success);
58 
59  BOOST_TEST(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Failure);
60 }
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Definition: Network.cpp:807
static INetworkPtr Create()
Definition: Network.cpp:48
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:85
CPU Execution: Reference C++ kernels.
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:32
armnn::Runtime::CreationOptions::ExternalProfilingOptions options
int NetworkId
Definition: IRuntime.hpp:19
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:24

◆ BOOST_AUTO_TEST_CASE() [2/8]

BOOST_AUTO_TEST_CASE ( RuntimeCpuRef  )

Definition at line 166 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

167 {
168  using namespace armnn;
169 
170  // Create runtime in which test will run
173 
174  // build up the structure of the network
176 
177  IConnectableLayer* input = net->AddInputLayer(0);
178 
 179  // This layer configuration isn't supported by CpuAcc, so it should fall back to CpuRef.
180  NormalizationDescriptor descriptor;
181  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
182 
183  IConnectableLayer* output = net->AddOutputLayer(0);
184 
185  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
186  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
187 
188  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
189  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
190 
191  // optimize the network
192  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
193  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
194 
 195  // Load it into the runtime. It should succeed.
196  armnn::NetworkId netId;
197  BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
198 }
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
A NormalizationDescriptor for the NormalizationLayer.
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Definition: Network.cpp:807
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
static INetworkPtr Create()
Definition: Network.cpp:48
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:85
CPU Execution: Reference C++ kernels.
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:32
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:544
virtual int Connect(IInputSlot &destination)=0
armnn::Runtime::CreationOptions::ExternalProfilingOptions options
int NetworkId
Definition: IRuntime.hpp:19
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:24
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0

◆ BOOST_AUTO_TEST_CASE() [3/8]

BOOST_AUTO_TEST_CASE ( RuntimeFallbackToCpuRef  )

Definition at line 200 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuAcc, armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

201 {
202  using namespace armnn;
203 
204  // Create runtime in which test will run
207 
208  // build up the structure of the network
210 
211  IConnectableLayer* input = net->AddInputLayer(0);
212 
 213  // This layer configuration isn't supported by CpuAcc, so it should fall back to CpuRef.
214  NormalizationDescriptor descriptor;
215  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
216 
217  IConnectableLayer* output = net->AddOutputLayer(0);
218 
219  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
220  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
221 
222  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
223  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
224 
225  // Allow fallback to CpuRef.
226  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc, armnn::Compute::CpuRef };
227  // optimize the network
228  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
229 
230  // Load it into the runtime. It should succeed.
231  armnn::NetworkId netId;
232  BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
233 }
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
A NormalizationDescriptor for the NormalizationLayer.
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Definition: Network.cpp:807
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
static INetworkPtr Create()
Definition: Network.cpp:48
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:85
CPU Execution: Reference C++ kernels.
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:32
CPU Execution: NEON: ArmCompute.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:544
virtual int Connect(IInputSlot &destination)=0
armnn::Runtime::CreationOptions::ExternalProfilingOptions options
int NetworkId
Definition: IRuntime.hpp:19
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:24
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0

◆ BOOST_AUTO_TEST_CASE() [4/8]

BOOST_AUTO_TEST_CASE ( IVGCVSW_1929_QuantizedSoftmaxIssue  )

Definition at line 235 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, armnn::QAsymmU8, and IOutputSlot::SetTensorInfo().

236 {
237  // Test for issue reported by Chris Nix in https://jira.arm.com/browse/IVGCVSW-1929
238  using namespace armnn;
239 
240  // Create runtime in which test will run
243 
244  // build up the structure of the network
246  armnn::IConnectableLayer* input = net->AddInputLayer(
247  0,
248  "input"
249  );
250  armnn::IConnectableLayer* softmax = net->AddSoftmaxLayer(
252  "softmax"
253  );
254  armnn::IConnectableLayer* output = net->AddOutputLayer(
255  0,
256  "output"
257  );
258 
259  input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
260  softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
261 
263  armnn::TensorShape({ 1, 5 }),
265  1.0f/255,
266  0
267  ));
268 
270  armnn::TensorShape({ 1, 5 }),
272  ));
273 
274  std::vector<armnn::BackendId> backends = {armnn::Compute::CpuRef};
275  std::vector<std::string> errMessages;
277  *net,
278  backends,
279  runtime->GetDeviceSpec(),
281  errMessages
282  );
283 
284  BOOST_TEST(errMessages.size() == 1);
285  BOOST_TEST(errMessages[0] ==
286  "ERROR: output 0 of layer Softmax (softmax) is of type "
287  "Quantized 8 bit but its scale parameter has not been set");
288  BOOST_TEST(!optNet);
289 }
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Definition: Network.cpp:807
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
static INetworkPtr Create()
Definition: Network.cpp:48
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:85
CPU Execution: Reference C++ kernels.
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:32
A SoftmaxDescriptor for the SoftmaxLayer.
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:544
virtual int Connect(IInputSlot &destination)=0
armnn::Runtime::CreationOptions::ExternalProfilingOptions options
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:24
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0

◆ BOOST_AUTO_TEST_CASE() [5/8]

BOOST_AUTO_TEST_CASE ( RuntimeBackendOptions  )

Definition at line 291 of file RuntimeTests.cpp.

References BackendOptions::AddOption(), GetBackendId(), and IRuntime::CreationOptions::m_BackendOptions.

292 {
293  using namespace armnn;
294 
295  IRuntime::CreationOptions creationOptions;
296  auto& backendOptions = creationOptions.m_BackendOptions;
297 
298 
299  // Define Options on explicit construction
300  BackendOptions options1("FakeBackend1",
301  {
302  {"Option1", 1.3f},
303  {"Option2", true}
304  });
305 
306  // Add an option after construction
307  options1.AddOption({"Option3", "some_value"});
308 
309  // Add the options to CreationOptions struct
310  backendOptions.push_back(options1);
311 
312  // Add more Options via inplace explicit construction
313  backendOptions.emplace_back(
314  BackendOptions{"FakeBackend1",
315  {{"Option4", 42}}
316  });
317 
318 
319  // First group
320  BOOST_TEST(backendOptions[0].GetBackendId().Get() == "FakeBackend1");
321  BOOST_TEST(backendOptions[0].GetOption(0).GetName() == "Option1");
322  BOOST_TEST(backendOptions[0].GetOption(0).GetValue().IsFloat() == true);
323  BOOST_TEST(backendOptions[0].GetOption(0).GetValue().AsFloat() == 1.3f);
324 
325  BOOST_TEST(backendOptions[0].GetOption(1).GetName() == "Option2");
326  BOOST_TEST(backendOptions[0].GetOption(1).GetValue().IsBool() == true);
327  BOOST_TEST(backendOptions[0].GetOption(1).GetValue().AsBool() == true);
328 
329  BOOST_TEST(backendOptions[0].GetOption(2).GetName() == "Option3");
330  BOOST_TEST(backendOptions[0].GetOption(2).GetValue().IsString() == true);
331  BOOST_TEST(backendOptions[0].GetOption(2).GetValue().AsString() == "some_value");
332 
333  // Second group
334  BOOST_TEST(backendOptions[1].GetBackendId().Get() == "FakeBackend1");
335  BOOST_TEST(backendOptions[1].GetOption(0).GetName() == "Option4");
336  BOOST_TEST(backendOptions[1].GetOption(0).GetValue().IsInt() == true);
337  BOOST_TEST(backendOptions[1].GetOption(0).GetValue().AsInt() == 42);
338 }
void AddOption(BackendOption &&option)
Struct for the users to pass backend specific options.
std::vector< BackendOptions > m_BackendOptions
Definition: IRuntime.hpp:104
const char * GetBackendId()

◆ BOOST_AUTO_TEST_CASE() [6/8]

BOOST_AUTO_TEST_CASE ( ProfilingDisable  )

Definition at line 340 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), BufferManager::GetReadableBuffer(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

341 {
342  using namespace armnn;
343 
344  // Create runtime in which the test will run
347 
348  // build up the structure of the network
350 
351  IConnectableLayer* input = net->AddInputLayer(0);
352 
353  // This layer configuration isn't supported by CpuAcc, should fall back to CpuRef.
354  NormalizationDescriptor descriptor;
355  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
356 
357  IConnectableLayer* output = net->AddOutputLayer(0);
358 
359  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
360  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
361 
362  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
363  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
364 
365  // optimize the network
366  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
367  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
368 
369  // Load it into the runtime. It should succeed.
370  armnn::NetworkId netId;
371  BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
372 
373  profiling::ProfilingServiceRuntimeHelper profilingServiceHelper;
374  profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
375  auto readableBuffer = bufferManager.GetReadableBuffer();
376 
377  // Profiling is not enabled, the post-optimisation structure should not be created
378  BOOST_TEST(!readableBuffer);
379 }
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
A NormalizationDescriptor for the NormalizationLayer.
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Definition: Network.cpp:807
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
IPacketBufferPtr GetReadableBuffer() override
static INetworkPtr Create()
Definition: Network.cpp:48
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:85
CPU Execution: Reference C++ kernels.
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:32
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:544
virtual int Connect(IInputSlot &destination)=0
armnn::Runtime::CreationOptions::ExternalProfilingOptions options
int NetworkId
Definition: IRuntime.hpp:19
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:24
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0

◆ BOOST_AUTO_TEST_CASE() [7/8]

BOOST_AUTO_TEST_CASE ( ProfilingEnableCpuRef  )

Definition at line 381 of file RuntimeTests.cpp.

References LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS, LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS, LabelsAndEventClasses::BACKENDID_GUID, BOOST_CHECK(), IOutputSlot::Connect(), LabelsAndEventClasses::CONNECTION_GUID, armnn::CpuRef, INetwork::Create(), IRuntime::Create(), DataLink, ExecutionLink, armnn::Float32, IConnectableLayer::GetGuid(), IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), BufferManager::GetReadableBuffer(), LabelsAndEventClasses::INFERENCE_GUID, LabelLink, IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_ProfilingOptions, BufferManager::MarkRead(), LabelsAndEventClasses::NAME_GUID, LabelsAndEventClasses::NETWORK_GUID, armnn::Optimize(), options, RetentionLink, IOutputSlot::SetTensorInfo(), armnn::Success, LabelsAndEventClasses::TYPE_GUID, VerifyTimelineEntityBinaryPacket(), VerifyTimelineEventBinaryPacket(), VerifyTimelineLabelBinaryPacket(), VerifyTimelineRelationshipBinaryPacket(), and LabelsAndEventClasses::WORKLOAD_EXECUTION_GUID.

382 {
383  using namespace armnn;
384  using namespace armnn::profiling;
385 
386  // Create runtime in which the test will run
388  options.m_ProfilingOptions.m_EnableProfiling = true;
390 
391  // build up the structure of the network
393 
394  IConnectableLayer* input = net->AddInputLayer(0, "input");
395 
396  NormalizationDescriptor descriptor;
397  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor, "normalization");
398 
399  IConnectableLayer* output = net->AddOutputLayer(0, "output");
400 
401  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
402  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
403 
404  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
405  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
406 
407  // optimize the network
408  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
409  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
410 
411  ProfilingGuid optNetGuid = optNet->GetGuid();
412 
413  // Load it into the runtime. It should succeed.
414  armnn::NetworkId netId;
415  BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
416 
417  profiling::ProfilingServiceRuntimeHelper profilingServiceHelper;
418  profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
419  auto readableBuffer = bufferManager.GetReadableBuffer();
420 
421  // Profiling is enabled, the post-optimisation structure should be created
422  BOOST_CHECK(readableBuffer != nullptr);
423 
424  unsigned int size = readableBuffer->GetSize();
425  BOOST_CHECK(size == 1356);
426 
427  const unsigned char* readableData = readableBuffer->GetReadableData();
428  BOOST_CHECK(readableData != nullptr);
429 
430  unsigned int offset = 0;
431 
432  // Post-optimisation network
433  // Network entity
434  VerifyTimelineEntityBinaryPacket(optNetGuid, readableData, offset);
435 
436  // Entity - Type relationship
438  EmptyOptional(),
439  optNetGuid,
441  readableData,
442  offset);
443 
444  // Type label relationship
446  EmptyOptional(),
447  EmptyOptional(),
449  readableData,
450  offset);
451 
452  // Input layer
453  // Input layer entity
454  VerifyTimelineEntityBinaryPacket(input->GetGuid(), readableData, offset);
455 
456  // Name Entity
457  VerifyTimelineLabelBinaryPacket(EmptyOptional(), "input", readableData, offset);
458 
459  // Entity - Name relationship
461  EmptyOptional(),
462  input->GetGuid(),
463  EmptyOptional(),
464  readableData,
465  offset);
466 
467  // Name label relationship
469  EmptyOptional(),
470  EmptyOptional(),
472  readableData,
473  offset);
474 
475  // Entity - Type relationship
477  EmptyOptional(),
478  input->GetGuid(),
479  EmptyOptional(),
480  readableData,
481  offset);
482 
483  // Type label relationship
485  EmptyOptional(),
486  EmptyOptional(),
488  readableData,
489  offset);
490 
491  // Network - Input layer relationship
493  EmptyOptional(),
494  optNetGuid,
495  input->GetGuid(),
496  readableData,
497  offset);
498 
499  // Normalization layer
500  // Normalization layer entity
501  VerifyTimelineEntityBinaryPacket(normalize->GetGuid(), readableData, offset);
502 
503  // Name entity
504  VerifyTimelineLabelBinaryPacket(EmptyOptional(), "normalization", readableData, offset);
505 
506  // Entity - Name relationship
508  EmptyOptional(),
509  normalize->GetGuid(),
510  EmptyOptional(),
511  readableData,
512  offset);
513 
514  // Name label relationship
516  EmptyOptional(),
517  EmptyOptional(),
519  readableData,
520  offset);
521 
522  // Entity - Type relationship
524  EmptyOptional(),
525  normalize->GetGuid(),
526  EmptyOptional(),
527  readableData,
528  offset);
529 
530  // Type label relationship
532  EmptyOptional(),
533  EmptyOptional(),
535  readableData,
536  offset);
537 
538  // Network - Normalize layer relationship
540  EmptyOptional(),
541  optNetGuid,
542  normalize->GetGuid(),
543  readableData,
544  offset);
545 
546  // Input layer - Normalize layer relationship
548  EmptyOptional(),
549  input->GetGuid(),
550  normalize->GetGuid(),
551  readableData,
552  offset);
553 
554  // Entity - Type relationship
556  EmptyOptional(),
557  EmptyOptional(),
559  readableData,
560  offset);
561 
562  // Type label relationship
564  EmptyOptional(),
565  EmptyOptional(),
567  readableData,
568  offset);
569 
570  // Normalization workload
571  // Normalization workload entity
572  VerifyTimelineEntityBinaryPacket(EmptyOptional(), readableData, offset);
573 
574  // Entity - Type relationship
576  EmptyOptional(),
577  EmptyOptional(),
578  EmptyOptional(),
579  readableData,
580  offset);
581 
582  // Type label relationship
584  EmptyOptional(),
585  EmptyOptional(),
587  readableData,
588  offset);
589 
590  // BackendId entity
591  VerifyTimelineLabelBinaryPacket(EmptyOptional(), "CpuRef", readableData, offset);
592 
593  // Entity - BackendId relationship
595  EmptyOptional(),
596  EmptyOptional(),
597  EmptyOptional(),
598  readableData,
599  offset);
600 
601  // BackendId label relationship
603  EmptyOptional(),
604  EmptyOptional(),
606  readableData,
607  offset);
608 
609  // Normalize layer - Normalize workload relationship
611  EmptyOptional(),
612  normalize->GetGuid(),
613  EmptyOptional(),
614  readableData,
615  offset);
616 
617  // Output layer
618  // Output layer entity
619  VerifyTimelineEntityBinaryPacket(output->GetGuid(), readableData, offset);
620 
621  // Name entity
622  VerifyTimelineLabelBinaryPacket(EmptyOptional(), "output", readableData, offset);
623 
624  // Entity - Name relationship
626  EmptyOptional(),
627  output->GetGuid(),
628  EmptyOptional(),
629  readableData,
630  offset);
631 
632  // Name label relationship
634  EmptyOptional(),
635  EmptyOptional(),
637  readableData,
638  offset);
639 
640  // Entity - Type relationship
642  EmptyOptional(),
643  output->GetGuid(),
644  EmptyOptional(),
645  readableData,
646  offset);
647 
648  // Type label relationship
650  EmptyOptional(),
651  EmptyOptional(),
653  readableData,
654  offset);
655 
656  // Network - Output layer relationship
658  EmptyOptional(),
659  optNetGuid,
660  output->GetGuid(),
661  readableData,
662  offset);
663 
664  // Normalize layer - Output layer relationship
666  EmptyOptional(),
667  normalize->GetGuid(),
668  output->GetGuid(),
669  readableData,
670  offset);
671 
672  // Entity - Type relationship
674  EmptyOptional(),
675  EmptyOptional(),
677  readableData,
678  offset);
679 
680  // Type label relationship
682  EmptyOptional(),
683  EmptyOptional(),
685  readableData,
686  offset);
687 
688  bufferManager.MarkRead(readableBuffer);
689 
690  // Creates structures for input & output.
691  std::vector<float> inputData(16);
692  std::vector<float> outputData(16);
693 
694  InputTensors inputTensors
695  {
696  {0, ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
697  };
698  OutputTensors outputTensors
699  {
700  {0, Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
701  };
702 
703  // Does the inference.
704  runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
705 
706  // Get readable buffer for inference timeline
707  auto inferenceReadableBuffer = bufferManager.GetReadableBuffer();
708  BOOST_CHECK(inferenceReadableBuffer != nullptr);
709 
710  // Get readable buffer for output workload
711  auto outputReadableBuffer = bufferManager.GetReadableBuffer();
712  BOOST_CHECK(outputReadableBuffer != nullptr);
713 
714  // Get readable buffer for input workload
715  auto inputReadableBuffer = bufferManager.GetReadableBuffer();
716  BOOST_CHECK(inputReadableBuffer != nullptr);
717 
718  // Validate input workload data
719  size = inputReadableBuffer->GetSize();
720  BOOST_CHECK(size == 252);
721 
722  readableData = inputReadableBuffer->GetReadableData();
723  BOOST_CHECK(readableData != nullptr);
724 
725  offset = 0;
726 
727  // Input workload
728  // Input workload entity
729  VerifyTimelineEntityBinaryPacket(EmptyOptional(), readableData, offset);
730 
731  // Entity - Type relationship
733  EmptyOptional(),
734  EmptyOptional(),
735  EmptyOptional(),
736  readableData,
737  offset);
738 
739  // Type label relationship
741  EmptyOptional(),
742  EmptyOptional(),
744  readableData,
745  offset);
746 
747  // BackendId entity
748  VerifyTimelineLabelBinaryPacket(EmptyOptional(), "CpuRef", readableData, offset);
749 
750  // Entity - BackendId relationship
752  EmptyOptional(),
753  EmptyOptional(),
754  EmptyOptional(),
755  readableData,
756  offset);
757 
758  // BackendId label relationship
760  EmptyOptional(),
761  EmptyOptional(),
763  readableData,
764  offset);
765 
766  // Input layer - Input workload relationship
768  EmptyOptional(),
769  input->GetGuid(),
770  EmptyOptional(),
771  readableData,
772  offset);
773 
774  bufferManager.MarkRead(inputReadableBuffer);
775 
776  // Validate output workload data
777  size = outputReadableBuffer->GetSize();
778  BOOST_CHECK(size == 252);
779 
780  readableData = outputReadableBuffer->GetReadableData();
781  BOOST_CHECK(readableData != nullptr);
782 
783  offset = 0;
784 
785  // Output workload
786  // Output workload entity
787  VerifyTimelineEntityBinaryPacket(EmptyOptional(), readableData, offset);
788 
789  // Entity - Type relationship
791  EmptyOptional(),
792  EmptyOptional(),
793  EmptyOptional(),
794  readableData,
795  offset);
796 
797  // Type label relationship
799  EmptyOptional(),
800  EmptyOptional(),
802  readableData,
803  offset);
804 
805  // BackendId entity
806  VerifyTimelineLabelBinaryPacket(EmptyOptional(), "CpuRef", readableData, offset);
807 
808  // Entity - BackendId relationship
810  EmptyOptional(),
811  EmptyOptional(),
812  EmptyOptional(),
813  readableData,
814  offset);
815 
816  // BackendId label relationship
818  EmptyOptional(),
819  EmptyOptional(),
821  readableData,
822  offset);
823 
824  // Output layer - Output workload relationship
826  EmptyOptional(),
827  output->GetGuid(),
828  EmptyOptional(),
829  readableData,
830  offset);
831 
832  bufferManager.MarkRead(outputReadableBuffer);
833 
834  // Validate inference data
835  size = inferenceReadableBuffer->GetSize();
836  BOOST_CHECK(size == 1608);
837 
838  readableData = inferenceReadableBuffer->GetReadableData();
839  BOOST_CHECK(readableData != nullptr);
840 
841  offset = 0;
842 
843  // Inference timeline trace
844  // Inference entity
845  VerifyTimelineEntityBinaryPacket(EmptyOptional(), readableData, offset);
846 
847  // Entity - Type relationship
849  EmptyOptional(),
850  EmptyOptional(),
852  readableData,
853  offset);
854 
855  // Type label relationship
857  EmptyOptional(),
858  EmptyOptional(),
860  readableData,
861  offset);
862 
863  // Network - Inference relationship
865  EmptyOptional(),
866  optNetGuid,
867  EmptyOptional(),
868  readableData,
869  offset);
870 
871  // Start Inference life
872  // Event packet - timeline, threadId, eventGuid
874 
875  // Inference - event relationship
877  EmptyOptional(),
878  EmptyOptional(),
879  EmptyOptional(),
880  readableData,
881  offset);
882 
883  // Event - event class relationship
885  EmptyOptional(),
886  EmptyOptional(),
888  readableData,
889  offset);
890 
891  // Execution
892  // Input workload execution
893  // Input workload execution entity
894  VerifyTimelineEntityBinaryPacket(EmptyOptional(), readableData, offset);
895 
896  // Entity - Type relationship
898  EmptyOptional(),
899  EmptyOptional(),
901  readableData,
902  offset);
903 
904  // Type label relationship
906  EmptyOptional(),
907  EmptyOptional(),
909  readableData,
910  offset);
911 
912  // Inference - Workload execution relationship
914  EmptyOptional(),
915  EmptyOptional(),
916  EmptyOptional(),
917  readableData,
918  offset);
919 
920  // Workload - Workload execution relationship
922  EmptyOptional(),
923  EmptyOptional(),
924  EmptyOptional(),
925  readableData,
926  offset);
927 
928  // Start Input workload execution life
929  // Event packet - timeline, threadId, eventGuid
931 
932  // Input workload execution - event relationship
934  EmptyOptional(),
935  EmptyOptional(),
936  EmptyOptional(),
937  readableData,
938  offset);
939 
940  // Event - event class relationship
942  EmptyOptional(),
943  EmptyOptional(),
945  readableData,
946  offset);
947 
948  // End of Input workload execution life
949  // Event packet - timeline, threadId, eventGuid
951 
952  // Input workload execution - event relationship
954  EmptyOptional(),
955  EmptyOptional(),
956  EmptyOptional(),
957  readableData,
958  offset);
959 
960  // Event - event class relationship
962  EmptyOptional(),
963  EmptyOptional(),
965  readableData,
966  offset);
967 
968  // Normalize workload execution
969  // Normalize workload execution entity
970  VerifyTimelineEntityBinaryPacket(EmptyOptional(), readableData, offset);
971 
972  // Entity - Type relationship
974  EmptyOptional(),
975  EmptyOptional(),
977  readableData,
978  offset);
979 
980  // Type label relationship
982  EmptyOptional(),
983  EmptyOptional(),
985  readableData,
986  offset);
987 
988  // Inference - Workload execution relationship
990  EmptyOptional(),
991  EmptyOptional(),
992  EmptyOptional(),
993  readableData,
994  offset);
995 
996  // Workload - Workload execution relationship
998  EmptyOptional(),
999  EmptyOptional(),
1000  EmptyOptional(),
1001  readableData,
1002  offset);
1003 
1004  // Start Normalize workload execution life
1005  // Event packet - timeline, threadId, eventGuid
1007 
1008  // Normalize workload execution - event relationship
1010  EmptyOptional(),
1011  EmptyOptional(),
1012  EmptyOptional(),
1013  readableData,
1014  offset);
1015 
1016  // Event - event class relationship
1018  EmptyOptional(),
1019  EmptyOptional(),
1021  readableData,
1022  offset);
1023 
1024  // End of Normalize workload execution life
1025  // Event packet - timeline, threadId, eventGuid
1027 
1028  // Normalize workload execution - event relationship
1030  EmptyOptional(),
1031  EmptyOptional(),
1032  EmptyOptional(),
1033  readableData,
1034  offset);
1035 
1036  // Event - event class relationship
1038  EmptyOptional(),
1039  EmptyOptional(),
1041  readableData,
1042  offset);
1043 
1044  // Output workload execution
1045  // Output workload execution entity
1046  VerifyTimelineEntityBinaryPacket(EmptyOptional(), readableData, offset);
1047 
1048  // Entity - Type relationship
1050  EmptyOptional(),
1051  EmptyOptional(),
1053  readableData,
1054  offset);
1055 
1056  // Type label relationship
1058  EmptyOptional(),
1059  EmptyOptional(),
1061  readableData,
1062  offset);
1063 
1064  // Inference - Workload execution relationship
1066  EmptyOptional(),
1067  EmptyOptional(),
1068  EmptyOptional(),
1069  readableData,
1070  offset);
1071 
1072  // Workload - Workload execution relationship
1074  EmptyOptional(),
1075  EmptyOptional(),
1076  EmptyOptional(),
1077  readableData,
1078  offset);
1079 
1080  // Start Output workload execution life
1081  // Event packet - timeline, threadId, eventGuid
1083 
1084  // Output workload execution - event relationship
1086  EmptyOptional(),
1087  EmptyOptional(),
1088  EmptyOptional(),
1089  readableData,
1090  offset);
1091 
1092  // Event - event class relationship
1094  EmptyOptional(),
1095  EmptyOptional(),
1097  readableData,
1098  offset);
1099 
1100  // End of Normalize workload execution life
1101  // Event packet - timeline, threadId, eventGuid
1103 
1104  // Output workload execution - event relationship
1106  EmptyOptional(),
1107  EmptyOptional(),
1108  EmptyOptional(),
1109  readableData,
1110  offset);
1111 
1112  // Event - event class relationship
1114  EmptyOptional(),
1115  EmptyOptional(),
1117  readableData,
1118  offset);
1119 
1120  // End of Inference life
1121  // Event packet - timeline, threadId, eventGuid
1123 
1124  // Inference - event relationship
1126  EmptyOptional(),
1127  EmptyOptional(),
1128  EmptyOptional(),
1129  readableData,
1130  offset);
1131 
1132  // Event - event class relationship
1134  EmptyOptional(),
1135  EmptyOptional(),
1137  readableData,
1138  offset);
1139 
1140  bufferManager.MarkRead(inferenceReadableBuffer);
1141 }
virtual LayerGuid GetGuid() const =0
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
A NormalizationDescriptor for the NormalizationLayer.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:199
static ARMNN_DLLEXPORT ProfilingStaticGuid WORKLOAD_EXECUTION_GUID
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:226
void VerifyTimelineLabelBinaryPacket(Optional< ProfilingGuid > guid, const std::string &label, const unsigned char *readableData, unsigned int &offset)
static ARMNN_DLLEXPORT ProfilingStaticGuid ARMNN_PROFILING_EOL_EVENT_CLASS
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Definition: Network.cpp:807
static ARMNN_DLLEXPORT ProfilingStaticGuid CONNECTION_GUID
ExternalProfilingOptions m_ProfilingOptions
Definition: IRuntime.hpp:76
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
static ARMNN_DLLEXPORT ProfilingStaticGuid INFERENCE_GUID
static ARMNN_DLLEXPORT ProfilingStaticGuid TYPE_GUID
IPacketBufferPtr GetReadableBuffer() override
void VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType relationshipType, Optional< ProfilingGuid > relationshipGuid, Optional< ProfilingGuid > headGuid, Optional< ProfilingGuid > tailGuid, const unsigned char *readableData, unsigned int &offset)
void VerifyTimelineEventBinaryPacket(Optional< uint64_t > timestamp, Optional< std::thread::id > threadId, Optional< ProfilingGuid > eventGuid, const unsigned char *readableData, unsigned int &offset)
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:191
static ARMNN_DLLEXPORT ProfilingStaticGuid BACKENDID_GUID
static ARMNN_DLLEXPORT ProfilingStaticGuid NETWORK_GUID
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:225
static INetworkPtr Create()
Definition: Network.cpp:48
BOOST_CHECK(profilingService.GetCurrentState()==ProfilingState::WaitingForAck)
Head execution start depends on Tail execution completion.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:85
CPU Execution: Reference C++ kernels.
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:32
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:61
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:544
void MarkRead(IPacketBufferPtr &packetBuffer) override
static ARMNN_DLLEXPORT ProfilingStaticGuid ARMNN_PROFILING_SOL_EVENT_CLASS
virtual int Connect(IInputSlot &destination)=0
armnn::Runtime::CreationOptions::ExternalProfilingOptions options
int NetworkId
Definition: IRuntime.hpp:19
static ARMNN_DLLEXPORT ProfilingStaticGuid NAME_GUID
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:24
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
void VerifyTimelineEntityBinaryPacket(Optional< ProfilingGuid > guid, const unsigned char *readableData, unsigned int &offset)

◆ BOOST_AUTO_TEST_CASE() [8/8]

BOOST_AUTO_TEST_CASE ( ProfilingPostOptimisationStructureCpuRef  )

Definition at line 1143 of file RuntimeTests.cpp.

References BOOST_AUTO_TEST_SUITE_END(), armnn::CpuRef, and VerifyPostOptimisationStructureTestImpl().

1144 {
1146 }
void VerifyPostOptimisationStructureTestImpl(armnn::BackendId backendId)
CPU Execution: Reference C++ kernels.