ArmNN 20.05
RuntimeTests.cpp File Reference
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <Runtime.hpp>
#include <armnn/TypesUtils.hpp>
#include <LabelsAndEventClasses.hpp>
#include <test/ProfilingTestUtils.hpp>
#include <HeapProfiling.hpp>
#include <LeakChecking.hpp>
#include <boost/test/unit_test.hpp>
#include "RuntimeTests.hpp"
#include "TestUtils.hpp"

Go to the source code of this file.

Namespaces

 armnn
 Copyright (c) 2020 ARM Limited.
 

Functions

void RuntimeLoadedNetworksReserve (armnn::Runtime *runtime)
 
 BOOST_AUTO_TEST_CASE (RuntimeUnloadNetwork)
 
 BOOST_AUTO_TEST_CASE (RuntimeCpuRef)
 
 BOOST_AUTO_TEST_CASE (RuntimeFallbackToCpuRef)
 
 BOOST_AUTO_TEST_CASE (IVGCVSW_1929_QuantizedSoftmaxIssue)
 
 BOOST_AUTO_TEST_CASE (RuntimeBackendOptions)
 
 BOOST_AUTO_TEST_CASE (ProfilingDisable)
 
 BOOST_AUTO_TEST_CASE (ProfilingEnableCpuRef)
 
 BOOST_AUTO_TEST_CASE (ProfilingPostOptimisationStructureCpuRef)
 

Function Documentation

◆ BOOST_AUTO_TEST_CASE() [1/8]

BOOST_AUTO_TEST_CASE ( RuntimeUnloadNetwork  )

Definition at line 38 of file RuntimeTests.cpp.

References ARMNN_BYTES_LEAKED_IN_SCOPE, ARMNN_LEAK_CHECKER_IS_ACTIVE, ARMNN_LOCAL_LEAK_CHECKING_ONLY, ARMNN_NO_LEAKS_IN_SCOPE, ARMNN_OBJECTS_LEAKED_IN_SCOPE, ARMNN_SCOPED_LEAK_CHECKER, armnn::BOOST_AUTO_TEST_CASE(), BOOST_GLOBAL_FIXTURE(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Failure, Runtime::GetDeviceSpec(), armnn::IgnoreUnused(), Runtime::LoadNetwork(), armnn::Optimize(), options, armnn::RuntimeLoadedNetworksReserve(), armnn::Success, and Runtime::UnloadNetwork().

39 {
40  // build 2 mock-networks and load them into the runtime
41  armnn::IRuntime::CreationOptions options;
42  armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
43 
44  // Mock network 1.
45  armnn::NetworkId networkIdentifier1 = 1;
46  armnn::INetworkPtr mockNetwork1(armnn::INetwork::Create());
47  mockNetwork1->AddInputLayer(0, "test layer");
48  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
49  runtime->LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime->GetDeviceSpec()));
50 
51  // Mock network 2.
52  armnn::NetworkId networkIdentifier2 = 2;
53  armnn::INetworkPtr mockNetwork2(armnn::INetwork::Create());
54  mockNetwork2->AddInputLayer(0, "test layer");
55  runtime->LoadNetwork(networkIdentifier2, Optimize(*mockNetwork2, backends, runtime->GetDeviceSpec()));
56 
57  // Unloads one by its networkID.
58  BOOST_TEST(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Success);
59 
60  BOOST_TEST(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Failure);
61 }
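
The listing above uses the two-argument LoadNetwork overload and only checks the returned Status. A minimal sketch of the variant that also surfaces a diagnostic string is shown below; the error-message overload of IRuntime::LoadNetwork is assumed to be available in this version, and the snippet reuses the runtime, mockNetwork1 and backends variables from the test above.

// Sketch only: load a network and report why a load was rejected.
// Assumes the LoadNetwork overload that fills an error message string.
std::string errorMessage;
armnn::NetworkId networkId;
armnn::Status status = runtime->LoadNetwork(networkId,
                                            Optimize(*mockNetwork1, backends, runtime->GetDeviceSpec()),
                                            errorMessage);
BOOST_CHECK_MESSAGE(status == armnn::Status::Success, errorMessage);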

◆ BOOST_AUTO_TEST_CASE() [2/8]

BOOST_AUTO_TEST_CASE ( RuntimeCpuRef  )

Definition at line 167 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

168 {
169  using namespace armnn;
170 
171  // Create runtime in which test will run
172  IRuntime::CreationOptions options;
173  IRuntimePtr runtime(IRuntime::Create(options));
174 
175  // build up the structure of the network
176  INetworkPtr net(INetwork::Create());
177 
178  IConnectableLayer* input = net->AddInputLayer(0);
179 
180  // This layer configuration isn't supported by CpuAcc; it should fall back to CpuRef.
181  NormalizationDescriptor descriptor;
182  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
183 
184  IConnectableLayer* output = net->AddOutputLayer(0);
185 
186  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
187  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
188 
189  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
190  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
191 
192  // optimize the network
193  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
194  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
195 
196  // Load it into the runtime. It should succeed.
197  armnn::NetworkId netId;
198  BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
199 }

◆ BOOST_AUTO_TEST_CASE() [3/8]

BOOST_AUTO_TEST_CASE ( RuntimeFallbackToCpuRef  )

Definition at line 201 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuAcc, armnn::CpuRef, INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

202 {
203  using namespace armnn;
204 
205  // Create runtime in which test will run
206  IRuntime::CreationOptions options;
207  IRuntimePtr runtime(IRuntime::Create(options));
208 
209  // build up the structure of the network
210  INetworkPtr net(INetwork::Create());
211 
212  IConnectableLayer* input = net->AddInputLayer(0);
213 
214  // This layer configuration isn't supported by CpuAcc; it should fall back to CpuRef.
215  NormalizationDescriptor descriptor;
216  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
217 
218  IConnectableLayer* output = net->AddOutputLayer(0);
219 
220  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
221  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
222 
223  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
224  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
225 
226  // Allow fallback to CpuRef.
227  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc, armnn::Compute::CpuRef };
228  // optimize the network
229  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
230 
231  // Load it into the runtime. It should succeed.
232  armnn::NetworkId netId;
233  BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
234 }

◆ BOOST_AUTO_TEST_CASE() [4/8]

BOOST_AUTO_TEST_CASE ( IVGCVSW_1929_QuantizedSoftmaxIssue  )

Definition at line 236 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), IRuntime::Create(), IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::Optimize(), options, armnn::QAsymmU8, and IOutputSlot::SetTensorInfo().

237 {
238  // Test for issue reported by Chris Nix in https://jira.arm.com/browse/IVGCVSW-1929
239  using namespace armnn;
240 
241  // Create runtime in which test will run
242  armnn::IRuntime::CreationOptions options;
243  armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
244 
245  // build up the structure of the network
246  armnn::INetworkPtr net(armnn::INetwork::Create());
247  armnn::IConnectableLayer* input = net->AddInputLayer(0, "input");
248  armnn::IConnectableLayer* softmax = net->AddSoftmaxLayer(armnn::SoftmaxDescriptor(), "softmax");
249  armnn::IConnectableLayer* output = net->AddOutputLayer(0, "output");
250 
251  input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
252  softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
253 
254  input->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo(armnn::TensorShape({ 1, 5 }),
255  armnn::DataType::QAsymmU8,
256  1.0f / 255,
257  0));
258 
259  softmax->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo(armnn::TensorShape({ 1, 5 }),
260  armnn::DataType::QAsymmU8));
261 
262  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
263  std::vector<std::string> errMessages;
264  armnn::IOptimizedNetworkPtr optNet = Optimize(*net,
265  backends,
266  runtime->GetDeviceSpec(),
267  armnn::OptimizerOptions(),
268  errMessages);
269 
270  BOOST_TEST(errMessages.size() == 1);
271  BOOST_TEST(errMessages[0] ==
272  "ERROR: output 0 of layer Softmax (softmax) is of type "
273  "Quantized 8 bit but its scale parameter has not been set");
274  BOOST_TEST(!optNet);
275 }
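
The message asserted above comes from network validation: a quantized (QAsymmU8) output whose scale has not been set is rejected during Optimize. A minimal sketch of the corrected tensor setup is shown below; the shape and the 1/255 scale are illustrative values rather than part of this test, and the snippet builds on the softmax pointer declared in the listing above.

// Hypothetical fix: give the quantized Softmax output an explicit scale and
// zero point so validation no longer rejects it. Softmax outputs lie in
// [0, 1], so a scale of 1/255 with offset 0 covers the full QAsymmU8 range.
armnn::TensorInfo softmaxOutputInfo(armnn::TensorShape({ 1, 5 }),
                                    armnn::DataType::QAsymmU8,
                                    1.0f / 255, // scale (illustrative)
                                    0);         // zero point (illustrative)
softmax->GetOutputSlot(0).SetTensorInfo(softmaxOutputInfo);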

◆ BOOST_AUTO_TEST_CASE() [5/8]

BOOST_AUTO_TEST_CASE ( RuntimeBackendOptions  )

Definition at line 277 of file RuntimeTests.cpp.

References BackendOptions::AddOption(), GetBackendId(), and IRuntime::CreationOptions::m_BackendOptions.

278 {
279  using namespace armnn;
280 
281  IRuntime::CreationOptions creationOptions;
282  auto& backendOptions = creationOptions.m_BackendOptions;
283 
284 
285  // Define Options on explicit construction
286  BackendOptions options1("FakeBackend1",
287  {
288  { "Option1", 1.3f },
289  { "Option2", true }
290  });
291 
292  // Add an option after construction
293  options1.AddOption({ "Option3", "some_value" });
294 
295  // Add the options to CreationOptions struct
296  backendOptions.push_back(options1);
297 
298  // Add more Options via in-place explicit construction
299  backendOptions.emplace_back(BackendOptions{ "FakeBackend1",
300  {{ "Option4", 42 }}
301  });
302 
303 
304  // First group
305  BOOST_TEST(backendOptions[0].GetBackendId().Get() == "FakeBackend1");
306  BOOST_TEST(backendOptions[0].GetOption(0).GetName() == "Option1");
307  BOOST_TEST(backendOptions[0].GetOption(0).GetValue().IsFloat() == true);
308  BOOST_TEST(backendOptions[0].GetOption(0).GetValue().AsFloat() == 1.3f);
309 
310  BOOST_TEST(backendOptions[0].GetOption(1).GetName() == "Option2");
311  BOOST_TEST(backendOptions[0].GetOption(1).GetValue().IsBool() == true);
312  BOOST_TEST(backendOptions[0].GetOption(1).GetValue().AsBool() == true);
313 
314  BOOST_TEST(backendOptions[0].GetOption(2).GetName() == "Option3");
315  BOOST_TEST(backendOptions[0].GetOption(2).GetValue().IsString() == true);
316  BOOST_TEST(backendOptions[0].GetOption(2).GetValue().AsString() == "some_value");
317 
318  // Second group
319  BOOST_TEST(backendOptions[1].GetBackendId().Get() == "FakeBackend1");
320  BOOST_TEST(backendOptions[1].GetOption(0).GetName() == "Option4");
321  BOOST_TEST(backendOptions[1].GetOption(0).GetValue().IsInt() == true);
322  BOOST_TEST(backendOptions[1].GetOption(0).GetValue().AsInt() == 42);
323 }
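
The test stops at inspecting the CreationOptions structure itself. As a follow-on, a minimal sketch of actually handing those options to a runtime is shown below; note that "FakeBackend1" above is only a stand-in name, and a real backend id (for example armnn::Compute::CpuAcc) determines which option names are actually understood.

// Sketch only: create the runtime with the populated CreationOptions so the
// per-backend options are forwarded to the backends when they are loaded.
armnn::IRuntimePtr runtime = armnn::IRuntime::Create(creationOptions);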

◆ BOOST_AUTO_TEST_CASE() [6/8]

BOOST_AUTO_TEST_CASE ( ProfilingDisable  )

Definition at line 325 of file RuntimeTests.cpp.

References IOutputSlot::Connect(), armnn::CpuRef, INetwork::Create(), armnn::Float32, Runtime::GetDeviceSpec(), IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::GetProfilingService(), BufferManager::GetReadableBuffer(), Runtime::LoadNetwork(), armnn::Optimize(), options, IOutputSlot::SetTensorInfo(), and armnn::Success.

326 {
327  using namespace armnn;
328 
329  // Create runtime in which the test will run
330  armnn::IRuntime::CreationOptions options;
331  armnn::Runtime runtime(options);
332 
333  // build up the structure of the network
334  armnn::INetworkPtr net(armnn::INetwork::Create());
335 
336  IConnectableLayer* input = net->AddInputLayer(0);
337 
338  // This layer configuration isn't supported by CpuAcc, should fall back to CpuRef.
339  NormalizationDescriptor descriptor;
340  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);
341 
342  IConnectableLayer* output = net->AddOutputLayer(0);
343 
344  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
345  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
346 
347  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
348  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
349 
350  // optimize the network
351  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
352  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime.GetDeviceSpec());
353 
354  // Load it into the runtime. It should succeed.
355  armnn::NetworkId netId;
356  BOOST_TEST(runtime.LoadNetwork(netId, std::move(optNet)) == Status::Success);
357 
358  profiling::ProfilingServiceRuntimeHelper profilingServiceHelper(GetProfilingService(&runtime));
359  profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
360  auto readableBuffer = bufferManager.GetReadableBuffer();
361 
362  // Profiling is not enabled, the post-optimisation structure should not be created
363  BOOST_TEST(!readableBuffer);
364 }
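
For contrast with the next test case, profiling output only appears when external profiling is switched on in the creation options before the runtime is constructed. A minimal sketch of that setup, mirroring what ProfilingEnableCpuRef does below:

// Enable external profiling (and timeline packets) at runtime creation.
armnn::IRuntime::CreationOptions options;
options.m_ProfilingOptions.m_EnableProfiling = true;  // turn profiling on
options.m_ProfilingOptions.m_TimelineEnabled = true;  // emit timeline packets
armnn::Runtime runtime(options);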

◆ BOOST_AUTO_TEST_CASE() [7/8]

BOOST_AUTO_TEST_CASE ( ProfilingEnableCpuRef  )

Definition at line 366 of file RuntimeTests.cpp.

References LabelsAndEventClasses::ARMNN_PROFILING_EOL_EVENT_CLASS, LabelsAndEventClasses::ARMNN_PROFILING_SOL_EVENT_CLASS, LabelsAndEventClasses::BACKENDID_GUID, BOOST_CHECK(), IOutputSlot::Connect(), LabelsAndEventClasses::CONNECTION_GUID, armnn::CpuRef, INetwork::Create(), armnn::profiling::DataLink, Runtime::EnqueueWorkload(), armnn::profiling::ExecutionLink, armnn::Float32, ProfilingServiceRuntimeHelper::ForceTransitionToState(), Runtime::GetDeviceSpec(), IConnectableLayer::GetGuid(), IConnectableLayer::GetInputSlot(), Runtime::GetInputTensorInfo(), IConnectableLayer::GetOutputSlot(), Runtime::GetOutputTensorInfo(), ProfilingServiceRuntimeHelper::GetProfilingBufferManager(), armnn::GetProfilingService(), BufferManager::GetReadableBuffer(), LabelsAndEventClasses::INFERENCE_GUID, armnn::profiling::LabelLink, Runtime::LoadNetwork(), IRuntime::CreationOptions::ExternalProfilingOptions::m_EnableProfiling, IRuntime::CreationOptions::m_ProfilingOptions, IRuntime::CreationOptions::ExternalProfilingOptions::m_TimelineEnabled, LabelsAndEventClasses::NAME_GUID, LabelsAndEventClasses::NETWORK_GUID, armnn::Optimize(), options, ProfilingService::ResetExternalProfilingOptions(), armnn::profiling::RetentionLink, IOutputSlot::SetTensorInfo(), armnn::Success, armnn::profiling::ThreadIdSize, LabelsAndEventClasses::TYPE_GUID, VerifyTimelineEntityBinaryPacketData(), VerifyTimelineEventBinaryPacket(), VerifyTimelineHeaderBinary(), VerifyTimelineLabelBinaryPacketData(), VerifyTimelineRelationshipBinaryPacketData(), and LabelsAndEventClasses::WORKLOAD_EXECUTION_GUID.

367 {
368  using namespace armnn;
369  using namespace armnn::profiling;
370 
371  // Create runtime in which the test will run
372  armnn::IRuntime::CreationOptions options;
373  options.m_ProfilingOptions.m_EnableProfiling = true;
374  options.m_ProfilingOptions.m_TimelineEnabled = true;
375 
376  armnn::Runtime runtime(options);
377  GetProfilingService(&runtime).ResetExternalProfilingOptions(options.m_ProfilingOptions, false);
378 
379  profiling::ProfilingServiceRuntimeHelper profilingServiceHelper(GetProfilingService(&runtime));
380  profilingServiceHelper.ForceTransitionToState(ProfilingState::NotConnected);
381  profilingServiceHelper.ForceTransitionToState(ProfilingState::WaitingForAck);
382  profilingServiceHelper.ForceTransitionToState(ProfilingState::Active);
383 
384  // build up the structure of the network
385  armnn::INetworkPtr net(armnn::INetwork::Create());
386 
387  IConnectableLayer* input = net->AddInputLayer(0, "input");
388 
389  NormalizationDescriptor descriptor;
390  IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor, "normalization");
391 
392  IConnectableLayer* output = net->AddOutputLayer(0, "output");
393 
394  input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
395  normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));
396 
397  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
398  normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
399 
400  // optimize the network
401  std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
402  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime.GetDeviceSpec());
403 
404  ProfilingGuid optNetGuid = optNet->GetGuid();
405 
406  // Load it into the runtime. It should succeed.
407  armnn::NetworkId netId;
408  BOOST_TEST(runtime.LoadNetwork(netId, std::move(optNet)) == Status::Success);
409 
410  profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
411  auto readableBuffer = bufferManager.GetReadableBuffer();
412 
413  // Profiling is enabled, the post-optimisation structure should be created
414  BOOST_CHECK(readableBuffer != nullptr);
415 
416  unsigned int size = readableBuffer->GetSize();
417  BOOST_CHECK(size == 1068);
418 
419  const unsigned char* readableData = readableBuffer->GetReadableData();
420  BOOST_CHECK(readableData != nullptr);
421 
422  unsigned int offset = 0;
423 
424  // Verify Header
425  VerifyTimelineHeaderBinary(readableData, offset, 1060);
426 
427  // Post-optimisation network
428  // Network entity
429  VerifyTimelineEntityBinaryPacketData(optNetGuid, readableData, offset
430  );
431 
432  // Entity - Type relationship
434  EmptyOptional(),
435  optNetGuid,
437  readableData,
438  offset);
439 
440  // Type label relationship
442  EmptyOptional(),
443  EmptyOptional(),
445  readableData,
446  offset);
447 
448  // Input layer
449  // Input layer entity
450  VerifyTimelineEntityBinaryPacketData(input->GetGuid(), readableData, offset);
451 
452  // Name Entity
453  VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "input", readableData, offset);
454 
455  // Entity - Name relationship
457  EmptyOptional(),
458  input->GetGuid(),
459  EmptyOptional(),
460  readableData,
461  offset);
462 
463  // Name label relationship
465  EmptyOptional(),
466  EmptyOptional(),
468  readableData,
469  offset);
470 
471  // Entity - Type relationship
473  EmptyOptional(),
474  input->GetGuid(),
475  EmptyOptional(),
476  readableData,
477  offset);
478 
479  // Type label relationship
481  EmptyOptional(),
482  EmptyOptional(),
484  readableData,
485  offset);
486 
487  // Network - Input layer relationship
489  EmptyOptional(),
490  optNetGuid,
491  input->GetGuid(),
492  readableData,
493  offset);
494 
495  // Normalization layer
496  // Normalization layer entity
497  VerifyTimelineEntityBinaryPacketData(normalize->GetGuid(), readableData, offset);
498 
499  // Name entity
500  VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "normalization", readableData, offset);
501 
502  // Entity - Name relationship
504  EmptyOptional(),
505  normalize->GetGuid(),
506  EmptyOptional(),
507  readableData,
508  offset);
509 
510  // Name label relationship
512  EmptyOptional(),
513  EmptyOptional(),
515  readableData,
516  offset);
517 
518  // Entity - Type relationship
520  EmptyOptional(),
521  normalize->GetGuid(),
522  EmptyOptional(),
523  readableData,
524  offset);
525 
526  // Type label relationship
528  EmptyOptional(),
529  EmptyOptional(),
531  readableData,
532  offset);
533 
534  // Network - Normalize layer relationship
536  EmptyOptional(),
537  optNetGuid,
538  normalize->GetGuid(),
539  readableData,
540  offset);
541 
542  // Input layer - Normalize layer relationship
544  EmptyOptional(),
545  input->GetGuid(),
546  normalize->GetGuid(),
547  readableData,
548  offset);
549 
550  // Entity - Type relationship
552  EmptyOptional(),
553  EmptyOptional(),
555  readableData,
556  offset);
557 
558  // Type label relationship
560  EmptyOptional(),
561  EmptyOptional(),
563  readableData,
564  offset);
565 
566  // Normalization workload
567  // Normalization workload entity
568  VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
569 
570  // Entity - Type relationship
572  EmptyOptional(),
573  EmptyOptional(),
574  EmptyOptional(),
575  readableData,
576  offset);
577 
578  // Type label relationship
580  EmptyOptional(),
581  EmptyOptional(),
583  readableData,
584  offset);
585 
586  // BackendId entity
587  VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "CpuRef", readableData, offset);
588 
589  // Entity - BackendId relationship
591  EmptyOptional(),
592  EmptyOptional(),
593  EmptyOptional(),
594  readableData,
595  offset);
596 
597  // BackendId label relationship
599  EmptyOptional(),
600  EmptyOptional(),
602  readableData,
603  offset);
604 
605  // Normalize layer - Normalize workload relationship
607  EmptyOptional(),
608  normalize->GetGuid(),
609  EmptyOptional(),
610  readableData,
611  offset);
612 
613  // Output layer
614  // Output layer entity
615  VerifyTimelineEntityBinaryPacketData(output->GetGuid(), readableData, offset);
616 
617  // Name entity
618  VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "output", readableData, offset);
619 
620  // Entity - Name relationship
622  EmptyOptional(),
623  output->GetGuid(),
624  EmptyOptional(),
625  readableData,
626  offset);
627 
628  // Name label relationship
630  EmptyOptional(),
631  EmptyOptional(),
633  readableData,
634  offset);
635 
636  // Entity - Type relationship
638  EmptyOptional(),
639  output->GetGuid(),
640  EmptyOptional(),
641  readableData,
642  offset);
643 
644  // Type label relationship
646  EmptyOptional(),
647  EmptyOptional(),
649  readableData,
650  offset);
651 
652  // Network - Output layer relationship
654  EmptyOptional(),
655  optNetGuid,
656  output->GetGuid(),
657  readableData,
658  offset);
659 
660  // Normalize layer - Output layer relationship
662  EmptyOptional(),
663  normalize->GetGuid(),
664  output->GetGuid(),
665  readableData,
666  offset);
667 
668  // Entity - Type relationship
670  EmptyOptional(),
671  EmptyOptional(),
673  readableData,
674  offset);
675 
676  // Type label relationship
678  EmptyOptional(),
679  EmptyOptional(),
681  readableData,
682  offset);
683 
684  bufferManager.MarkRead(readableBuffer);
685 
686  // Creates structures for input & output.
687  std::vector<float> inputData(16);
688  std::vector<float> outputData(16);
689 
690  InputTensors inputTensors
691  {
692  {0, ConstTensor(runtime.GetInputTensorInfo(netId, 0), inputData.data())}
693  };
694  OutputTensors outputTensors
695  {
696  {0, Tensor(runtime.GetOutputTensorInfo(netId, 0), outputData.data())}
697  };
698 
699  // Does the inference.
700  runtime.EnqueueWorkload(netId, inputTensors, outputTensors);
701 
702  // Get readable buffer for input workload
703  auto inputReadableBuffer = bufferManager.GetReadableBuffer();
704  BOOST_CHECK(inputReadableBuffer != nullptr);
705 
706  // Get readable buffer for output workload
707  auto outputReadableBuffer = bufferManager.GetReadableBuffer();
708  BOOST_CHECK(outputReadableBuffer != nullptr);
709 
710  // Get readable buffer for inference timeline
711  auto inferenceReadableBuffer = bufferManager.GetReadableBuffer();
712  BOOST_CHECK(inferenceReadableBuffer != nullptr);
713 
714  // Validate input workload data
715  size = inputReadableBuffer->GetSize();
716  BOOST_CHECK(size == 204);
717 
718  readableData = inputReadableBuffer->GetReadableData();
719  BOOST_CHECK(readableData != nullptr);
720 
721  offset = 0;
722 
723  // Verify Header
724  VerifyTimelineHeaderBinary(readableData, offset, 196);
725 
726  // Input workload
727  // Input workload entity
728  VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
729 
730  // Entity - Type relationship
732  EmptyOptional(),
733  EmptyOptional(),
734  EmptyOptional(),
735  readableData,
736  offset);
737 
738  // Type label relationship
740  EmptyOptional(),
741  EmptyOptional(),
743  readableData,
744  offset);
745 
746  // BackendId entity
747  VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "CpuRef", readableData, offset);
748 
749  // Entity - BackendId relationship
751  EmptyOptional(),
752  EmptyOptional(),
753  EmptyOptional(),
754  readableData,
755  offset);
756 
757  // BackendId label relationship
759  EmptyOptional(),
760  EmptyOptional(),
762  readableData,
763  offset);
764 
765  // Input layer - Input workload relationship
767  EmptyOptional(),
768  input->GetGuid(),
769  EmptyOptional(),
770  readableData,
771  offset);
772 
773  bufferManager.MarkRead(inputReadableBuffer);
774 
775  // Validate output workload data
776  size = outputReadableBuffer->GetSize();
777  BOOST_CHECK(size == 204);
778 
779  readableData = outputReadableBuffer->GetReadableData();
780  BOOST_CHECK(readableData != nullptr);
781 
782  offset = 0;
783 
784  // Verify Header
785  VerifyTimelineHeaderBinary(readableData, offset, 196);
786 
787  // Output workload
788  // Output workload entity
789  VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
790 
791  // Entity - Type relationship
793  EmptyOptional(),
794  EmptyOptional(),
795  EmptyOptional(),
796  readableData,
797  offset);
798 
799  // Type label relationship
801  EmptyOptional(),
802  EmptyOptional(),
804  readableData,
805  offset);
806 
807  // BackendId entity
808  VerifyTimelineLabelBinaryPacketData(EmptyOptional(), "CpuRef", readableData, offset);
809 
810  // Entity - BackendId relationship
812  EmptyOptional(),
813  EmptyOptional(),
814  EmptyOptional(),
815  readableData,
816  offset);
817 
818  // BackendId label relationship
820  EmptyOptional(),
821  EmptyOptional(),
823  readableData,
824  offset);
825 
826  // Output layer - Output workload relationship
828  EmptyOptional(),
829  output->GetGuid(),
830  EmptyOptional(),
831  readableData,
832  offset);
833 
834  bufferManager.MarkRead(outputReadableBuffer);
835 
836  // Validate inference data
837  size = inferenceReadableBuffer->GetSize();
838  BOOST_CHECK(size == 1208 + 8 * ThreadIdSize);
839 
840  readableData = inferenceReadableBuffer->GetReadableData();
841  BOOST_CHECK(readableData != nullptr);
842 
843  offset = 0;
844 
845  // Verify Header
846  VerifyTimelineHeaderBinary(readableData, offset, 1200 + 8 * ThreadIdSize);
847 
848  // Inference timeline trace
849  // Inference entity
850  VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
851 
852  // Entity - Type relationship
854  EmptyOptional(),
855  EmptyOptional(),
857  readableData,
858  offset);
859 
860  // Type label relationship
862  EmptyOptional(),
863  EmptyOptional(),
865  readableData,
866  offset);
867 
868  // Network - Inference relationship
870  EmptyOptional(),
871  optNetGuid,
872  EmptyOptional(),
873  readableData,
874  offset);
875 
876  // Start Inference life
877  // Event packet - timeline, threadId, eventGuid
879 
880  // Inference - event relationship
882  EmptyOptional(),
883  EmptyOptional(),
884  EmptyOptional(),
885  readableData,
886  offset);
887 
888  // Event - event class relationship
890  EmptyOptional(),
891  EmptyOptional(),
893  readableData,
894  offset);
895 
896  // Execution
897  // Input workload execution
898  // Input workload execution entity
899  VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
900 
901  // Entity - Type relationship
903  EmptyOptional(),
904  EmptyOptional(),
906  readableData,
907  offset);
908 
909  // Type label relationship
911  EmptyOptional(),
912  EmptyOptional(),
914  readableData,
915  offset);
916 
917  // Inference - Workload execution relationship
919  EmptyOptional(),
920  EmptyOptional(),
921  EmptyOptional(),
922  readableData,
923  offset);
924 
925  // Workload - Workload execution relationship
927  EmptyOptional(),
928  EmptyOptional(),
929  EmptyOptional(),
930  readableData,
931  offset);
932 
933  // Start Input workload execution life
934  // Event packet - timeline, threadId, eventGuid
936 
937  // Input workload execution - event relationship
939  EmptyOptional(),
940  EmptyOptional(),
941  EmptyOptional(),
942  readableData,
943  offset);
944 
945  // Event - event class relationship
947  EmptyOptional(),
948  EmptyOptional(),
950  readableData,
951  offset);
952 
953  // End of Input workload execution life
954  // Event packet - timeline, threadId, eventGuid
956 
957  // Input workload execution - event relationship
959  EmptyOptional(),
960  EmptyOptional(),
961  EmptyOptional(),
962  readableData,
963  offset);
964 
965  // Event - event class relationship
967  EmptyOptional(),
968  EmptyOptional(),
970  readableData,
971  offset);
972 
973  // Normalize workload execution
974  // Normalize workload execution entity
975  VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
976 
977  // Entity - Type relationship
979  EmptyOptional(),
980  EmptyOptional(),
982  readableData,
983  offset);
984 
985  // Type label relationship
987  EmptyOptional(),
988  EmptyOptional(),
990  readableData,
991  offset);
992 
993  // Inference - Workload execution relationship
995  EmptyOptional(),
996  EmptyOptional(),
997  EmptyOptional(),
998  readableData,
999  offset);
1000 
1001  // Workload - Workload execution relationship
1003  EmptyOptional(),
1004  EmptyOptional(),
1005  EmptyOptional(),
1006  readableData,
1007  offset);
1008 
1009  // Start Normalize workload execution life
1010  // Event packet - timeline, threadId, eventGuid
1012 
1013  // Normalize workload execution - event relationship
1015  EmptyOptional(),
1016  EmptyOptional(),
1017  EmptyOptional(),
1018  readableData,
1019  offset);
1020 
1021  // Event - event class relationship
1023  EmptyOptional(),
1024  EmptyOptional(),
1026  readableData,
1027  offset);
1028 
1029  // End of Normalize workload execution life
1030  // Event packet - timeline, threadId, eventGuid
1032 
1033  // Normalize workload execution - event relationship
1035  EmptyOptional(),
1036  EmptyOptional(),
1037  EmptyOptional(),
1038  readableData,
1039  offset);
1040 
1041  // Event - event class relationship
1043  EmptyOptional(),
1044  EmptyOptional(),
1046  readableData,
1047  offset);
1048 
1049  // Output workload execution
1050  // Output workload execution entity
1051  VerifyTimelineEntityBinaryPacketData(EmptyOptional(), readableData, offset);
1052 
1053  // Entity - Type relationship
1055  EmptyOptional(),
1056  EmptyOptional(),
1058  readableData,
1059  offset);
1060 
1061  // Type label relationship
1063  EmptyOptional(),
1064  EmptyOptional(),
1066  readableData,
1067  offset);
1068 
1069  // Inference - Workload execution relationship
1071  EmptyOptional(),
1072  EmptyOptional(),
1073  EmptyOptional(),
1074  readableData,
1075  offset);
1076 
1077  // Workload - Workload execution relationship
1079  EmptyOptional(),
1080  EmptyOptional(),
1081  EmptyOptional(),
1082  readableData,
1083  offset);
1084 
1085  // Start Output workload execution life
1086  // Event packet - timeline, threadId, eventGuid
1088 
1089  // Output workload execution - event relationship
1091  EmptyOptional(),
1092  EmptyOptional(),
1093  EmptyOptional(),
1094  readableData,
1095  offset);
1096 
1097  // Event - event class relationship
1099  EmptyOptional(),
1100  EmptyOptional(),
1102  readableData,
1103  offset);
1104 
1105  // End of Normalize workload execution life
1106  // Event packet - timeline, threadId, eventGuid
1108 
1109  // Output workload execution - event relationship
1111  EmptyOptional(),
1112  EmptyOptional(),
1113  EmptyOptional(),
1114  readableData,
1115  offset);
1116 
1117  // Event - event class relationship
1119  EmptyOptional(),
1120  EmptyOptional(),
1122  readableData,
1123  offset);
1124 
1125  // End of Inference life
1126  // Event packet - timeline, threadId, eventGuid
1128 
1129  // Inference - event relationship
1131  EmptyOptional(),
1132  EmptyOptional(),
1133  EmptyOptional(),
1134  readableData,
1135  offset);
1136 
1137  // Event - event class relationship
1139  EmptyOptional(),
1140  EmptyOptional(),
1142  readableData,
1143  offset);
1144 
1145  bufferManager.MarkRead(inferenceReadableBuffer);
1146 }

◆ BOOST_AUTO_TEST_CASE() [8/8]

BOOST_AUTO_TEST_CASE ( ProfilingPostOptimisationStructureCpuRef  )

Definition at line 1148 of file RuntimeTests.cpp.

References BOOST_AUTO_TEST_SUITE_END(), armnn::CpuRef, and VerifyPostOptimisationStructureTestImpl().

1149 {
1150  VerifyPostOptimisationStructureTestImpl(armnn::Compute::CpuRef);
1151 }