ArmNN
 22.05
DebugCallbackTest.cpp File Reference
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/Types.hpp>
#include <Runtime.hpp>
#include <doctest/doctest.h>

Go to the source code of this file.

Functions

 TEST_SUITE ("DebugCallback")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ( "DebugCallback"  )

Definition at line 14 of file DebugCallbackTest.cpp.

References IOutputSlot::Connect(), IRuntime::Create(), INetwork::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::IgnoreUnused(), ActivationDescriptor::m_Function, armnn::Optimize(), armnn::ReLu, TensorInfo::SetConstant(), IOutputSlot::SetTensorInfo(), and armnn::Success.

15 {
16 namespace
17 {
18 
19 using namespace armnn;
20 
21 INetworkPtr CreateSimpleNetwork()
22 {
23  INetworkPtr net(INetwork::Create());
24 
25  IConnectableLayer* input = net->AddInputLayer(0, "Input");
26 
27  ActivationDescriptor descriptor;
28  descriptor.m_Function = ActivationFunction::ReLu;
29  IConnectableLayer* activationLayer = net->AddActivationLayer(descriptor, "Activation:ReLu");
30 
31  IConnectableLayer* output = net->AddOutputLayer(0);
32 
33  input->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));
34  activationLayer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
35 
36  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 5 }, DataType::Float32));
37  activationLayer->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 5 }, DataType::Float32));
38 
39  return net;
40 }
41 
42 TEST_CASE("RuntimeRegisterDebugCallback")
43 {
44  INetworkPtr net = CreateSimpleNetwork();
45 
46  IRuntime::CreationOptions options;
47  IRuntimePtr runtime(IRuntime::Create(options));
48 
49  // Optimize the network with debug option
50  OptimizerOptions optimizerOptions(false, true);
51  std::vector<BackendId> backends = { "CpuRef" };
52  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizerOptions);
53 
54  NetworkId netId;
55  CHECK(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
56 
57  // Set up callback function
58  int callCount = 0;
59  std::vector<TensorShape> tensorShapes;
60  std::vector<unsigned int> slotIndexes;
61  auto mockCallback = [&](LayerGuid guid, unsigned int slotIndex, ITensorHandle* tensor)
62  {
63  armnn::IgnoreUnused(guid);
64  slotIndexes.push_back(slotIndex);
65  tensorShapes.push_back(tensor->GetShape());
66  callCount++;
67  };
68 
69  runtime->RegisterDebugCallback(netId, mockCallback);
70 
71  std::vector<float> inputData({-2, -1, 0, 1, 2});
72  std::vector<float> outputData(5);
73 
74  TensorInfo inputTensorInfo = runtime->GetInputTensorInfo(netId, 0);
75  inputTensorInfo.SetConstant(true);
76  InputTensors inputTensors
77  {
78  {0, ConstTensor(inputTensorInfo, inputData.data())}
79  };
80  OutputTensors outputTensors
81  {
82  {0, Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
83  };
84 
85  runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
86 
87  // Check that the callback was called twice
88  CHECK(callCount == 2);
89 
90  // Check that tensor handles passed to callback have correct shapes
91  const std::vector<TensorShape> expectedShapes({TensorShape({1, 1, 1, 5}), TensorShape({1, 1, 1, 5})});
92  CHECK(tensorShapes == expectedShapes);
93 
94  // Check that slot indexes passed to callback are correct
95  const std::vector<unsigned int> expectedSlotIndexes({0, 0});
96  CHECK(slotIndexes == expectedSlotIndexes);
97 }
98 
99 } // anonymous namespace
100 
101 }
static IRuntimePtr Create(const CreationOptions &options)
Definition: Runtime.cpp:49
Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
Definition: INetwork.hpp:66
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:33
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:392
Copyright (c) 2021 ARM Limited and Contributors.
void IgnoreUnused(Ts &&...)
virtual void SetTensorInfo(const TensorInfo &tensorInfo)=0
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:319
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1847
int NetworkId
Definition: IRuntime.hpp:27
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:393
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:242
ArmNN performs an optimization on each model/network before it gets loaded for execution.
Definition: INetwork.hpp:137
An ActivationDescriptor for the ActivationLayer.
Definition: Descriptors.hpp:36
arm::pipe::ProfilingGuid LayerGuid
Define LayerGuid type.
Definition: Types.hpp:26
virtual const IInputSlot & GetInputSlot(unsigned int index) const =0
Get a const input slot handle by slot index.
void SetConstant(const bool IsConstant=true)
Marks the data corresponding to this tensor info as constant.
Definition: Tensor.cpp:514
virtual const IOutputSlot & GetOutputSlot(unsigned int index) const =0
Get the const output slot handle by slot index.
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:241
virtual int Connect(IInputSlot &destination)=0
static INetworkPtr Create(NetworkOptions networkOptions={})
Definition: Network.cpp:476
ActivationFunction m_Function
The activation function to use (Sigmoid, TanH, Linear, ReLu, BoundedReLu, SoftReLu, LeakyReLu, Abs, Sqrt, Square, Elu).
Definition: Descriptors.hpp:59