ArmNN 21.08
DebugCallbackTest.cpp File Reference
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/Types.hpp>
#include <Runtime.hpp>
#include <doctest/doctest.h>


Functions

 TEST_SUITE ("DebugCallback")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ("DebugCallback")

Definition at line 14 of file DebugCallbackTest.cpp.

References IOutputSlot::Connect(), INetwork::Create(), IRuntime::Create(), armnn::Float32, IConnectableLayer::GetInputSlot(), IConnectableLayer::GetOutputSlot(), armnn::IgnoreUnused(), ActivationDescriptor::m_Function, armnn::Optimize(), armnn::ReLu, IOutputSlot::SetTensorInfo(), and armnn::Success.

14 TEST_SUITE("DebugCallback")
15 {
16 namespace
17 {
18 
19 using namespace armnn;
20 
21 INetworkPtr CreateSimpleNetwork()
22 {
23  INetworkPtr net(INetwork::Create());
24 
25  IConnectableLayer* input = net->AddInputLayer(0, "Input");
26 
27  ActivationDescriptor descriptor;
28  descriptor.m_Function = ActivationFunction::ReLu;
29  IConnectableLayer* activationLayer = net->AddActivationLayer(descriptor, "Activation:ReLu");
30 
31  IConnectableLayer* output = net->AddOutputLayer(0);
32 
33  input->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));
34  activationLayer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
35 
36  input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 5 }, DataType::Float32));
37  activationLayer->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 5 }, DataType::Float32));
38 
39  return net;
40 }
41 
42 TEST_CASE("RuntimeRegisterDebugCallback")
43 {
44  INetworkPtr net = CreateSimpleNetwork();
45 
46  IRuntime::CreationOptions options;
47  IRuntimePtr runtime(IRuntime::Create(options));
48 
49  // Optimize the network with debug option
50  OptimizerOptions optimizerOptions(false, true);
51  std::vector<BackendId> backends = { "CpuRef" };
52  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizerOptions);
53 
54  NetworkId netId;
55  CHECK(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
56 
57  // Set up callback function
58  int callCount = 0;
59  std::vector<TensorShape> tensorShapes;
60  std::vector<unsigned int> slotIndexes;
61  auto mockCallback = [&](LayerGuid guid, unsigned int slotIndex, ITensorHandle* tensor)
62  {
63  IgnoreUnused(guid);
64  slotIndexes.push_back(slotIndex);
65  tensorShapes.push_back(tensor->GetShape());
66  callCount++;
67  };
68 
69  runtime->RegisterDebugCallback(netId, mockCallback);
70 
71  std::vector<float> inputData({-2, -1, 0, 1, 2});
72  std::vector<float> outputData(5);
73 
74  InputTensors inputTensors
75  {
76  {0, ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
77  };
78  OutputTensors outputTensors
79  {
80  {0, Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
81  };
82 
83  runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
84 
85  // Check that the callback was called twice
86  CHECK(callCount == 2);
87 
88  // Check that tensor handles passed to callback have correct shapes
89  const std::vector<TensorShape> expectedShapes({TensorShape({1, 1, 1, 5}), TensorShape({1, 1, 1, 5})});
90  CHECK(tensorShapes == expectedShapes);
91 
92  // Check that slot indexes passed to callback are correct
93  const std::vector<unsigned int> expectedSlotIndexes({0, 0});
94  CHECK(slotIndexes == expectedSlotIndexes);
95 }
96 
97 } // anonymous namespace
98 
99 }
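
The test above exercises IRuntime::RegisterDebugCallback end to end: the network is optimized with the debug option enabled, loaded, and the callback is then invoked during EnqueueWorkload (twice for this network, once per instrumented output slot). For orientation, the following is a minimal sketch of the registration step outside the test harness. It assumes the DebugCallbackFunction alias declared in armnn/Types.hpp; the helper name RegisterShapeLogger, the runtime/netId parameters, and the logging behaviour are illustrative only and not part of this file.

#include <armnn/IRuntime.hpp>
#include <armnn/Types.hpp>
#include <armnn/backends/ITensorHandle.hpp> // for ITensorHandle::GetShape(); header location may vary by release
#include <iostream>

// Sketch only: registers a callback that logs the element count produced at
// every debug-instrumented output slot. `runtime` and `netId` must refer to a
// network optimized with the debug option and already loaded, as in the test.
void RegisterShapeLogger(armnn::IRuntime& runtime, armnn::NetworkId netId)
{
    armnn::DebugCallbackFunction shapeLogger =
        [](armnn::LayerGuid guid, unsigned int slotIndex, armnn::ITensorHandle* tensor)
        {
            (void) guid; // unused here, as in the test's mock callback
            const armnn::TensorShape shape = tensor->GetShape();
            std::cout << "Debug output at slot " << slotIndex << ": "
                      << shape.GetNumElements() << " elements" << std::endl;
        };

    runtime.RegisterDebugCallback(netId, shapeLogger);
}

As in the test, the callback only fires for networks optimized with the debug option (the second OptimizerOptions constructor argument on line 50 above); without it the runtime has nothing to instrument and the callback is never invoked.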