ArmNN 21.02
ClRuntimeTests.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <test/RuntimeTests.hpp>

#include <LeakChecking.hpp>

#include <armnn/utility/IgnoreUnused.hpp>
#include <backendsCommon/test/RuntimeTestImpl.hpp>
#include <test/ProfilingTestUtils.hpp>

#include <boost/test/unit_test.hpp>

#ifdef WITH_VALGRIND
#include <valgrind/memcheck.h>
#endif

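// These tests exercise the ArmNN runtime with the GpuAcc backend (OpenCL via the Arm Compute Library).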
BOOST_AUTO_TEST_SUITE(ClRuntime)

BOOST_AUTO_TEST_CASE(RuntimeValidateGpuDeviceSupportLayerNoFallback)
{
    // build up the structure of the network
    armnn::INetworkPtr net(armnn::INetwork::Create());

    armnn::IConnectableLayer* input = net->AddInputLayer(0);
    armnn::IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    input->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo({ 1, 1, 4, 4 }, armnn::DataType::Float32));

    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

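    // GpuAcc is the only backend requested, so there is no CpuRef fallback: optimisation
    // succeeds only if every layer in the network is supported by the GPU backend.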
    std::vector<armnn::BackendId> backends = { armnn::Compute::GpuAcc };
    armnn::IOptimizedNetworkPtr optNet = armnn::Optimize(*net, backends, runtime->GetDeviceSpec());
    BOOST_CHECK(optNet);

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == armnn::Status::Success);
}

#ifdef ARMNN_LEAK_CHECKING_ENABLED
BOOST_AUTO_TEST_CASE(RuntimeMemoryLeaksGpuAcc)
{
    BOOST_TEST(ARMNN_LEAK_CHECKER_IS_ACTIVE());
    armnn::IRuntime::CreationOptions options;
    armnn::RuntimeImpl runtime(options);
    armnn::RuntimeLoadedNetworksReserve(&runtime);

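    // The RuntimeLoadedNetworksReserve() call above makes the runtime pre-allocate its loaded-networks
    // storage, so that this one-off allocation is not reported by the scoped leak checks below.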
    std::vector<armnn::BackendId> backends = {armnn::Compute::GpuAcc};
    {
        // Do a warmup of this so we make sure that all one-time
        // initialization happens before we do the leak checking.
        CreateAndDropDummyNetwork(backends, runtime);
    }

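    // ARMNN_SCOPED_LEAK_CHECKER is built on the gperftools heap checker (see LeakChecking.hpp);
    // the *_IN_SCOPE macros below report on allocations made while the scope is alive.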
    {
        ARMNN_SCOPED_LEAK_CHECKER("LoadAndUnloadNetworkGpuAcc");
        BOOST_TEST(ARMNN_NO_LEAKS_IN_SCOPE());
        // In the second run we check for all remaining memory
        // in use after the network was unloaded. If there is any
        // then it will be treated as a memory leak.
        CreateAndDropDummyNetwork(backends, runtime);
        BOOST_TEST(ARMNN_NO_LEAKS_IN_SCOPE());
        BOOST_TEST(ARMNN_BYTES_LEAKED_IN_SCOPE() == 0);
        BOOST_TEST(ARMNN_OBJECTS_LEAKED_IN_SCOPE() == 0);
    }
}
#endif

// Note: this part of the code is due to be removed when we fully trust the gperftools based results.
#if defined(WITH_VALGRIND)
BOOST_AUTO_TEST_CASE(RuntimeMemoryUsage)
{
    // From the Valgrind documentation:

    // This means that no pointer to the block can be found. The block is classified as "lost",
    // because the programmer could not possibly have freed it at program exit, since no pointer to it exists.
    unsigned long leakedBefore = 0;
    unsigned long leakedAfter = 0;

    // A start-pointer or chain of start-pointers to the block is found. Since the block is still pointed at,
    // the programmer could, at least in principle, have freed it before program exit.
    // We want to test this in case memory is not freed as early as it could have been.
    unsigned long reachableBefore = 0;
    unsigned long reachableAfter = 0;

    // Needed as out params but we don't test them.
    unsigned long dubious = 0;
    unsigned long suppressed = 0;

    // Ensure that runtime is large enough before checking for memory leaks.
    // Otherwise, when loading the network, it will automatically reserve memory that won't be released
    // until destruction.
    armnn::NetworkId networkIdentifier;
    armnn::IRuntime::CreationOptions options;
    armnn::RuntimeImpl runtime(options);
    armnn::RuntimeLoadedNetworksReserve(&runtime);

    // Check for leaks before we load the network and record them so that we can see the delta after unloading.
    VALGRIND_DO_QUICK_LEAK_CHECK;
    VALGRIND_COUNT_LEAKS(leakedBefore, dubious, reachableBefore, suppressed);

    // build a mock-network and load it into the runtime
    std::vector<armnn::BackendId> backends = {armnn::Compute::GpuAcc};
    {
        // The tensor shape is arbitrary for this test; any valid shape works for the activation layer.
        armnn::TensorInfo inputTensorInfo(armnn::TensorShape({ 7, 7 }), armnn::DataType::Float32);
        armnn::TensorInfo outputTensorInfo(armnn::TensorShape({ 7, 7 }), armnn::DataType::Float32);

        armnn::INetworkPtr mockNetwork(armnn::INetwork::Create());

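        // Minimal graph: input -> activation -> output, just enough to create a GpuAcc workload.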
        armnn::IConnectableLayer* input = mockNetwork->AddInputLayer(0, "input");
        armnn::IConnectableLayer* layer = mockNetwork->AddActivationLayer(armnn::ActivationDescriptor(), "test");
        armnn::IConnectableLayer* output = mockNetwork->AddOutputLayer(0, "output");

        input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
        layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        // Sets the tensors in the network.
        input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

        // optimize the network
        armnn::IOptimizedNetworkPtr optNet = Optimize(*mockNetwork, backends, runtime.GetDeviceSpec());

        runtime.LoadNetwork(networkIdentifier, std::move(optNet));
    }

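    // Unloading the network should return the memory it grabbed; the counters are sampled again below
    // and compared against the baseline taken before the network was loaded.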
    runtime.UnloadNetwork(networkIdentifier);

    VALGRIND_DO_ADDED_LEAK_CHECK;
    VALGRIND_COUNT_LEAKS(leakedAfter, dubious, reachableAfter, suppressed);

    // If we're not running under Valgrind, these vars will have been initialised to 0, so this will always pass.
    BOOST_TEST(leakedBefore == leakedAfter);

    // Add a reasonable threshold after and before running valgrind with the ACL clear cache function.
    // TODO Threshold set to 80k until the root cause of the memory leakage is found and fixed. Revert threshold
    // value to 1024 when fixed.
    BOOST_TEST(static_cast<long>(reachableAfter) - static_cast<long>(reachableBefore) < 81920);

    // These are needed because VALGRIND_COUNT_LEAKS is a macro that assigns to the parameters
    // so they are assigned to, but still considered unused, causing a warning.
    IgnoreUnused(dubious);
    IgnoreUnused(suppressed);
}
#endif

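// VerifyPostOptimisationStructureTestImpl (see ProfilingTestUtils) loads a small network with profiling
// enabled and checks the post-optimisation profiling structure reported for the given backend.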
BOOST_AUTO_TEST_CASE(ProfilingPostOptimisationStructureGpuAcc)
{
    VerifyPostOptimisationStructureTestImpl(armnn::Compute::GpuAcc);
}

BOOST_AUTO_TEST_SUITE_END()