ArmNN 21.02 - RefTensorHandleTests.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <reference/RefTensorHandle.hpp>
#include <reference/RefTensorHandleFactory.hpp>

#include <boost/test/unit_test.hpp>

BOOST_AUTO_TEST_SUITE(RefTensorHandleTests)
using namespace armnn;

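// Verifies that a RefTensorHandle backed by a RefMemoryManager can be mapped,
// written and read inside two separate Acquire()/Release() cycles.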
BOOST_AUTO_TEST_CASE(AcquireAndRelease)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();

    TensorInfo info({ 1, 1, 1, 1 }, DataType::Float32);
    RefTensorHandle handle(info, memoryManager);

    handle.Manage();
    handle.Allocate();

    memoryManager->Acquire();
    {
        float* buffer = reinterpret_cast<float*>(handle.Map());

        BOOST_CHECK(buffer != nullptr); // Yields a valid pointer

        buffer[0] = 2.5f;

        BOOST_CHECK(buffer[0] == 2.5f); // Memory is writable and readable
    }
    memoryManager->Release();

    memoryManager->Acquire();
    {
        float* buffer = reinterpret_cast<float*>(handle.Map());

        BOOST_CHECK(buffer != nullptr); // Yields a valid pointer

        buffer[0] = 3.5f;

        BOOST_CHECK(buffer[0] == 3.5f); // Memory is writable and readable
    }
    memoryManager->Release();
}

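// Same lifecycle as above, but with the handle created via RefTensorHandleFactory
// with memory management enabled; in this mode Import() is expected to fail.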
BOOST_AUTO_TEST_CASE(RefTensorHandleFactoryMemoryManaged)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();
    RefTensorHandleFactory handleFactory(memoryManager);
    TensorInfo info({ 1, 1, 2, 1 }, DataType::Float32);

    // Create TensorHandle with memory management enabled
    auto handle = handleFactory.CreateTensorHandle(info, true);
    handle->Manage();
    handle->Allocate();

    memoryManager->Acquire();
    {
        float* buffer = reinterpret_cast<float*>(handle->Map());
        BOOST_CHECK(buffer != nullptr); // Yields a valid pointer
        buffer[0] = 1.5f;
        buffer[1] = 2.5f;
        BOOST_CHECK(buffer[0] == 1.5f); // Memory is writable and readable
        BOOST_CHECK(buffer[1] == 2.5f); // Memory is writable and readable
    }
    memoryManager->Release();

    memoryManager->Acquire();
    {
        float* buffer = reinterpret_cast<float*>(handle->Map());
        BOOST_CHECK(buffer != nullptr); // Yields a valid pointer
        buffer[0] = 3.5f;
        buffer[1] = 4.5f;
        BOOST_CHECK(buffer[0] == 3.5f); // Memory is writable and readable
        BOOST_CHECK(buffer[1] == 4.5f); // Memory is writable and readable
    }
    memoryManager->Release();

    float testPtr[2] = { 2.5f, 5.5f };
    // Cannot import as import is disabled
    BOOST_CHECK(!handle->Import(static_cast<void*>(testPtr), MemorySource::Malloc));
}

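// With memory management disabled, the factory-created handle owns no buffer:
// Map() throws until a caller-owned buffer has been imported.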
BOOST_AUTO_TEST_CASE(RefTensorHandleFactoryImport)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();
    RefTensorHandleFactory handleFactory(memoryManager);
    TensorInfo info({ 1, 1, 2, 1 }, DataType::Float32);

    // Create TensorHandle with memory management disabled
    auto handle = handleFactory.CreateTensorHandle(info, false);
    handle->Manage();
    handle->Allocate();
    memoryManager->Acquire();

    // No buffer allocated when import is enabled
    BOOST_CHECK_THROW(handle->Map(), armnn::NullPointerException);

    float testPtr[2] = { 2.5f, 5.5f };
    // Correctly import
    BOOST_CHECK(handle->Import(static_cast<void*>(testPtr), MemorySource::Malloc));
    float* buffer = reinterpret_cast<float*>(handle->Map());
    BOOST_CHECK(buffer != nullptr); // Yields a valid pointer after import
    BOOST_CHECK(buffer == testPtr); // buffer is pointing to testPtr
    // Memory is writable and readable with correct value
    BOOST_CHECK(buffer[0] == 2.5f);
    BOOST_CHECK(buffer[1] == 5.5f);
    buffer[0] = 3.5f;
    buffer[1] = 10.0f;
    BOOST_CHECK(buffer[0] == 3.5f);
    BOOST_CHECK(buffer[1] == 10.0f);
    memoryManager->Release();
}

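// Same import behaviour, exercised on a RefTensorHandle constructed directly with
// MemorySource::Malloc instead of through the factory.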
BOOST_AUTO_TEST_CASE(RefTensorHandleImport)
{
    TensorInfo info({ 1, 1, 2, 1 }, DataType::Float32);
    RefTensorHandle handle(info, static_cast<unsigned int>(MemorySource::Malloc));

    handle.Manage();
    handle.Allocate();

    // No buffer allocated when import is enabled
    BOOST_CHECK_THROW(handle.Map(), armnn::NullPointerException);

    float testPtr[2] = { 2.5f, 5.5f };
    // Correctly import
    BOOST_CHECK(handle.Import(static_cast<void*>(testPtr), MemorySource::Malloc));
    float* buffer = reinterpret_cast<float*>(handle.Map());
    BOOST_CHECK(buffer != nullptr); // Yields a valid pointer after import
    BOOST_CHECK(buffer == testPtr); // buffer is pointing to testPtr
    // Memory is writable and readable with correct value
    BOOST_CHECK(buffer[0] == 2.5f);
    BOOST_CHECK(buffer[1] == 5.5f);
    buffer[0] = 3.5f;
    buffer[1] = 10.0f;
    BOOST_CHECK(buffer[0] == 3.5f);
    BOOST_CHECK(buffer[1] == 10.0f);
}

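// GetCapabilities() reports no capabilities for a minimal input->output network.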
BOOST_AUTO_TEST_CASE(RefTensorHandleGetCapabilities)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();
    RefTensorHandleFactory handleFactory(memoryManager);

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());
    IConnectableLayer* input = net->AddInputLayer(0);
    IConnectableLayer* output = net->AddOutputLayer(0);
    input->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    std::vector<Capability> capabilities = handleFactory.GetCapabilities(input,
                                                                         output,
                                                                         CapabilityClass::PaddingRequired);
    BOOST_CHECK(capabilities.empty());
}

BOOST_AUTO_TEST_CASE(RefTensorHandleSupportsInPlaceComputation)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();
    RefTensorHandleFactory handleFactory(memoryManager);

    // RefTensorHandleFactory does not support InPlaceComputation
    ARMNN_ASSERT(!(handleFactory.SupportsInPlaceComputation()));
}

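// The remaining tests cover Import() edge cases: unsupported memory sources,
// reuse of a previously imported pointer, and misaligned pointers.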
#if !defined(__ANDROID__)
// Only run these tests on non-Android platforms
BOOST_AUTO_TEST_CASE(CheckSourceType)
{
    TensorInfo info({ 1 }, DataType::Float32);
    RefTensorHandle handle(info, static_cast<unsigned int>(MemorySource::Malloc));

    int* testPtr = new int(4);

    // Not supported
    BOOST_CHECK(!handle.Import(static_cast<void *>(testPtr), MemorySource::DmaBuf));

    // Not supported
    BOOST_CHECK(!handle.Import(static_cast<void *>(testPtr), MemorySource::DmaBufProtected));

    // Supported
    BOOST_CHECK(handle.Import(static_cast<void *>(testPtr), MemorySource::Malloc));

    delete testPtr;
}

BOOST_AUTO_TEST_CASE(ReusePointer)
{
    TensorInfo info({ 1 }, DataType::Float32);
    RefTensorHandle handle(info, static_cast<unsigned int>(MemorySource::Malloc));

    int* testPtr = new int(4);

    handle.Import(static_cast<void *>(testPtr), MemorySource::Malloc);

    // Reusing previously imported pointer
    BOOST_CHECK(handle.Import(static_cast<void *>(testPtr), MemorySource::Malloc));

    delete testPtr;
}

BOOST_AUTO_TEST_CASE(MisalignedPointer)
{
    TensorInfo info({ 2 }, DataType::Float32);
    RefTensorHandle handle(info, static_cast<unsigned int>(MemorySource::Malloc));

    // Allocate a 2-int array
    int* testPtr = new int[2];

    // Increment pointer by 1 byte
    void* misalignedPtr = static_cast<void*>(reinterpret_cast<char*>(testPtr) + 1);

    BOOST_CHECK(!handle.Import(misalignedPtr, MemorySource::Malloc));

    delete[] testPtr;
}

#endif

BOOST_AUTO_TEST_SUITE_END()
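// A minimal usage sketch (hypothetical helper, not a test) of the RefTensorHandle
// lifecycle the suite above exercises, assuming the same headers: under memory
// management, Map() is only valid between RefMemoryManager::Acquire() and Release().
void SketchManagedLifecycle()
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();
    TensorInfo info({ 1, 1, 1, 1 }, DataType::Float32);
    RefTensorHandle handle(info, memoryManager);

    handle.Manage();             // Register the handle with the memory manager.
    handle.Allocate();           // Reserve a slot; no usable memory yet.

    memoryManager->Acquire();    // Backing memory becomes valid here.
    float* buffer = reinterpret_cast<float*>(handle.Map());
    buffer[0] = 1.0f;            // Read/write only between Acquire() and Release().
    memoryManager->Release();    // The mapped buffer must not be used after this.
}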