RefTensorHandleTests.cpp
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <reference/RefTensorHandle.hpp>
#include <reference/RefTensorHandleFactory.hpp>

#include <armnn/INetwork.hpp>

#include <boost/test/unit_test.hpp>

BOOST_AUTO_TEST_SUITE(RefTensorHandleTests)
using namespace armnn;

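// Checks that a handle backed by RefMemoryManager can be mapped, written and read
// between Acquire() and Release(), and again after a second Acquire().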
BOOST_AUTO_TEST_CASE(AcquireAndRelease)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();

    TensorInfo info({ 1, 1, 1, 1 }, DataType::Float32);
    RefTensorHandle handle(info, memoryManager);

    handle.Manage();
    handle.Allocate();

    memoryManager->Acquire();
    {
        float* buffer = reinterpret_cast<float*>(handle.Map());

        BOOST_CHECK(buffer != nullptr); // Yields a valid pointer

        buffer[0] = 2.5f;

        BOOST_CHECK(buffer[0] == 2.5f); // Memory is writable and readable
    }
    memoryManager->Release();

    memoryManager->Acquire();
    {
        float* buffer = reinterpret_cast<float*>(handle.Map());

        BOOST_CHECK(buffer != nullptr); // Yields a valid pointer

        buffer[0] = 3.5f;

        BOOST_CHECK(buffer[0] == 3.5f); // Memory is writable and readable
    }
    memoryManager->Release();
}

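// Same Acquire()/Map()/Release() cycle, but for a handle created through
// RefTensorHandleFactory in memory-managed mode; Import() is expected to be rejected.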
BOOST_AUTO_TEST_CASE(RefTensorHandleFactoryMemoryManaged)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();
    RefTensorHandleFactory handleFactory(memoryManager);
    TensorInfo info({ 1, 1, 2, 1 }, DataType::Float32);

    // Create TensorHandle with memory managed
    auto handle = handleFactory.CreateTensorHandle(info, true);
    handle->Manage();
    handle->Allocate();

    memoryManager->Acquire();
    {
        float* buffer = reinterpret_cast<float*>(handle->Map());
        BOOST_CHECK(buffer != nullptr); // Yields a valid pointer
        buffer[0] = 1.5f;
        buffer[1] = 2.5f;
        BOOST_CHECK(buffer[0] == 1.5f); // Memory is writable and readable
        BOOST_CHECK(buffer[1] == 2.5f); // Memory is writable and readable
    }
    memoryManager->Release();

    memoryManager->Acquire();
    {
        float* buffer = reinterpret_cast<float*>(handle->Map());
        BOOST_CHECK(buffer != nullptr); // Yields a valid pointer
        buffer[0] = 3.5f;
        buffer[1] = 4.5f;
        BOOST_CHECK(buffer[0] == 3.5f); // Memory is writable and readable
        BOOST_CHECK(buffer[1] == 4.5f); // Memory is writable and readable
    }
    memoryManager->Release();

    float testPtr[2] = { 2.5f, 5.5f };
    // Cannot import as import is disabled
    BOOST_CHECK(!handle->Import(static_cast<void*>(testPtr), MemorySource::Malloc));
}

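// Factory-created handle with memory management disabled: Map() throws until a
// client buffer has been imported, after which reads and writes go to that buffer.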
BOOST_AUTO_TEST_CASE(RefTensorHandleFactoryImport)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();
    RefTensorHandleFactory handleFactory(memoryManager);
    TensorInfo info({ 1, 1, 2, 1 }, DataType::Float32);

    // Create TensorHandle without memory managed
    auto handle = handleFactory.CreateTensorHandle(info, false);
    handle->Manage();
    handle->Allocate();
    memoryManager->Acquire();

    // No buffer allocated when import is enabled
    BOOST_CHECK_THROW(handle->Map(), armnn::NullPointerException);

    float testPtr[2] = { 2.5f, 5.5f };
    // Correctly import
    BOOST_CHECK(handle->Import(static_cast<void*>(testPtr), MemorySource::Malloc));
    float* buffer = reinterpret_cast<float*>(handle->Map());
    BOOST_CHECK(buffer != nullptr); // Yields a valid pointer after import
    BOOST_CHECK(buffer == testPtr); // buffer is pointing to testPtr
    // Memory is writable and readable with correct value
    BOOST_CHECK(buffer[0] == 2.5f);
    BOOST_CHECK(buffer[1] == 5.5f);
    buffer[0] = 3.5f;
    buffer[1] = 10.0f;
    BOOST_CHECK(buffer[0] == 3.5f);
    BOOST_CHECK(buffer[1] == 10.0f);
    memoryManager->Release();
}

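// Same import behaviour, exercised directly on a RefTensorHandle constructed with
// MemorySource::Malloc import flags rather than through the factory.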
BOOST_AUTO_TEST_CASE(RefTensorHandleImport)
{
    TensorInfo info({ 1, 1, 2, 1 }, DataType::Float32);
    RefTensorHandle handle(info, static_cast<unsigned int>(MemorySource::Malloc));

    handle.Manage();
    handle.Allocate();

    // No buffer allocated when import is enabled
    BOOST_CHECK_THROW(handle.Map(), armnn::NullPointerException);

    float testPtr[2] = { 2.5f, 5.5f };
    // Correctly import
    BOOST_CHECK(handle.Import(static_cast<void*>(testPtr), MemorySource::Malloc));
    float* buffer = reinterpret_cast<float*>(handle.Map());
    BOOST_CHECK(buffer != nullptr); // Yields a valid pointer after import
    BOOST_CHECK(buffer == testPtr); // buffer is pointing to testPtr
    // Memory is writable and readable with correct value
    BOOST_CHECK(buffer[0] == 2.5f);
    BOOST_CHECK(buffer[1] == 5.5f);
    buffer[0] = 3.5f;
    buffer[1] = 10.0f;
    BOOST_CHECK(buffer[0] == 3.5f);
    BOOST_CHECK(buffer[1] == 10.0f);
}

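// GetCapabilities() on the reference factory returns an empty list for a simple
// input -> output connection.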
BOOST_AUTO_TEST_CASE(RefTensorHandleGetCapabilities)
{
    std::shared_ptr<RefMemoryManager> memoryManager = std::make_shared<RefMemoryManager>();
    RefTensorHandleFactory handleFactory(memoryManager);

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());
    IConnectableLayer* input = net->AddInputLayer(0);
    IConnectableLayer* output = net->AddOutputLayer(0);
    input->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    std::vector<Capability> capabilities = handleFactory.GetCapabilities(input,
                                                                         output,
                                                                         CapabilityClass::PaddingRequired);
    BOOST_CHECK(capabilities.empty());
}

#if !defined(__ANDROID__)
// Only run these tests on non-Android platforms
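// Import() accepts MemorySource::Malloc only; DmaBuf and DmaBufProtected are rejected.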
BOOST_AUTO_TEST_CASE(CheckSourceType)
{
    TensorInfo info({ 1 }, DataType::Float32); // one 4-byte element, matching the imported int
    RefTensorHandle handle(info, static_cast<unsigned int>(MemorySource::Malloc));

    int* testPtr = new int(4);

    // Not supported
    BOOST_CHECK(!handle.Import(static_cast<void *>(testPtr), MemorySource::DmaBuf));

    // Not supported
    BOOST_CHECK(!handle.Import(static_cast<void *>(testPtr), MemorySource::DmaBufProtected));

    // Supported
    BOOST_CHECK(handle.Import(static_cast<void *>(testPtr), MemorySource::Malloc));

    delete testPtr;
}

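// Importing the same pointer a second time is allowed.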
BOOST_AUTO_TEST_CASE(ReusePointer)
{
    TensorInfo info({ 1 }, DataType::Float32); // one 4-byte element, matching the imported int
    RefTensorHandle handle(info, static_cast<unsigned int>(MemorySource::Malloc));

    int* testPtr = new int(4);

    handle.Import(static_cast<void *>(testPtr), MemorySource::Malloc);

    // Reusing a previously imported pointer
    BOOST_CHECK(handle.Import(static_cast<void *>(testPtr), MemorySource::Malloc));

    delete testPtr;
}

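// A pointer that is not suitably aligned for the tensor data type is rejected.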
BOOST_AUTO_TEST_CASE(MisalignedPointer)
{
    TensorInfo info({ 2 }, DataType::Float32); // two 4-byte elements, matching the int[2] buffer below
    RefTensorHandle handle(info, static_cast<unsigned int>(MemorySource::Malloc));

    // Allocate a 2 int array
    int* testPtr = new int[2];

    // Increment pointer by 1 byte
    void* misalignedPtr = static_cast<void*>(reinterpret_cast<char*>(testPtr) + 1);

    BOOST_CHECK(!handle.Import(misalignedPtr, MemorySource::Malloc));

    delete[] testPtr;
}

#endif

BOOST_AUTO_TEST_SUITE_END()