ArmNN 21.02
TensorHandleStrategyTest.cpp File Reference


Functions

 BOOST_AUTO_TEST_CASE (RegisterFactories)
 
 BOOST_AUTO_TEST_CASE (TensorHandleSelectionStrategy)
 

Function Documentation

◆ BOOST_AUTO_TEST_CASE() [1/2]

BOOST_AUTO_TEST_CASE ( RegisterFactories  )

Definition at line 274 of file TensorHandleStrategyTest.cpp.

{
    TestBackendA backendA;
    TestBackendB backendB;

    BOOST_TEST(backendA.GetHandleFactoryPreferences()[0] == "TestHandleFactoryA1");
    BOOST_TEST(backendA.GetHandleFactoryPreferences()[1] == "TestHandleFactoryA2");
    BOOST_TEST(backendA.GetHandleFactoryPreferences()[2] == "TestHandleFactoryB1");

    TensorHandleFactoryRegistry registry;
    backendA.RegisterTensorHandleFactories(registry);
    backendB.RegisterTensorHandleFactories(registry);

    BOOST_TEST((registry.GetFactory("Non-existing Backend") == nullptr));
    BOOST_TEST((registry.GetFactory("TestHandleFactoryA1") != nullptr));
    BOOST_TEST((registry.GetFactory("TestHandleFactoryA2") != nullptr));
    BOOST_TEST((registry.GetFactory("TestHandleFactoryB1") != nullptr));
}
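
For context, a minimal sketch of how a backend built on the same interfaces might advertise and register its factory. The names MyBackend and MyHandleFactory are hypothetical (the backend is assumed to derive from IBackendInternal and the factory from ITensorHandleFactory); GetHandleFactoryPreferences(), RegisterTensorHandleFactories() and TensorHandleFactoryRegistry::RegisterFactory() are the interface calls exercised by the test above.

    // Sketch only: MyBackend / MyHandleFactory are illustrative names, not part of ArmNN.
    std::vector<ITensorHandleFactory::FactoryId> MyBackend::GetHandleFactoryPreferences() const
    {
        // Most-preferred factory id first; strategy selection consults this ordering.
        return std::vector<ITensorHandleFactory::FactoryId>{ "MyHandleFactory" };
    }

    void MyBackend::RegisterTensorHandleFactories(TensorHandleFactoryRegistry& registry)
    {
        // Hand ownership of the factory to the registry so that a later
        // registry.GetFactory("MyHandleFactory") can resolve it, as the
        // assertions above do for the test factories.
        registry.RegisterFactory(std::make_unique<MyHandleFactory>());
    }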

◆ BOOST_AUTO_TEST_CASE() [2/2]

BOOST_AUTO_TEST_CASE ( TensorHandleSelectionStrategy  )

Definition at line 293 of file TensorHandleStrategyTest.cpp.

References Graph::AddCompatibilityLayers(), Graph::AddLayer(), BOOST_AUTO_TEST_SUITE_END(), OutputSlot::Connect(), armnn::CopyToTarget, armnn::DirectCompatibility, armnn::ExportToTarget, Graph::ForEachLayer(), OutputSlot::GetEdgeStrategyForConnection(), Layer::GetInputSlot(), Layer::GetOutputSlot(), OutputSlot::GetTensorHandleFactoryId(), Layer::GetType(), armnn::MemCopy, armnn::MemImport, armnn::SelectTensorHandleStrategy(), Layer::SetBackendId(), and Graph::TopologicalSort().

{
    auto backendA = std::make_unique<TestBackendA>();
    auto backendB = std::make_unique<TestBackendB>();
    auto backendC = std::make_unique<TestBackendC>();
    auto backendD = std::make_unique<TestBackendD>();

    TensorHandleFactoryRegistry registry;
    backendA->RegisterTensorHandleFactories(registry);
    backendB->RegisterTensorHandleFactories(registry);
    backendC->RegisterTensorHandleFactories(registry);
    backendD->RegisterTensorHandleFactories(registry);

    BackendsMap backends;
    backends["BackendA"] = std::move(backendA);
    backends["BackendB"] = std::move(backendB);
    backends["BackendC"] = std::move(backendC);
    backends["BackendD"] = std::move(backendD);

    armnn::Graph graph;

    armnn::InputLayer* const inputLayer = graph.AddLayer<armnn::InputLayer>(0, "input");
    inputLayer->SetBackendId("BackendA");

    armnn::SoftmaxDescriptor smDesc;
    armnn::SoftmaxLayer* const softmaxLayer1 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax1");
    softmaxLayer1->SetBackendId("BackendA");

    armnn::SoftmaxLayer* const softmaxLayer2 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax2");
    softmaxLayer2->SetBackendId("BackendB");

    armnn::SoftmaxLayer* const softmaxLayer3 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax3");
    softmaxLayer3->SetBackendId("BackendC");

    armnn::SoftmaxLayer* const softmaxLayer4 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax4");
    softmaxLayer4->SetBackendId("BackendD");

    armnn::OutputLayer* const outputLayer = graph.AddLayer<armnn::OutputLayer>(0, "output");
    outputLayer->SetBackendId("BackendA");

    inputLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0));
    softmaxLayer1->GetOutputSlot(0).Connect(softmaxLayer2->GetInputSlot(0));
    softmaxLayer2->GetOutputSlot(0).Connect(softmaxLayer3->GetInputSlot(0));
    softmaxLayer3->GetOutputSlot(0).Connect(softmaxLayer4->GetInputSlot(0));
    softmaxLayer4->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));

    graph.TopologicalSort();

    std::vector<std::string> errors;
    auto result = SelectTensorHandleStrategy(graph, backends, registry, true, errors);

    BOOST_TEST(result.m_Error == false);
    BOOST_TEST(result.m_Warning == false);

    OutputSlot& inputLayerOut    = inputLayer->GetOutputSlot(0);
    OutputSlot& softmaxLayer1Out = softmaxLayer1->GetOutputSlot(0);
    OutputSlot& softmaxLayer2Out = softmaxLayer2->GetOutputSlot(0);
    OutputSlot& softmaxLayer3Out = softmaxLayer3->GetOutputSlot(0);
    OutputSlot& softmaxLayer4Out = softmaxLayer4->GetOutputSlot(0);

    // Check that the correct factory was selected
    BOOST_TEST(inputLayerOut.GetTensorHandleFactoryId() == "TestHandleFactoryA1");
    BOOST_TEST(softmaxLayer1Out.GetTensorHandleFactoryId() == "TestHandleFactoryB1");
    BOOST_TEST(softmaxLayer2Out.GetTensorHandleFactoryId() == "TestHandleFactoryB1");
    BOOST_TEST(softmaxLayer3Out.GetTensorHandleFactoryId() == "TestHandleFactoryC1");
    BOOST_TEST(softmaxLayer4Out.GetTensorHandleFactoryId() == "TestHandleFactoryD1");

    // Check that the correct strategy was selected
    BOOST_TEST((inputLayerOut.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
    BOOST_TEST((softmaxLayer1Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
    BOOST_TEST((softmaxLayer2Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::CopyToTarget));
    BOOST_TEST((softmaxLayer3Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::ExportToTarget));
    BOOST_TEST((softmaxLayer4Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));

    graph.AddCompatibilityLayers(backends, registry);

    // Test for copy layers
    int copyCount = 0;
    graph.ForEachLayer([&copyCount](Layer* layer)
    {
        if (layer->GetType() == LayerType::MemCopy)
        {
            copyCount++;
        }
    });
    BOOST_TEST(copyCount == 1);

    // Test for import layers
    int importCount = 0;
    graph.ForEachLayer([&importCount](Layer* layer)
    {
        if (layer->GetType() == LayerType::MemImport)
        {
            importCount++;
        }
    });
    BOOST_TEST(importCount == 1);
}