ArmNN
 21.05
TensorHandleStrategyTest.cpp File Reference

Go to the source code of this file.

Functions

 BOOST_AUTO_TEST_CASE (RegisterFactories)
 
 BOOST_AUTO_TEST_CASE (TensorHandleSelectionStrategy)
 

Function Documentation

◆ BOOST_AUTO_TEST_CASE() [1/2]

BOOST_AUTO_TEST_CASE ( RegisterFactories  )

Definition at line 275 of file TensorHandleStrategyTest.cpp.

276 {
277  TestBackendA backendA;
278  TestBackendB backendB;
279 
280  BOOST_TEST(backendA.GetHandleFactoryPreferences()[0] == "TestHandleFactoryA1");
281  BOOST_TEST(backendA.GetHandleFactoryPreferences()[1] == "TestHandleFactoryA2");
282  BOOST_TEST(backendA.GetHandleFactoryPreferences()[2] == "TestHandleFactoryB1");
283  BOOST_TEST(backendA.GetHandleFactoryPreferences()[3] == "TestHandleFactoryD1");
284 
285  TensorHandleFactoryRegistry registry;
286  backendA.RegisterTensorHandleFactories(registry);
287  backendB.RegisterTensorHandleFactories(registry);
288 
289  BOOST_TEST((registry.GetFactory("Non-existing Backend") == nullptr));
290  BOOST_TEST((registry.GetFactory("TestHandleFactoryA1") != nullptr));
291  BOOST_TEST((registry.GetFactory("TestHandleFactoryA2") != nullptr));
292  BOOST_TEST((registry.GetFactory("TestHandleFactoryB1") != nullptr));
293 }

◆ BOOST_AUTO_TEST_CASE() [2/2]

BOOST_AUTO_TEST_CASE ( TensorHandleSelectionStrategy  )

Definition at line 295 of file TensorHandleStrategyTest.cpp.

References Graph::AddCompatibilityLayers(), Graph::AddLayer(), BOOST_AUTO_TEST_SUITE_END(), OutputSlot::Connect(), armnn::CopyToTarget, armnn::DirectCompatibility, armnn::ExportToTarget, Graph::ForEachLayer(), OutputSlot::GetEdgeStrategyForConnection(), Layer::GetInputSlot(), Layer::GetOutputSlot(), OutputSlot::GetTensorHandleFactoryId(), Layer::GetType(), armnn::MemCopy, armnn::MemImport, armnn::SelectTensorHandleStrategy(), Layer::SetBackendId(), and Graph::TopologicalSort().

296 {
297  auto backendA = std::make_unique<TestBackendA>();
298  auto backendB = std::make_unique<TestBackendB>();
299  auto backendC = std::make_unique<TestBackendC>();
300  auto backendD = std::make_unique<TestBackendD>();
301 
302  TensorHandleFactoryRegistry registry;
303  backendA->RegisterTensorHandleFactories(registry);
304  backendB->RegisterTensorHandleFactories(registry);
305  backendC->RegisterTensorHandleFactories(registry);
306  backendD->RegisterTensorHandleFactories(registry);
307 
308  BackendsMap backends;
309  backends["BackendA"] = std::move(backendA);
310  backends["BackendB"] = std::move(backendB);
311  backends["BackendC"] = std::move(backendC);
312  backends["BackendD"] = std::move(backendD);
313 
314  armnn::Graph graph;
315 
316  armnn::InputLayer* const inputLayer = graph.AddLayer<armnn::InputLayer>(0, "input");
317  inputLayer->SetBackendId("BackendA");
318 
319  armnn::SoftmaxDescriptor smDesc;
320  armnn::SoftmaxLayer* const softmaxLayer1 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax1");
321  softmaxLayer1->SetBackendId("BackendA");
322 
323  armnn::SoftmaxLayer* const softmaxLayer2 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax2");
324  softmaxLayer2->SetBackendId("BackendB");
325 
326  armnn::SoftmaxLayer* const softmaxLayer3 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax3");
327  softmaxLayer3->SetBackendId("BackendC");
328 
329  armnn::SoftmaxLayer* const softmaxLayer4 = graph.AddLayer<armnn::SoftmaxLayer>(smDesc, "softmax4");
330  softmaxLayer4->SetBackendId("BackendD");
331 
332  armnn::OutputLayer* const outputLayer = graph.AddLayer<armnn::OutputLayer>(0, "output");
333  outputLayer->SetBackendId("BackendA");
334 
335  inputLayer->GetOutputSlot(0).Connect(softmaxLayer1->GetInputSlot(0));
336  softmaxLayer1->GetOutputSlot(0).Connect(softmaxLayer2->GetInputSlot(0));
337  softmaxLayer2->GetOutputSlot(0).Connect(softmaxLayer3->GetInputSlot(0));
338  softmaxLayer3->GetOutputSlot(0).Connect(softmaxLayer4->GetInputSlot(0));
339  softmaxLayer4->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
340 
341  graph.TopologicalSort();
342 
343  std::vector<std::string> errors;
344  auto result = SelectTensorHandleStrategy(graph, backends, registry, true, errors);
345 
346  BOOST_TEST(result.m_Error == false);
347  BOOST_TEST(result.m_Warning == false);
348 
349  OutputSlot& inputLayerOut = inputLayer->GetOutputSlot(0);
350  OutputSlot& softmaxLayer1Out = softmaxLayer1->GetOutputSlot(0);
351  OutputSlot& softmaxLayer2Out = softmaxLayer2->GetOutputSlot(0);
352  OutputSlot& softmaxLayer3Out = softmaxLayer3->GetOutputSlot(0);
353  OutputSlot& softmaxLayer4Out = softmaxLayer4->GetOutputSlot(0);
354 
355  // Check that the correct factory was selected
356  BOOST_TEST(inputLayerOut.GetTensorHandleFactoryId() == "TestHandleFactoryD1");
357  BOOST_TEST(softmaxLayer1Out.GetTensorHandleFactoryId() == "TestHandleFactoryB1");
358  BOOST_TEST(softmaxLayer2Out.GetTensorHandleFactoryId() == "TestHandleFactoryB1");
359  BOOST_TEST(softmaxLayer3Out.GetTensorHandleFactoryId() == "TestHandleFactoryC1");
360  BOOST_TEST(softmaxLayer4Out.GetTensorHandleFactoryId() == "TestHandleFactoryD1");
361 
362  // Check that the correct strategy was selected
363  BOOST_TEST((inputLayerOut.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
364  BOOST_TEST((softmaxLayer1Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
365  BOOST_TEST((softmaxLayer2Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::CopyToTarget));
366  BOOST_TEST((softmaxLayer3Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::ExportToTarget));
367  BOOST_TEST((softmaxLayer4Out.GetEdgeStrategyForConnection(0) == EdgeStrategy::DirectCompatibility));
368 
369  graph.AddCompatibilityLayers(backends, registry);
370 
371  // Test for copy layers
372  int copyCount= 0;
373  graph.ForEachLayer([&copyCount](Layer* layer)
374  {
375  if (layer->GetType() == LayerType::MemCopy)
376  {
377  copyCount++;
378  }
379  });
380  BOOST_TEST(copyCount == 1);
381 
382  // Test for import layers
383  int importCount= 0;
384  graph.ForEachLayer([&importCount](Layer *layer)
385  {
386  if (layer->GetType() == LayerType::MemImport)
387  {
388  importCount++;
389  }
390  });
391  BOOST_TEST(importCount == 1);
392 }
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:402
int Connect(InputSlot &destination)
Definition: Layer.cpp:83
EdgeStrategy GetEdgeStrategyForConnection(unsigned int connectionIdx) const
Definition: Layer.cpp:186
void SetBackendId(const BackendId &id)
Definition: Layer.hpp:270
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:316
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
void ForEachLayer(Func func) const
Definition: Graph.hpp:39
This layer represents a softmax operation.
LayerType GetType() const override
Returns the armnn::LayerType of this layer.
Definition: Layer.hpp:265
A layer user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:318
OptimizationResult SelectTensorHandleStrategy(Graph &optGraph, BackendsMap &backends, TensorHandleFactoryRegistry &registry, bool importEnabled, Optional< std::vector< std::string > &> errMessages)
Definition: Network.cpp:1500
ITensorHandleFactory::FactoryId GetTensorHandleFactoryId() const
Definition: Layer.cpp:176
Graph & TopologicalSort()
Sorts layers in topological order and return this.
Definition: Graph.hpp:177
A SoftmaxDescriptor for the SoftmaxLayer.
void AddCompatibilityLayers(std::map< BackendId, std::unique_ptr< class IBackendInternal >> &backends, TensorHandleFactoryRegistry &registry)
Modifies the graph in-place, removing edges connecting layers using different compute devices...
Definition: Graph.cpp:300
std::map< BackendId, std::unique_ptr< class IBackendInternal > > BackendsMap
Definition: Network.hpp:317