ArmNN 22.02 — source listing of RedirectMembersToConstantInputsTests.cpp (go to the documentation of this file).
1 //
2 // Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 
6 #include <TestUtils.hpp>
7 
8 #include <Optimizer.hpp>
9 
10 #include <doctest/doctest.h>
11 
12 TEST_SUITE("Optimizer")
13 {
14 using namespace armnn::optimizations;
15 
16 TEST_CASE("RedirectMembersToConstantInputsFullyConnectedTest")
17 {
18  armnn::Graph graph;
19 
20  const armnn::TensorInfo inputInfo ({ 1, 2, 2, 3 }, armnn::DataType::Float32);
21  const armnn::TensorInfo outputInfo ({ 1, 2, 2, 3 }, armnn::DataType::Float32);
22  const armnn::TensorInfo weightsInfo({ 4 }, armnn::DataType::Float32, 0.0f, 0, true);
23  const armnn::TensorInfo biasesInfo ({ 2 }, armnn::DataType::Float32, 0.0f, 0, true);
24 
25  // Check if isConstant is enabled for weights and biases tensor info.
26  CHECK(weightsInfo.IsConstant());
27  CHECK(biasesInfo.IsConstant());
28 
30  desc.m_BiasEnabled = true;
31  desc.m_ConstantWeights = false;
32 
33  // Create the simple test network with Weights and Biases as inputs to a FullyConnected layer.
34  auto input = graph.AddLayer<armnn::InputLayer>(0, "Input");
35  auto weights = graph.AddLayer<armnn::ConstantLayer>("Weights");
36  auto biases = graph.AddLayer<armnn::ConstantLayer>("Biases");
37  auto fcLayer = graph.AddLayer<armnn::FullyConnectedLayer>(desc, "FullyConnected");
38  auto output = graph.AddLayer<armnn::OutputLayer>(1, "Output");
39 
40  float expectedWeightsData[] = { 1.0f, 1.0f, 1.0f, 1.0f };
41  float expectedBiasesData[] = { 2.0f, 2.0f };
42 
43  // Set the m_LayerOutput for the optimizer to point to.
44  armnn::ConstTensor weightsTensor(weightsInfo, &expectedWeightsData);
45  armnn::ConstTensor biasesTensor(biasesInfo, &expectedBiasesData);
46  weights->m_LayerOutput = std::make_unique<armnn::ScopedTensorHandle>(weightsTensor);
47  biases->m_LayerOutput = std::make_unique<armnn::ScopedTensorHandle>(biasesTensor);
48 
49  input->GetOutputSlot().SetTensorInfo(inputInfo);
50  weights->GetOutputSlot().SetTensorInfo(weightsInfo);
51  biases->GetOutputSlot().SetTensorInfo(biasesInfo);
52  fcLayer->GetOutputSlot().SetTensorInfo(outputInfo);
53 
54  // Connect up the layers
55  input->GetOutputSlot(0).Connect(fcLayer->GetInputSlot(0));
56  weights->GetOutputSlot(0).Connect(fcLayer->GetInputSlot(1));
57  biases->GetOutputSlot(0).Connect(fcLayer->GetInputSlot(2));
58  fcLayer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
59 
60  // Member variables should be null before optimization.
61  CHECK(fcLayer->m_Weight == nullptr);
62  CHECK(fcLayer->m_Bias == nullptr);
63 
64  // Run the optimizer
66 
67  // Check if member variables are not null and shape is set correctly.
68  CHECK(fcLayer->m_Weight != nullptr);
69  CHECK(fcLayer->m_Bias != nullptr);
70  CHECK(fcLayer->m_Weight->GetTensorInfo().GetShape() == weightsInfo.GetShape());
71  CHECK(fcLayer->m_Bias->GetTensorInfo().GetShape() == biasesInfo.GetShape());
72 
73  // Check whether data matches expected float data
74  const float* weightsData = fcLayer->m_Weight->GetConstTensor<float>();
75  CHECK(weightsData[0] == expectedWeightsData[0]);
76  CHECK(weightsData[1] == expectedWeightsData[1]);
77  CHECK(weightsData[2] == expectedWeightsData[2]);
78  CHECK(weightsData[3] == expectedWeightsData[3]);
79 
80  const float* biasesData = fcLayer->m_Bias->GetConstTensor<float>();
81  CHECK(biasesData[0] == expectedBiasesData[0]);
82  CHECK(biasesData[1] == expectedBiasesData[1]);
83 }
84 
85 }
A layer that the constant data can be bound to.
Optimizer::Optimizations MakeOptimizations(Args &&... args)
Definition: Optimizer.hpp:43
std::shared_ptr< ConstTensorHandle > m_LayerOutput
LayerT * AddLayer(Args &&... args)
Adds a new layer, of type LayerType, to the graph constructed with the arguments passed.
Definition: Graph.hpp:420
int Connect(InputSlot &destination)
Definition: Layer.cpp:86
static void Pass(Graph &graph, const Optimizations &optimizations)
Definition: Optimizer.cpp:16
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition: Layer.hpp:321
A layer that user-provided data can be bound to (e.g. inputs, outputs).
Definition: OutputLayer.hpp:13
This layer represents a fully connected operation.
A FullyConnectedDescriptor for the FullyConnectedLayer.
bool m_BiasEnabled
Enable/disable bias.
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
A layer that user-provided data can be bound to (e.g. inputs, outputs).
Definition: InputLayer.hpp:13
void SetTensorInfo(const TensorInfo &tensorInfo) override
Definition: Layer.cpp:61
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition: Layer.hpp:323
OptimizeForType< FullyConnectedLayer, RedirectMembersToConstantInputsImpl > RedirectMembersToConstantInputs