ArmNN 21.08
ConvertConstantsFloatToHalfTests.cpp File Reference
#include "../TestUtils.hpp"
#include <Optimizer.hpp>
#include <Half.hpp>
#include <doctest/doctest.h>


Functions

 TEST_SUITE ("Optimizer")
 

Function Documentation

◆ TEST_SUITE()

TEST_SUITE ( "Optimizer"  )

Definition at line 15 of file ConvertConstantsFloatToHalfTests.cpp.

References Graph::AddLayer(), OutputSlot::Connect(), armnn::Float16, armnn::Float32, Layer::GetOutputSlot(), armnn::info, FullyConnectedLayer::m_Weight, armnn::MakeOptimizations(), Optimizer::Pass(), and OutputSlot::SetTensorInfo().

{
using namespace armnn::optimizations;

TEST_CASE("ConvertConstantsFloatToHalfTest")
{
    armnn::Graph graph;

    const armnn::TensorInfo info({ 1, 1, 1, 2 }, armnn::DataType::Float16);

    // Create const tensor from fp32 data
    unsigned int dims[] = { 4, 1, 1, 1 };
    std::vector<float> floatWeights{ 1.0f, 2.0f, 3.0f, 4.0f };
    armnn::ConstTensor weights(armnn::TensorInfo(4, dims, armnn::DataType::Float32), floatWeights);

    // Create simple test network
    auto input = graph.AddLayer<armnn::InputLayer>(0, "input");
    input->GetOutputSlot().SetTensorInfo(info);

    auto fc = graph.AddLayer<armnn::FullyConnectedLayer>(armnn::FullyConnectedDescriptor(), "fc");
    fc->m_Weight = std::make_unique<armnn::ScopedTensorHandle>(weights);
    fc->GetOutputSlot().SetTensorInfo(info);

    auto output = graph.AddLayer<armnn::OutputLayer>(1, "output");

    // Connect up the layers
    input->GetOutputSlot().Connect(fc->GetInputSlot(0));
    fc->GetOutputSlot().Connect(output->GetInputSlot(0));

    // Check tensor data type before conversion
    CHECK(fc->m_Weight->GetTensorInfo().GetDataType() == armnn::DataType::Float32);

    // Run the optimizer
    armnn::Optimizer::Pass(graph, armnn::MakeOptimizations(ConvertConstantsFloatToHalf()));

    // Check tensor data type after conversion
    CHECK(fc->m_Weight->GetTensorInfo().GetDataType() == armnn::DataType::Float16);

    // Check whether data matches expected fp16 data
    const Half* data = fc->m_Weight->GetConstTensor<Half>();
    CHECK(data[0] == Half(1.0f));
    CHECK(data[1] == Half(2.0f));
    CHECK(data[2] == Half(3.0f));
    CHECK(data[3] == Half(4.0f));
}

}
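
For context, the ConvertConstantsFloatToHalf optimization exercised by this test is the same pass that armnn::Optimize() applies when OptimizerOptions::m_ReduceFp32ToFp16 is enabled. The following is a minimal, illustrative sketch (not part of this test file) of requesting the conversion through the public API; the trivial input-to-output network and the Compute::CpuRef backend choice are assumptions made for the example, and a real network would contain Float32 constants (such as fully connected weights) for the pass to convert.

#include <armnn/ArmNN.hpp>

#include <vector>

int main()
{
    using namespace armnn;

    // Illustrative sketch only: trivial Float32 network, input -> output.
    INetworkPtr net = INetwork::Create();
    IConnectableLayer* input  = net->AddInputLayer(0, "input");
    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    TensorInfo info({ 1, 1, 1, 2 }, DataType::Float32);
    input->GetOutputSlot(0).SetTensorInfo(info);
    input->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // Request the fp32 -> fp16 reduction; Optimize() then runs
    // ConvertConstantsFloatToHalf (among other passes) on the graph.
    IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());
    OptimizerOptions options;
    options.m_ReduceFp32ToFp16 = true;

    std::vector<BackendId> backends = { Compute::CpuRef };
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), options);

    return optNet ? 0 : 1;
}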