ArmNN
 22.08
QuantizedLstmEndToEndTestImpl.cpp File Reference
#include "QuantizedLstmEndToEndTestImpl.hpp"
#include <CommonTestUtils.hpp>
#include "EndToEndTestImpl.hpp"
#include <ResolveType.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/QuantizedLstmParams.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnnTestUtils/TensorHelpers.hpp>
#include <doctest/doctest.h>
#include <type_traits>

Go to the source code of this file.

Functions

void QuantizedLstmEndToEnd (const std::vector< armnn::BackendId > &backends)
 

Function Documentation

◆ QuantizedLstmEndToEnd()

void QuantizedLstmEndToEnd ( const std::vector< armnn::BackendId > &  backends)

Definition at line 181 of file QuantizedLstmEndToEndTestImpl.cpp.

References armnn::Optimize(), armnn::QAsymmU8, armnn::QSymmS16, and TensorInfo::SetConstant().

Referenced by TEST_SUITE().

182 {
183  std::vector<uint8_t> inputVector = {166, 179, 50, 150};
184  armnn::TensorInfo inputDesc({2, 2}, armnn::DataType::QAsymmU8);
185 
186  std::vector<int16_t> cellStateInVector = {876, 1034, 955, -909, 761, 1029, 796, -1036};
187  armnn::TensorInfo cellStateInDesc({2, 4}, armnn::DataType::QSymmS16);
188 
189  std::vector<uint8_t> outputStateInVector = {136, 150, 140, 115, 135, 152, 138, 112};
190  armnn::TensorInfo outputStateInDesc({2, 4}, armnn::DataType::QAsymmU8);
191 
192  std::vector<int16_t> cellStateOutVector = {1485, 1177, 1373, -1023, 1019, 1355, 1097, -1235};
193  armnn::TensorInfo cellStateOutVectorDesc({2, 4}, armnn::DataType::QSymmS16);
194 
195  std::vector<uint8_t> outputStateOutVector = {140, 151, 146, 112, 136, 156, 142, 112};
196  armnn::TensorInfo outputDesc({2, 4}, armnn::DataType::QAsymmU8);
197 
198  // Builds up the structure of the network
199  armnn::INetworkPtr net = CreateQuantizedLstmNetwork(inputDesc.GetShape(), outputDesc.GetShape());
200 
202  IRuntimePtr runtime(IRuntime::Create(options));
203 
204  // optimize the network
205  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
206 
207  // Loads it into the runtime.
208  NetworkId netId;
209  runtime->LoadNetwork(netId, std::move(optNet));
210 
211  InputTensors inputTensors;
212  inputTensors.reserve(3);
213 
214  // input
215  TensorInfo inputTensorInfo0 = runtime->GetInputTensorInfo(netId, 0);
216  TensorInfo inputTensorInfo1 = runtime->GetInputTensorInfo(netId, 1);
217  TensorInfo inputTensorInfo2 = runtime->GetInputTensorInfo(netId, 2);
218  inputTensorInfo0.SetConstant(true);
219  inputTensorInfo1.SetConstant(true);
220  inputTensorInfo2.SetConstant(true);
221 
222  inputTensors.push_back({0, ConstTensor(inputTensorInfo0, inputVector.data())});
223  inputTensors.push_back({1, ConstTensor(inputTensorInfo1, cellStateInVector.data())});
224  inputTensors.push_back({2, ConstTensor(inputTensorInfo2, outputStateInVector.data())});
225 
226  OutputTensors outputTensors;
227  outputTensors.reserve(2);
228 
229  //output
230  std::vector<int16_t> cellStateOutResult(cellStateOutVector.size());
231  std::vector<uint8_t> outputStateOutResult(outputStateOutVector.size());
232  outputTensors.push_back({0, Tensor(runtime->GetOutputTensorInfo(netId, 0), cellStateOutResult.data())});
233  outputTensors.push_back({1, Tensor(runtime->GetOutputTensorInfo(netId, 1), outputStateOutResult.data())});
234 
235  // Does the inference.
236  runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
237 
238  // Checks the results
239  constexpr int16_t toleranceInt16 = 2;
240  for (unsigned int i = 0u; i < cellStateOutResult.size(); ++i)
241  {
242  CHECK(IsCloseEnough(cellStateOutVector[i], cellStateOutResult[i], toleranceInt16));
243  }
244 
245  constexpr uint8_t toleranceUint8 = 1;
246  for (unsigned int i = 0u; i < outputStateOutResult.size(); ++i)
247  {
248  CHECK(IsCloseEnough(outputStateOutVector[i], outputStateOutResult[i], toleranceUint8));
249  }
250 }
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:33
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:392
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:319
IOptimizedNetworkPtr Optimize(const INetwork& network, const std::vector<BackendId>& backendPreferences, const IDeviceSpec& deviceSpec, const OptimizerOptions& options = OptimizerOptions(), Optional<std::vector<std::string>&> messages = EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1864
int NetworkId
Definition: IRuntime.hpp:27
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:327
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:393
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:239
void SetConstant(const bool IsConstant=true)
Marks the data corresponding to this tensor info as constant.
Definition: Tensor.cpp:514
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:238