ArmNN
 20.05
QuantizedLstmEndToEndTestImpl.cpp File Reference
#include "QuantizedLstmEndToEndTestImpl.hpp"
#include "CommonTestUtils.hpp"
#include "EndToEndTestImpl.hpp"
#include <ResolveType.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/QuantizedLstmParams.hpp>
#include <test/TensorHelpers.hpp>
#include <boost/test/unit_test.hpp>
#include <type_traits>

Go to the source code of this file.

Functions

void QuantizedLstmEndToEnd (const std::vector< armnn::BackendId > &backends)
 

Function Documentation

◆ QuantizedLstmEndToEnd()

void QuantizedLstmEndToEnd ( const std::vector< armnn::BackendId > &  backends)

Definition at line 179 of file QuantizedLstmEndToEndTestImpl.cpp.

References BOOST_CHECK(), armnn::Optimize(), options, armnn::QAsymmU8, and armnn::QSymmS16.

Referenced by BOOST_AUTO_TEST_CASE().

180 {
181  std::vector<uint8_t> inputVector = {166, 179, 50, 150};
182  armnn::TensorInfo inputDesc({2, 2}, armnn::DataType::QAsymmU8);
183  boost::multi_array<uint8_t, 2> input = MakeTensor<uint8_t, 2>(inputDesc, inputVector);
184 
185  std::vector<int16_t> cellStateInVector = {876, 1034, 955, -909, 761, 1029, 796, -1036};
186  armnn::TensorInfo cellStateInDesc({2, 4}, armnn::DataType::QSymmS16);
187  boost::multi_array<int16_t, 2> cellStateIn = MakeTensor<int16_t, 2>(cellStateInDesc, cellStateInVector);
188 
189  std::vector<uint8_t> outputStateInVector = {136, 150, 140, 115, 135, 152, 138, 112};
190  armnn::TensorInfo outputStateInDesc({2, 4}, armnn::DataType::QAsymmU8);
191  boost::multi_array<uint8_t, 2> outputStateIn = MakeTensor<uint8_t, 2>(outputStateInDesc, outputStateInVector);
192 
193  std::vector<int16_t> cellStateOutVector = {1485, 1177, 1373, -1023, 1019, 1355, 1097, -1235};
194  armnn::TensorInfo cellStateOutVectorDesc({2, 4}, armnn::DataType::QSymmS16);
195  boost::multi_array<int16_t, 2> cellStateOut = MakeTensor<int16_t, 2>(cellStateOutVectorDesc, cellStateOutVector);
196 
197  std::vector<uint8_t> outputStateOutVector = {140, 151, 146, 112, 136, 156, 142, 112};
198  armnn::TensorInfo outputDesc({2, 4}, armnn::DataType::QAsymmU8);
199  boost::multi_array<uint8_t, 2> outputStateOut = MakeTensor<uint8_t, 2>(outputDesc, outputStateOutVector);
200 
201  // Builds up the structure of the network
202  armnn::INetworkPtr net = CreateQuantizedLstmNetwork(input, outputStateOut);
203 
204  BOOST_TEST_CHECKPOINT("create a network");
205 
206  IRuntime::CreationOptions options;
207  IRuntimePtr runtime(IRuntime::Create(options));
208 
209  // optimize the network
210  IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
211 
212  // Loads it into the runtime.
213  NetworkId netId;
214  runtime->LoadNetwork(netId, std::move(optNet));
215 
216  InputTensors inputTensors;
217  inputTensors.reserve(3);
218 
219  // input
220  inputTensors.push_back({0, ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputVector.data())});
221  inputTensors.push_back({1, ConstTensor(runtime->GetInputTensorInfo(netId, 1), cellStateInVector.data())});
222  inputTensors.push_back({2, ConstTensor(runtime->GetInputTensorInfo(netId, 2), outputStateInVector.data())});
223 
224  OutputTensors outputTensors;
225  outputTensors.reserve(2);
226 
227  //output
228  std::vector<int16_t > cellStateOutResult(cellStateOutVector.size());
229  std::vector<uint8_t > outputStateOutResult(outputStateOutVector.size());
230  outputTensors.push_back({0, Tensor(runtime->GetOutputTensorInfo(netId, 0), cellStateOutResult.data())});
231  outputTensors.push_back({1, Tensor(runtime->GetOutputTensorInfo(netId, 1), outputStateOutResult.data())});
232 
233  // Does the inference.
234  runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
235 
236  // Checks the results
237  constexpr int16_t toleranceInt16 = 2;
238  for (unsigned int i = 0u; i < cellStateOutResult.size(); ++i)
239  {
240  BOOST_CHECK(IsCloseEnough(cellStateOutVector[i], cellStateOutResult[i], toleranceInt16));
241  }
242 
243  constexpr uint8_t toleranceUint8 = 1;
244  for (unsigned int i = 0u; i < outputStateOutResult.size(); ++i)
245  {
246  BOOST_TEST(IsCloseEnough(outputStateOutVector[i], outputStateOutResult[i], toleranceUint8));
247  }
248 }
std::unique_ptr< IRuntime, void(*)(IRuntime *runtime)> IRuntimePtr
Definition: IRuntime.hpp:25
std::vector< std::pair< LayerBindingId, class ConstTensor > > InputTensors
Definition: Tensor.hpp:225
int NetworkId
Definition: IRuntime.hpp:20
BOOST_CHECK(profilingService.GetCurrentState()==ProfilingState::WaitingForAck)
A tensor defined by a TensorInfo (shape and data type) and a mutable backing store.
Definition: Tensor.hpp:191
IOptimizedNetworkPtr Optimize(const INetwork &network, const std::vector< BackendId > &backendPreferences, const IDeviceSpec &deviceSpec, const OptimizerOptions &options=OptimizerOptions(), Optional< std::vector< std::string > &> messages=EmptyOptional())
Create an optimized version of the network.
Definition: Network.cpp:1003
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
Definition: Tensor.hpp:199
std::vector< std::pair< LayerBindingId, class Tensor > > OutputTensors
Definition: Tensor.hpp:226
std::unique_ptr< IOptimizedNetwork, void(*)(IOptimizedNetwork *network)> IOptimizedNetworkPtr
Definition: INetwork.hpp:573
std::unique_ptr< INetwork, void(*)(INetwork *network)> INetworkPtr
Definition: INetwork.hpp:101
armnn::Runtime::CreationOptions::ExternalProfilingOptions options