ArmNN
 21.02
QLstmLayer.cpp
//
// Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "QLstmLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/LstmParams.hpp>
#include <armnn/TypesUtils.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

namespace armnn
{

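// The layer has 3 inputs (input, previousOutputIn, previousCellStateIn) and
// 3 outputs (outputStateOut, cellStateOut, output), hence LayerWithParameters(3, 3, ...).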
QLstmLayer::QLstmLayer(const QLstmDescriptor& param, const char* name)
        : LayerWithParameters(3, 3, LayerType::QLstm, param, name)
{
}

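// Builds the QLstmQueueDescriptor from the stored weight/bias handles and asks the backend
// workload factory to create the QLstm workload. Optional parameter sets are only populated
// when the corresponding descriptor flag is enabled.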
std::unique_ptr<IWorkload> QLstmLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    QLstmQueueDescriptor descriptor;

    // Basic parameters
    descriptor.m_InputToForgetWeights     = m_BasicParameters.m_InputToForgetWeights.get();
    descriptor.m_InputToCellWeights       = m_BasicParameters.m_InputToCellWeights.get();
    descriptor.m_InputToOutputWeights     = m_BasicParameters.m_InputToOutputWeights.get();
    descriptor.m_RecurrentToForgetWeights = m_BasicParameters.m_RecurrentToForgetWeights.get();
    descriptor.m_RecurrentToCellWeights   = m_BasicParameters.m_RecurrentToCellWeights.get();
    descriptor.m_RecurrentToOutputWeights = m_BasicParameters.m_RecurrentToOutputWeights.get();
    descriptor.m_ForgetGateBias           = m_BasicParameters.m_ForgetGateBias.get();
    descriptor.m_CellBias                 = m_BasicParameters.m_CellBias.get();
    descriptor.m_OutputGateBias           = m_BasicParameters.m_OutputGateBias.get();

    // CIFG parameters
    if (!m_Param.m_CifgEnabled)
    {
        descriptor.m_InputToInputWeights     = m_CifgParameters.m_InputToInputWeights.get();
        descriptor.m_RecurrentToInputWeights = m_CifgParameters.m_RecurrentToInputWeights.get();
        descriptor.m_InputGateBias           = m_CifgParameters.m_InputGateBias.get();
    }

    // Projection parameters
    if (m_Param.m_ProjectionEnabled)
    {
        descriptor.m_ProjectionWeights = m_ProjectionParameters.m_ProjectionWeights.get();
        descriptor.m_ProjectionBias    = m_ProjectionParameters.m_ProjectionBias.get();
    }

    // Peephole parameters
    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            descriptor.m_CellToInputWeights = m_PeepholeParameters.m_CellToInputWeights.get();
        }

        descriptor.m_CellToForgetWeights = m_PeepholeParameters.m_CellToForgetWeights.get();
        descriptor.m_CellToOutputWeights = m_PeepholeParameters.m_CellToOutputWeights.get();
    }

    // Layer normalisation parameters
    if (m_Param.m_LayerNormEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            descriptor.m_InputLayerNormWeights = m_LayerNormParameters.m_InputLayerNormWeights.get();
        }
        descriptor.m_ForgetLayerNormWeights = m_LayerNormParameters.m_ForgetLayerNormWeights.get();
        descriptor.m_CellLayerNormWeights   = m_LayerNormParameters.m_CellLayerNormWeights.get();
        descriptor.m_OutputLayerNormWeights = m_LayerNormParameters.m_OutputLayerNormWeights.get();
    }

    SetAdditionalInfo(descriptor);

    return factory.CreateQLstm(descriptor, PrepInfoAndDesc(descriptor));
}

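// Deep-copies the layer into the target graph: each ScopedCpuTensorHandle that is set
// is duplicated so the clone owns its own copies of the constant tensors.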
QLstmLayer* QLstmLayer::Clone(Graph& graph) const
{
    auto layer = CloneBase<QLstmLayer>(graph, m_Param, GetName());

    layer->m_BasicParameters.m_InputToForgetWeights = m_BasicParameters.m_InputToForgetWeights ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_InputToForgetWeights) : nullptr;
    layer->m_BasicParameters.m_InputToCellWeights = m_BasicParameters.m_InputToCellWeights ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_InputToCellWeights) : nullptr;
    layer->m_BasicParameters.m_InputToOutputWeights = m_BasicParameters.m_InputToOutputWeights ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_InputToOutputWeights) : nullptr;
    layer->m_BasicParameters.m_RecurrentToForgetWeights = m_BasicParameters.m_RecurrentToForgetWeights ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_RecurrentToForgetWeights) : nullptr;
    layer->m_BasicParameters.m_RecurrentToCellWeights = m_BasicParameters.m_RecurrentToCellWeights ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_RecurrentToCellWeights) : nullptr;
    layer->m_BasicParameters.m_RecurrentToOutputWeights = m_BasicParameters.m_RecurrentToOutputWeights ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_RecurrentToOutputWeights) : nullptr;
    layer->m_BasicParameters.m_ForgetGateBias = m_BasicParameters.m_ForgetGateBias ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_ForgetGateBias) : nullptr;
    layer->m_BasicParameters.m_CellBias = m_BasicParameters.m_CellBias ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_CellBias) : nullptr;
    layer->m_BasicParameters.m_OutputGateBias = m_BasicParameters.m_OutputGateBias ?
            std::make_unique<ScopedCpuTensorHandle>(*m_BasicParameters.m_OutputGateBias) : nullptr;

    if (!m_Param.m_CifgEnabled)
    {
        layer->m_CifgParameters.m_InputToInputWeights = m_CifgParameters.m_InputToInputWeights ?
                std::make_unique<ScopedCpuTensorHandle>(*m_CifgParameters.m_InputToInputWeights) : nullptr;
        layer->m_CifgParameters.m_RecurrentToInputWeights = m_CifgParameters.m_RecurrentToInputWeights ?
                std::make_unique<ScopedCpuTensorHandle>(*m_CifgParameters.m_RecurrentToInputWeights) : nullptr;
        layer->m_CifgParameters.m_InputGateBias = m_CifgParameters.m_InputGateBias ?
                std::make_unique<ScopedCpuTensorHandle>(*m_CifgParameters.m_InputGateBias) : nullptr;
    }

    if (m_Param.m_ProjectionEnabled)
    {
        layer->m_ProjectionParameters.m_ProjectionWeights = m_ProjectionParameters.m_ProjectionWeights ?
                std::make_unique<ScopedCpuTensorHandle>(*m_ProjectionParameters.m_ProjectionWeights) : nullptr;
        layer->m_ProjectionParameters.m_ProjectionBias = m_ProjectionParameters.m_ProjectionBias ?
                std::make_unique<ScopedCpuTensorHandle>(*m_ProjectionParameters.m_ProjectionBias) : nullptr;
    }

    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            layer->m_PeepholeParameters.m_CellToInputWeights = m_PeepholeParameters.m_CellToInputWeights ?
                    std::make_unique<ScopedCpuTensorHandle>(*m_PeepholeParameters.m_CellToInputWeights) : nullptr;
        }

        layer->m_PeepholeParameters.m_CellToForgetWeights = m_PeepholeParameters.m_CellToForgetWeights ?
                std::make_unique<ScopedCpuTensorHandle>(*m_PeepholeParameters.m_CellToForgetWeights) : nullptr;
        layer->m_PeepholeParameters.m_CellToOutputWeights = m_PeepholeParameters.m_CellToOutputWeights ?
                std::make_unique<ScopedCpuTensorHandle>(*m_PeepholeParameters.m_CellToOutputWeights) : nullptr;
    }

    if (m_Param.m_LayerNormEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            layer->m_LayerNormParameters.m_InputLayerNormWeights = m_LayerNormParameters.m_InputLayerNormWeights ?
                    std::make_unique<ScopedCpuTensorHandle>(*m_LayerNormParameters.m_InputLayerNormWeights) : nullptr;
        }

        layer->m_LayerNormParameters.m_ForgetLayerNormWeights = m_LayerNormParameters.m_ForgetLayerNormWeights ?
                std::make_unique<ScopedCpuTensorHandle>(*m_LayerNormParameters.m_ForgetLayerNormWeights) : nullptr;
        layer->m_LayerNormParameters.m_CellLayerNormWeights = m_LayerNormParameters.m_CellLayerNormWeights ?
                std::make_unique<ScopedCpuTensorHandle>(*m_LayerNormParameters.m_CellLayerNormWeights) : nullptr;
        layer->m_LayerNormParameters.m_OutputLayerNormWeights = m_LayerNormParameters.m_OutputLayerNormWeights ?
                std::make_unique<ScopedCpuTensorHandle>(*m_LayerNormParameters.m_OutputLayerNormWeights) : nullptr;
    }

    return std::move(layer);
}

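// Infers the three output shapes from { input, previousOutputIn, previousCellStateIn }:
// outputStateOut and output are [batchSize, outputSize], cellStateOut is [batchSize, numUnits].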
std::vector<TensorShape> QLstmLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
{
    ARMNN_ASSERT(inputShapes.size() == 3);

    // Get input values for validation
    unsigned int batchSize  = inputShapes[0][0];
    unsigned int outputSize = inputShapes[1][1];
    unsigned int numUnits   = inputShapes[2][1];

    std::vector<TensorShape> outShapes;
    outShapes.push_back(TensorShape({ batchSize, outputSize })); // outputStateOut
    outShapes.push_back(TensorShape({ batchSize, numUnits }));   // cellStateOut
    outShapes.push_back(TensorShape({ batchSize, outputSize })); // output

    return outShapes;
}

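// Validates connections and mandatory/optional constant tensors against the descriptor
// flags, then checks (or infers) the shapes of the three output slots.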
void QLstmLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(3, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes(
    {
        GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape(), // input
        GetInputSlot(1).GetConnection()->GetTensorInfo().GetShape(), // previousOutputIn
        GetInputSlot(2).GetConnection()->GetTensorInfo().GetShape()  // previousCellStateIn
    });

    ARMNN_ASSERT(inferredShapes.size() == 3);

    // Check if the weights are nullptr for basic params
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToForgetWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_InputToForgetWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToCellWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_InputToCellWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToOutputWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_InputToOutputWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToForgetWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_RecurrentToForgetWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToCellWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_RecurrentToCellWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToOutputWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_RecurrentToOutputWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_ForgetGateBias != nullptr,
                     "QLstmLayer: m_BasicParameters.m_ForgetGateBias should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_CellBias != nullptr,
                     "QLstmLayer: m_BasicParameters.m_CellBias should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_OutputGateBias != nullptr,
                     "QLstmLayer: m_BasicParameters.m_OutputGateBias should not be null.");

    if (!m_Param.m_CifgEnabled)
    {
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputToInputWeights != nullptr,
                         "QLstmLayer: m_CifgParameters.m_InputToInputWeights should not be null.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_RecurrentToInputWeights != nullptr,
                         "QLstmLayer: m_CifgParameters.m_RecurrentToInputWeights should not be null.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputGateBias != nullptr,
                         "QLstmLayer: m_CifgParameters.m_InputGateBias should not be null.");

        ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "QLstmLayer");
    }
    else
    {
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputToInputWeights == nullptr,
                         "QLstmLayer: m_CifgParameters.m_InputToInputWeights should not have a value when CIFG is enabled.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_RecurrentToInputWeights == nullptr,
                         "QLstmLayer: m_CifgParameters.m_RecurrentToInputWeights should "
                         "not have a value when CIFG is enabled.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputGateBias == nullptr,
                         "QLstmLayer: m_CifgParameters.m_InputGateBias should not have a value when CIFG is enabled.");

        ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "QLstmLayer");
    }

    if (m_Param.m_ProjectionEnabled)
    {
        ARMNN_ASSERT_MSG(m_ProjectionParameters.m_ProjectionWeights != nullptr,
                         "QLstmLayer: m_ProjectionParameters.m_ProjectionWeights should not be null.");
    }

    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToInputWeights != nullptr,
                             "QLstmLayer: m_PeepholeParameters.m_CellToInputWeights should not be null "
                             "when Peephole is enabled and CIFG is disabled.");
        }

        ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToForgetWeights != nullptr,
                         "QLstmLayer: m_PeepholeParameters.m_CellToForgetWeights should not be null.");
        ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToOutputWeights != nullptr,
                         "QLstmLayer: m_PeepholeParameters.m_CellToOutputWeights should not be null.");
    }

    ValidateAndCopyShape(
        GetOutputSlot(1).GetTensorInfo().GetShape(), inferredShapes[1], m_ShapeInferenceMethod, "QLstmLayer", 1);
    ValidateAndCopyShape(
        GetOutputSlot(2).GetTensorInfo().GetShape(), inferredShapes[2], m_ShapeInferenceMethod, "QLstmLayer", 2);

    if (m_Param.m_LayerNormEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            ARMNN_ASSERT_MSG(m_LayerNormParameters.m_InputLayerNormWeights != nullptr,
                             "QLstmLayer: m_LayerNormParameters.m_InputLayerNormWeights should not be null.");
        }
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr,
                         "QLstmLayer: m_LayerNormParameters.m_ForgetLayerNormWeights should not be null.");
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_CellLayerNormWeights != nullptr,
                         "QLstmLayer: m_LayerNormParameters.m_CellLayerNormWeights should not be null.");
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_OutputLayerNormWeights != nullptr,
                         "QLstmLayer: m_LayerNormParameters.m_OutputLayerNormWeights should not be null.");
    }
}

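// Returns references to all of the layer's constant tensor handles (weights and biases)
// so callers can read or replace the backing data in place.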
Layer::ConstantTensors QLstmLayer::GetConstantTensorsByRef()
{
    // Basic parameters
    return {m_BasicParameters.m_InputToForgetWeights, m_BasicParameters.m_InputToCellWeights,
            m_BasicParameters.m_InputToOutputWeights, m_BasicParameters.m_RecurrentToForgetWeights,
            m_BasicParameters.m_RecurrentToCellWeights, m_BasicParameters.m_RecurrentToOutputWeights,
            m_BasicParameters.m_ForgetGateBias, m_BasicParameters.m_CellBias, m_BasicParameters.m_OutputGateBias,
            // Cifg parameters
            m_CifgParameters.m_InputToInputWeights, m_CifgParameters.m_RecurrentToInputWeights,
            m_CifgParameters.m_InputGateBias,
            // Projection parameters
            m_ProjectionParameters.m_ProjectionWeights, m_ProjectionParameters.m_ProjectionBias,
            // Peephole parameters
            m_PeepholeParameters.m_CellToInputWeights, m_PeepholeParameters.m_CellToForgetWeights,
            m_PeepholeParameters.m_CellToOutputWeights,
            // Layer normalisation parameters
            m_LayerNormParameters.m_InputLayerNormWeights, m_LayerNormParameters.m_ForgetLayerNormWeights,
            m_LayerNormParameters.m_CellLayerNormWeights, m_LayerNormParameters.m_OutputLayerNormWeights};
}

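// Wraps each stored handle in a ConstTensor (mapping the backing memory) and packs them
// into LstmInputParams before invoking the visitor's VisitQLstmLayer callback.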
void QLstmLayer::Accept(ILayerVisitor& visitor) const
{
    LstmInputParams inputParams;

    ConstTensor inputToInputWeightsTensor;
    if (m_CifgParameters.m_InputToInputWeights != nullptr)
    {
        ConstTensor inputToInputWeightsTensorCopy(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
                                                  m_CifgParameters.m_InputToInputWeights->Map(true));
        inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
        inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
    }

    ConstTensor inputToForgetWeightsTensor;
    if (m_BasicParameters.m_InputToForgetWeights != nullptr)
    {
        ConstTensor inputToForgetWeightsTensorCopy(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
                                                   m_BasicParameters.m_InputToForgetWeights->Map(true));
        inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
        inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
    }

    ConstTensor inputToCellWeightsTensor;
    if (m_BasicParameters.m_InputToCellWeights != nullptr)
    {
        ConstTensor inputToCellWeightsTensorCopy(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
                                                 m_BasicParameters.m_InputToCellWeights->Map(true));
        inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
        inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
    }

    ConstTensor inputToOutputWeightsTensor;
    if (m_BasicParameters.m_InputToOutputWeights != nullptr)
    {
        ConstTensor inputToOutputWeightsTensorCopy(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
                                                   m_BasicParameters.m_InputToOutputWeights->Map(true));
        inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
        inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
    }

    ConstTensor recurrentToInputWeightsTensor;
    if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
    {
        ConstTensor recurrentToInputWeightsTensorCopy(
                m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
                m_CifgParameters.m_RecurrentToInputWeights->Map(true));
        recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
        inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
    }

    ConstTensor recurrentToForgetWeightsTensor;
    if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
    {
        ConstTensor recurrentToForgetWeightsTensorCopy(
                m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
                m_BasicParameters.m_RecurrentToForgetWeights->Map(true));
        recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
        inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
    }

    ConstTensor recurrentToCellWeightsTensor;
    if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
    {
        ConstTensor recurrentToCellWeightsTensorCopy(
                m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
                m_BasicParameters.m_RecurrentToCellWeights->Map(true));
        recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
        inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
    }

    ConstTensor recurrentToOutputWeightsTensor;
    if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
    {
        ConstTensor recurrentToOutputWeightsTensorCopy(
                m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
                m_BasicParameters.m_RecurrentToOutputWeights->Map(true));
        recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
        inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
    }

    ConstTensor cellToInputWeightsTensor;
    if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
    {
        ConstTensor cellToInputWeightsTensorCopy(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
                                                 m_PeepholeParameters.m_CellToInputWeights->Map(true));
        cellToInputWeightsTensor = cellToInputWeightsTensorCopy;
        inputParams.m_CellToInputWeights = &cellToInputWeightsTensor;
    }

    ConstTensor cellToForgetWeightsTensor;
    if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
    {
        ConstTensor cellToForgetWeightsTensorCopy(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
                                                  m_PeepholeParameters.m_CellToForgetWeights->Map(true));
        cellToForgetWeightsTensor = cellToForgetWeightsTensorCopy;
        inputParams.m_CellToForgetWeights = &cellToForgetWeightsTensor;
    }

    ConstTensor cellToOutputWeightsTensor;
    if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
    {
        ConstTensor cellToOutputWeightsTensorCopy(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
                                                  m_PeepholeParameters.m_CellToOutputWeights->Map(true));
        cellToOutputWeightsTensor = cellToOutputWeightsTensorCopy;
        inputParams.m_CellToOutputWeights = &cellToOutputWeightsTensor;
    }

    ConstTensor inputGateBiasTensor;
    if (m_CifgParameters.m_InputGateBias != nullptr)
    {
        ConstTensor inputGateBiasTensorCopy(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
                                            m_CifgParameters.m_InputGateBias->Map(true));
        inputGateBiasTensor = inputGateBiasTensorCopy;
        inputParams.m_InputGateBias = &inputGateBiasTensor;
    }

    ConstTensor forgetGateBiasTensor;
    if (m_BasicParameters.m_ForgetGateBias != nullptr)
    {
        ConstTensor forgetGateBiasTensorCopy(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
                                             m_BasicParameters.m_ForgetGateBias->Map(true));
        forgetGateBiasTensor = forgetGateBiasTensorCopy;
        inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
    }

    ConstTensor cellBiasTensor;
    if (m_BasicParameters.m_CellBias != nullptr)
    {
        ConstTensor cellBiasTensorCopy(m_BasicParameters.m_CellBias->GetTensorInfo(),
                                       m_BasicParameters.m_CellBias->Map(true));
        cellBiasTensor = cellBiasTensorCopy;
        inputParams.m_CellBias = &cellBiasTensor;
    }

    ConstTensor outputGateBias;
    if (m_BasicParameters.m_OutputGateBias != nullptr)
    {
        ConstTensor outputGateBiasCopy(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
                                       m_BasicParameters.m_OutputGateBias->Map(true));
        outputGateBias = outputGateBiasCopy;
        inputParams.m_OutputGateBias = &outputGateBias;
    }

    ConstTensor projectionWeightsTensor;
    if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
    {
        ConstTensor projectionWeightsTensorCopy(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
                                                m_ProjectionParameters.m_ProjectionWeights->Map(true));
        projectionWeightsTensor = projectionWeightsTensorCopy;
        inputParams.m_ProjectionWeights = &projectionWeightsTensor;
    }

    ConstTensor projectionBiasTensor;
    if (m_ProjectionParameters.m_ProjectionBias != nullptr)
    {
        ConstTensor projectionBiasTensorCopy(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
                                             m_ProjectionParameters.m_ProjectionBias->Map(true));
        projectionBiasTensor = projectionBiasTensorCopy;
        inputParams.m_ProjectionBias = &projectionBiasTensor;
    }

    ConstTensor inputLayerNormTensor;
    if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
    {
        ConstTensor inputLayerNormTensorCopy(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
                                             m_LayerNormParameters.m_InputLayerNormWeights->Map(true));
        inputLayerNormTensor = inputLayerNormTensorCopy;
        inputParams.m_InputLayerNormWeights = &inputLayerNormTensor;
    }

    ConstTensor forgetLayerNormTensor;
    if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
    {
        ConstTensor forgetLayerNormTensorCopy(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
                                              m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true));
        forgetLayerNormTensor = forgetLayerNormTensorCopy;
        inputParams.m_ForgetLayerNormWeights = &forgetLayerNormTensor;
    }

    ConstTensor cellLayerNormTensor;
    if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
    {
        ConstTensor cellLayerNormTensorCopy(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
                                            m_LayerNormParameters.m_CellLayerNormWeights->Map(true));
        cellLayerNormTensor = cellLayerNormTensorCopy;
        inputParams.m_CellLayerNormWeights = &cellLayerNormTensor;
    }

    ConstTensor outputLayerNormTensor;
    if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
    {
        ConstTensor outputLayerNormTensorCopy(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
                                              m_LayerNormParameters.m_OutputLayerNormWeights->Map(true));
        outputLayerNormTensor = outputLayerNormTensorCopy;
        inputParams.m_OutputLayerNormWeights = &outputLayerNormTensor;
    }

    visitor.VisitQLstmLayer(this, GetParameters(), inputParams, GetName());
}

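// Collects every constant tensor that is set into a flat vector, in the order
// basic, CIFG, peephole, projection, layer norm, and hands them to the strategy.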
void QLstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
    std::vector<ConstTensor> constTensors;

    // First add mandatory/basic parameters
    if (m_BasicParameters.m_InputToForgetWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
                                              m_BasicParameters.m_InputToForgetWeights->Map(true)));
    }
    if (m_BasicParameters.m_InputToCellWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
                                              m_BasicParameters.m_InputToCellWeights->Map(true)));
    }
    if (m_BasicParameters.m_InputToOutputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
                                              m_BasicParameters.m_InputToOutputWeights->Map(true)));
    }
    if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
                m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
                m_BasicParameters.m_RecurrentToForgetWeights->Map(true)));
    }
    if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
                m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
                m_BasicParameters.m_RecurrentToCellWeights->Map(true)));
    }
    if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
                m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
                m_BasicParameters.m_RecurrentToOutputWeights->Map(true)));
    }
    if (m_BasicParameters.m_ForgetGateBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
                                              m_BasicParameters.m_ForgetGateBias->Map(true)));
    }
    if (m_BasicParameters.m_CellBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_CellBias->GetTensorInfo(),
                                              m_BasicParameters.m_CellBias->Map(true)));
    }
    if (m_BasicParameters.m_OutputGateBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
                                              m_BasicParameters.m_OutputGateBias->Map(true)));
    }

    // Add CIFG parameters
    if (m_CifgParameters.m_InputToInputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
                                              m_CifgParameters.m_InputToInputWeights->Map(true)));
    }
    if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
                m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
                m_CifgParameters.m_RecurrentToInputWeights->Map(true)));
    }
    if (m_CifgParameters.m_InputGateBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
                                              m_CifgParameters.m_InputGateBias->Map(true)));
    }

    // Add peephole parameters
    if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
                                              m_PeepholeParameters.m_CellToInputWeights->Map(true)));
    }
    if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
                                              m_PeepholeParameters.m_CellToForgetWeights->Map(true)));
    }
    if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
                                              m_PeepholeParameters.m_CellToOutputWeights->Map(true)));
    }

    // Add projection parameters
    if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
                                              m_ProjectionParameters.m_ProjectionWeights->Map(true)));
    }
    if (m_ProjectionParameters.m_ProjectionBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
                                              m_ProjectionParameters.m_ProjectionBias->Map(true)));
    }

    // Add norm parameters
    if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
                                              m_LayerNormParameters.m_InputLayerNormWeights->Map(true)));
    }
    if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
                                              m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true)));
    }
    if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
                                              m_LayerNormParameters.m_CellLayerNormWeights->Map(true)));
    }
    if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
                                              m_LayerNormParameters.m_OutputLayerNormWeights->Map(true)));
    }

    strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
}

} // namespace armnn