ArmNN
 22.02
QLstmLayer.cpp
//
// Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "QLstmLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/LstmParams.hpp>
#include <armnn/TypesUtils.hpp>

namespace armnn
{

QLstmLayer::QLstmLayer(const QLstmDescriptor& param, const char* name)
    : LayerWithParameters(3, 3, LayerType::QLstm, param, name)
{
}
std::unique_ptr<IWorkload> QLstmLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    QLstmQueueDescriptor descriptor;

    // Basic parameters
    descriptor.m_InputToForgetWeights = m_BasicParameters.m_InputToForgetWeights.get();
    descriptor.m_InputToCellWeights = m_BasicParameters.m_InputToCellWeights.get();
    descriptor.m_InputToOutputWeights = m_BasicParameters.m_InputToOutputWeights.get();
    descriptor.m_RecurrentToForgetWeights = m_BasicParameters.m_RecurrentToForgetWeights.get();
    descriptor.m_RecurrentToCellWeights = m_BasicParameters.m_RecurrentToCellWeights.get();
    descriptor.m_RecurrentToOutputWeights = m_BasicParameters.m_RecurrentToOutputWeights.get();
    descriptor.m_ForgetGateBias = m_BasicParameters.m_ForgetGateBias.get();
    descriptor.m_CellBias = m_BasicParameters.m_CellBias.get();
    descriptor.m_OutputGateBias = m_BasicParameters.m_OutputGateBias.get();

    // CIFG parameters
    if (!m_Param.m_CifgEnabled)
    {
        descriptor.m_InputToInputWeights = m_CifgParameters.m_InputToInputWeights.get();
        descriptor.m_RecurrentToInputWeights = m_CifgParameters.m_RecurrentToInputWeights.get();
        descriptor.m_InputGateBias = m_CifgParameters.m_InputGateBias.get();
    }

    // Projection parameters
    if (m_Param.m_ProjectionEnabled)
    {
        descriptor.m_ProjectionWeights = m_ProjectionParameters.m_ProjectionWeights.get();
        descriptor.m_ProjectionBias = m_ProjectionParameters.m_ProjectionBias.get();
    }

    // Peephole parameters
    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            descriptor.m_CellToInputWeights = m_PeepholeParameters.m_CellToInputWeights.get();
        }

        descriptor.m_CellToForgetWeights = m_PeepholeParameters.m_CellToForgetWeights.get();
        descriptor.m_CellToOutputWeights = m_PeepholeParameters.m_CellToOutputWeights.get();
    }

    // Layer normalisation parameters
    if (m_Param.m_LayerNormEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            descriptor.m_InputLayerNormWeights = m_LayerNormParameters.m_InputLayerNormWeights.get();
        }

        descriptor.m_ForgetLayerNormWeights = m_LayerNormParameters.m_ForgetLayerNormWeights.get();
        descriptor.m_CellLayerNormWeights = m_LayerNormParameters.m_CellLayerNormWeights.get();
        descriptor.m_OutputLayerNormWeights = m_LayerNormParameters.m_OutputLayerNormWeights.get();
    }

    SetAdditionalInfo(descriptor);

    return factory.CreateWorkload(LayerType::QLstm, descriptor, PrepInfoAndDesc(descriptor));
}
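CreateWorkload only forwards a handle for a parameter group when the corresponding descriptor flag is set; everything else stays null in the queue descriptor. As a rough, editor-added sketch (not part of this file) of how those flags are typically chosen when the layer is created:

#include <armnn/Descriptors.hpp>

// Hypothetical helper: builds a QLstmDescriptor whose flags decide which of the
// optional parameter groups above are actually forwarded to the workload.
armnn::QLstmDescriptor MakeExampleQLstmDescriptor()
{
    armnn::QLstmDescriptor desc;
    desc.m_CifgEnabled       = true;   // couple input and forget gates: no input-gate tensors
    desc.m_PeepholeEnabled   = false;  // no cell-to-gate peephole weights
    desc.m_ProjectionEnabled = true;   // projection weights/bias will be populated
    desc.m_LayerNormEnabled  = true;   // per-gate layer normalisation weights will be populated
    return desc;
}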
QLstmLayer* QLstmLayer::Clone(Graph& graph) const
{
    auto layer = CloneBase<QLstmLayer>(graph, m_Param, GetName());

    layer->m_BasicParameters.m_InputToForgetWeights = m_BasicParameters.m_InputToForgetWeights ?
            m_BasicParameters.m_InputToForgetWeights : nullptr;
    layer->m_BasicParameters.m_InputToCellWeights = m_BasicParameters.m_InputToCellWeights ?
            m_BasicParameters.m_InputToCellWeights : nullptr;
    layer->m_BasicParameters.m_InputToOutputWeights = m_BasicParameters.m_InputToOutputWeights ?
            m_BasicParameters.m_InputToOutputWeights : nullptr;
    layer->m_BasicParameters.m_RecurrentToForgetWeights = m_BasicParameters.m_RecurrentToForgetWeights ?
            m_BasicParameters.m_RecurrentToForgetWeights : nullptr;
    layer->m_BasicParameters.m_RecurrentToCellWeights = m_BasicParameters.m_RecurrentToCellWeights ?
            m_BasicParameters.m_RecurrentToCellWeights : nullptr;
    layer->m_BasicParameters.m_RecurrentToOutputWeights = m_BasicParameters.m_RecurrentToOutputWeights ?
            m_BasicParameters.m_RecurrentToOutputWeights : nullptr;
    layer->m_BasicParameters.m_ForgetGateBias = m_BasicParameters.m_ForgetGateBias ?
            m_BasicParameters.m_ForgetGateBias : nullptr;
    layer->m_BasicParameters.m_CellBias = m_BasicParameters.m_CellBias ?
            m_BasicParameters.m_CellBias : nullptr;
    layer->m_BasicParameters.m_OutputGateBias = m_BasicParameters.m_OutputGateBias ?
            m_BasicParameters.m_OutputGateBias : nullptr;

    if (!m_Param.m_CifgEnabled)
    {
        layer->m_CifgParameters.m_InputToInputWeights = m_CifgParameters.m_InputToInputWeights ?
                m_CifgParameters.m_InputToInputWeights : nullptr;
        layer->m_CifgParameters.m_RecurrentToInputWeights = m_CifgParameters.m_RecurrentToInputWeights ?
                m_CifgParameters.m_RecurrentToInputWeights : nullptr;
        layer->m_CifgParameters.m_InputGateBias = m_CifgParameters.m_InputGateBias ?
                m_CifgParameters.m_InputGateBias : nullptr;
    }

    if (m_Param.m_ProjectionEnabled)
    {
        layer->m_ProjectionParameters.m_ProjectionWeights = m_ProjectionParameters.m_ProjectionWeights ?
                m_ProjectionParameters.m_ProjectionWeights : nullptr;
        layer->m_ProjectionParameters.m_ProjectionBias = m_ProjectionParameters.m_ProjectionBias ?
                m_ProjectionParameters.m_ProjectionBias : nullptr;
    }

    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            layer->m_PeepholeParameters.m_CellToInputWeights = m_PeepholeParameters.m_CellToInputWeights ?
                    m_PeepholeParameters.m_CellToInputWeights : nullptr;
        }

        layer->m_PeepholeParameters.m_CellToForgetWeights = m_PeepholeParameters.m_CellToForgetWeights ?
                m_PeepholeParameters.m_CellToForgetWeights : nullptr;
        layer->m_PeepholeParameters.m_CellToOutputWeights = m_PeepholeParameters.m_CellToOutputWeights ?
                m_PeepholeParameters.m_CellToOutputWeights : nullptr;
    }

    if (m_Param.m_LayerNormEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            layer->m_LayerNormParameters.m_InputLayerNormWeights = m_LayerNormParameters.m_InputLayerNormWeights ?
                    m_LayerNormParameters.m_InputLayerNormWeights : nullptr;
        }

        layer->m_LayerNormParameters.m_ForgetLayerNormWeights = m_LayerNormParameters.m_ForgetLayerNormWeights ?
                m_LayerNormParameters.m_ForgetLayerNormWeights : nullptr;
        layer->m_LayerNormParameters.m_CellLayerNormWeights = m_LayerNormParameters.m_CellLayerNormWeights ?
                m_LayerNormParameters.m_CellLayerNormWeights : nullptr;
        layer->m_LayerNormParameters.m_OutputLayerNormWeights = m_LayerNormParameters.m_OutputLayerNormWeights ?
                m_LayerNormParameters.m_OutputLayerNormWeights : nullptr;
    }

    return std::move(layer);
}
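Clone copies the shared_ptr handles rather than the tensor data, so the cloned layer points at the same constant weight storage as the original. A minimal standalone sketch of that ownership pattern (illustrative types only, not ArmNN API):

#include <memory>

struct FakeWeights { float values[4] = { 0.1f, 0.2f, 0.3f, 0.4f }; };

int main()
{
    std::shared_ptr<FakeWeights> original = std::make_shared<FakeWeights>();
    std::shared_ptr<FakeWeights> cloned   = original ? original : nullptr; // same pattern as Clone above

    // Both owners reference the identical backing store; no weight data is copied.
    return (original.get() == cloned.get()) ? 0 : 1;
}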
std::vector<TensorShape> QLstmLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
{
    ARMNN_ASSERT(inputShapes.size() == 3);

    // Get input values for validation
    unsigned int batchSize = inputShapes[0][0];
    unsigned int outputSize = inputShapes[1][1];
    unsigned int numUnits = inputShapes[2][1];

    std::vector<TensorShape> outShapes;
    outShapes.push_back(TensorShape({ batchSize, outputSize })); // outputStateOut
    outShapes.push_back(TensorShape({ batchSize, numUnits }));   // cellStateOut
    outShapes.push_back(TensorShape({ batchSize, outputSize })); // output

    return outShapes;
}
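The inferred shapes depend on only three numbers: the batch size taken from the input, the output size taken from previousOutputIn, and the cell unit count taken from previousCellStateIn. A small editor's sketch of the same rule, using only the public TensorShape type:

#include <armnn/Tensor.hpp>
#include <vector>

// For example, batchSize = 2, outputSize = 16, numUnits = 32 gives
// { [2,16], [2,32], [2,16] } for outputStateOut, cellStateOut and output.
std::vector<armnn::TensorShape> ExpectedQLstmOutputShapes(unsigned int batchSize,
                                                          unsigned int outputSize,
                                                          unsigned int numUnits)
{
    return { armnn::TensorShape({ batchSize, outputSize }),   // outputStateOut
             armnn::TensorShape({ batchSize, numUnits }),     // cellStateOut
             armnn::TensorShape({ batchSize, outputSize }) }; // output
}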
void QLstmLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(3, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes(
    {
        GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape(), // input
        GetInputSlot(1).GetConnection()->GetTensorInfo().GetShape(), // previousOutputIn
        GetInputSlot(2).GetConnection()->GetTensorInfo().GetShape()  // previousCellStateIn
    });

    ARMNN_ASSERT(inferredShapes.size() == 3);

    // Check if the weights are nullptr for basic params
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToForgetWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_InputToForgetWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToCellWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_InputToCellWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToOutputWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_InputToOutputWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToForgetWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_RecurrentToForgetWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToCellWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_RecurrentToCellWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToOutputWeights != nullptr,
                     "QLstmLayer: m_BasicParameters.m_RecurrentToOutputWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_ForgetGateBias != nullptr,
                     "QLstmLayer: m_BasicParameters.m_ForgetGateBias should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_CellBias != nullptr,
                     "QLstmLayer: m_BasicParameters.m_CellBias should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_OutputGateBias != nullptr,
                     "QLstmLayer: m_BasicParameters.m_OutputGateBias should not be null.");

    if (!m_Param.m_CifgEnabled)
    {
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputToInputWeights != nullptr,
                         "QLstmLayer: m_CifgParameters.m_InputToInputWeights should not be null.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_RecurrentToInputWeights != nullptr,
                         "QLstmLayer: m_CifgParameters.m_RecurrentToInputWeights should not be null.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputGateBias != nullptr,
                         "QLstmLayer: m_CifgParameters.m_InputGateBias should not be null.");

        ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "QLstmLayer");
    }
    else
    {
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputToInputWeights == nullptr,
                         "QLstmLayer: m_CifgParameters.m_InputToInputWeights should not have a value when CIFG is enabled.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_RecurrentToInputWeights == nullptr,
                         "QLstmLayer: m_CifgParameters.m_RecurrentToInputWeights should "
                         "not have a value when CIFG is enabled.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputGateBias == nullptr,
                         "QLstmLayer: m_CifgParameters.m_InputGateBias should not have a value when CIFG is enabled.");

        ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "QLstmLayer");
    }

    if (m_Param.m_ProjectionEnabled)
    {
        ARMNN_ASSERT_MSG(m_ProjectionParameters.m_ProjectionWeights != nullptr,
                         "QLstmLayer: m_ProjectionParameters.m_ProjectionWeights should not be null.");
    }

    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToInputWeights != nullptr,
                             "QLstmLayer: m_PeepholeParameters.m_CellToInputWeights should not be null "
                             "when Peephole is enabled and CIFG is disabled.");
        }

        ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToForgetWeights != nullptr,
                         "QLstmLayer: m_PeepholeParameters.m_CellToForgetWeights should not be null.");
        ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToOutputWeights != nullptr,
                         "QLstmLayer: m_PeepholeParameters.m_CellToOutputWeights should not be null.");
    }

    ValidateAndCopyShape(
            GetOutputSlot(1).GetTensorInfo().GetShape(), inferredShapes[1], m_ShapeInferenceMethod, "QLstmLayer", 1);
    ValidateAndCopyShape(
            GetOutputSlot(2).GetTensorInfo().GetShape(), inferredShapes[2], m_ShapeInferenceMethod, "QLstmLayer", 2);

    if (m_Param.m_LayerNormEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            ARMNN_ASSERT_MSG(m_LayerNormParameters.m_InputLayerNormWeights != nullptr,
                             "QLstmLayer: m_LayerNormParameters.m_InputLayerNormWeights should not be null.");
        }
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr,
                         "QLstmLayer: m_LayerNormParameters.m_ForgetLayerNormWeights should not be null.");
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_CellLayerNormWeights != nullptr,
                         "QLstmLayer: m_LayerNormParameters.m_CellLayerNormWeights should not be null.");
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_OutputLayerNormWeights != nullptr,
                         "QLstmLayer: m_LayerNormParameters.m_OutputLayerNormWeights should not be null.");
    }
}

Layer::ConstantTensors QLstmLayer::GetConstantTensorsByRef()
{
    // For API stability DO NOT ALTER order and add new members to the end of vector
    return {m_BasicParameters.m_InputToForgetWeights,
            m_BasicParameters.m_InputToCellWeights,
            m_BasicParameters.m_InputToOutputWeights,
            m_BasicParameters.m_RecurrentToForgetWeights,
            m_BasicParameters.m_RecurrentToCellWeights,
            m_BasicParameters.m_RecurrentToOutputWeights,
            m_BasicParameters.m_ForgetGateBias,
            m_BasicParameters.m_CellBias,
            m_BasicParameters.m_OutputGateBias,

            // Cifg parameters
            m_CifgParameters.m_InputToInputWeights,
            m_CifgParameters.m_RecurrentToInputWeights,
            m_CifgParameters.m_InputGateBias,

            // Projection parameters
            m_ProjectionParameters.m_ProjectionWeights,
            m_ProjectionParameters.m_ProjectionBias,

            // Peephole parameters
            m_PeepholeParameters.m_CellToInputWeights,
            m_PeepholeParameters.m_CellToForgetWeights,
            m_PeepholeParameters.m_CellToOutputWeights,

            // Layer normalisation parameters
            m_LayerNormParameters.m_InputLayerNormWeights,
            m_LayerNormParameters.m_ForgetLayerNormWeights,
            m_LayerNormParameters.m_CellLayerNormWeights,
            m_LayerNormParameters.m_OutputLayerNormWeights};
}
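The reference wrappers returned above let graph-level passes inspect or swap each constant handle in place, with unset optional members simply left null. A hedged sketch of how such a pass might count the populated handles (assumes it is built inside the armnn source tree where QLstmLayer.hpp is visible and the getter is accessible; the helper name is invented):

#include "QLstmLayer.hpp"

// Hypothetical helper for illustration only.
unsigned int CountSetQLstmConstants(armnn::QLstmLayer& layer)
{
    unsigned int count = 0;
    for (auto& handleRef : layer.GetConstantTensorsByRef())
    {
        if (handleRef.get() != nullptr) // reference_wrapper -> shared_ptr<ConstTensorHandle>
        {
            ++count;
        }
    }
    return count;
}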
ARMNN_NO_DEPRECATE_WARN_BEGIN
void QLstmLayer::Accept(ILayerVisitor& visitor) const
{
    LstmInputParams inputParams;
    ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
    ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
    ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
    ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
    ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
    ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
    ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);

    // Cifg parameters
    ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
    ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
    ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);

    // Projection parameters
    ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
    ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);

    // Peephole parameters
    ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
    ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
    ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);

    // Layer normalisation parameters
    ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
    ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
    ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
    ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);

    ConstTensor inputToInputWeightsTensor;
    if (m_CifgParameters.m_InputToInputWeights != nullptr)
    {
        ConstTensor inputToInputWeightsTensorCopy(managedInputToInputWeights.GetTensorInfo(),
                                                  managedInputToInputWeights.Map());
        inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
        inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
    }

    ConstTensor inputToForgetWeightsTensor;
    if (m_BasicParameters.m_InputToForgetWeights != nullptr)
    {
        ConstTensor inputToForgetWeightsTensorCopy(managedInputToForgetWeights.GetTensorInfo(),
                                                   managedInputToForgetWeights.Map());
        inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
        inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
    }

    ConstTensor inputToCellWeightsTensor;
    if (m_BasicParameters.m_InputToCellWeights != nullptr)
    {
        ConstTensor inputToCellWeightsTensorCopy(managedInputToCellWeights.GetTensorInfo(),
                                                 managedInputToCellWeights.Map());
        inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
        inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
    }

    ConstTensor inputToOutputWeightsTensor;
    if (m_BasicParameters.m_InputToOutputWeights != nullptr)
    {
        ConstTensor inputToOutputWeightsTensorCopy(managedInputToOutputWeights.GetTensorInfo(),
                                                   managedInputToOutputWeights.Map());
        inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
        inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
    }

    ConstTensor recurrentToInputWeightsTensor;
    if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
    {
        ConstTensor recurrentToInputWeightsTensorCopy(
                managedRecurrentToInputWeights.GetTensorInfo(),
                managedRecurrentToInputWeights.Map());
        recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
        inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
    }

    ConstTensor recurrentToForgetWeightsTensor;
    if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
    {
        ConstTensor recurrentToForgetWeightsTensorCopy(
                managedRecurrentToForgetWeights.GetTensorInfo(),
                managedRecurrentToForgetWeights.Map());
        recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
        inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
    }

    ConstTensor recurrentToCellWeightsTensor;
    if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
    {
        ConstTensor recurrentToCellWeightsTensorCopy(
                managedRecurrentToCellWeights.GetTensorInfo(),
                managedRecurrentToCellWeights.Map());
        recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
        inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
    }

    ConstTensor recurrentToOutputWeightsTensor;
    if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
    {
        ConstTensor recurrentToOutputWeightsTensorCopy(
                managedRecurrentToOutputWeights.GetTensorInfo(),
                managedRecurrentToOutputWeights.Map());
        recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
        inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
    }

    ConstTensor cellToInputWeightsTensor;
    if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
    {
        ConstTensor cellToInputWeightsTensorCopy(managedCellToInputWeights.GetTensorInfo(),
                                                 managedCellToInputWeights.Map());
        cellToInputWeightsTensor = cellToInputWeightsTensorCopy;
        inputParams.m_CellToInputWeights = &cellToInputWeightsTensor;
    }

    ConstTensor cellToForgetWeightsTensor;
    if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
    {
        ConstTensor cellToForgetWeightsTensorCopy(managedCellToForgetWeights.GetTensorInfo(),
                                                  managedCellToForgetWeights.Map());
        cellToForgetWeightsTensor = cellToForgetWeightsTensorCopy;
        inputParams.m_CellToForgetWeights = &cellToForgetWeightsTensor;
    }

    ConstTensor cellToOutputWeightsTensor;
    if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
    {
        ConstTensor cellToOutputWeightsTensorCopy(managedCellToOutputWeights.GetTensorInfo(),
                                                  managedCellToOutputWeights.Map());
        cellToOutputWeightsTensor = cellToOutputWeightsTensorCopy;
        inputParams.m_CellToOutputWeights = &cellToOutputWeightsTensor;
    }

    ConstTensor inputGateBiasTensor;
    if (m_CifgParameters.m_InputGateBias != nullptr)
    {
        ConstTensor inputGateBiasTensorCopy(managedInputGateBias.GetTensorInfo(),
                                            managedInputGateBias.Map());
        inputGateBiasTensor = inputGateBiasTensorCopy;
        inputParams.m_InputGateBias = &inputGateBiasTensor;
    }

    ConstTensor forgetGateBiasTensor;
    if (m_BasicParameters.m_ForgetGateBias != nullptr)
    {
        ConstTensor forgetGateBiasTensorCopy(managedForgetGateBias.GetTensorInfo(),
                                             managedForgetGateBias.Map());
        forgetGateBiasTensor = forgetGateBiasTensorCopy;
        inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
    }

    ConstTensor cellBiasTensor;
    if (m_BasicParameters.m_CellBias != nullptr)
    {
        ConstTensor cellBiasTensorCopy(managedCellBias.GetTensorInfo(),
                                       managedCellBias.Map());
        cellBiasTensor = cellBiasTensorCopy;
        inputParams.m_CellBias = &cellBiasTensor;
    }

    ConstTensor outputGateBias;
    if (m_BasicParameters.m_OutputGateBias != nullptr)
    {
        ConstTensor outputGateBiasCopy(managedOutputGateBias.GetTensorInfo(),
                                       managedOutputGateBias.Map());
        outputGateBias = outputGateBiasCopy;
        inputParams.m_OutputGateBias = &outputGateBias;
    }

    ConstTensor projectionWeightsTensor;
    if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
    {
        ConstTensor projectionWeightsTensorCopy(managedProjectionWeights.GetTensorInfo(),
                                                managedProjectionWeights.Map());
        projectionWeightsTensor = projectionWeightsTensorCopy;
        inputParams.m_ProjectionWeights = &projectionWeightsTensor;
    }

    ConstTensor projectionBiasTensor;
    if (m_ProjectionParameters.m_ProjectionBias != nullptr)
    {
        ConstTensor projectionBiasTensorCopy(managedProjectionBias.GetTensorInfo(),
                                             managedProjectionBias.Map());
        projectionBiasTensor = projectionBiasTensorCopy;
        inputParams.m_ProjectionBias = &projectionBiasTensor;
    }

    ConstTensor inputLayerNormTensor;
    if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
    {
        ConstTensor inputLayerNormTensorCopy(managedInputLayerNormWeights.GetTensorInfo(),
                                             managedInputLayerNormWeights.Map());
        inputLayerNormTensor = inputLayerNormTensorCopy;
        inputParams.m_InputLayerNormWeights = &inputLayerNormTensor;
    }

    ConstTensor forgetLayerNormTensor;
    if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
    {
        ConstTensor forgetLayerNormTensorCopy(managedForgetLayerNormWeights.GetTensorInfo(),
                                              managedForgetLayerNormWeights.Map());
        forgetLayerNormTensor = forgetLayerNormTensorCopy;
        inputParams.m_ForgetLayerNormWeights = &forgetLayerNormTensor;
    }

    ConstTensor cellLayerNormTensor;
    if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
    {
        ConstTensor cellLayerNormTensorCopy(managedCellLayerNormWeights.GetTensorInfo(),
                                            managedCellLayerNormWeights.Map());
        cellLayerNormTensor = cellLayerNormTensorCopy;
        inputParams.m_CellLayerNormWeights = &cellLayerNormTensor;
    }

    ConstTensor outputLayerNormTensor;
    if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
    {
        ConstTensor outputLayerNormTensorCopy(managedOutputLayerNormWeights.GetTensorInfo(),
                                              managedOutputLayerNormWeights.Map());
        outputLayerNormTensor = outputLayerNormTensorCopy;
        inputParams.m_OutputLayerNormWeights = &outputLayerNormTensor;
    }

    visitor.VisitQLstmLayer(this, GetParameters(), inputParams, GetName());
}
ARMNN_NO_DEPRECATE_WARN_END
void QLstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
    std::vector<ConstTensor> constTensors;
    ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
    ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
    ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
    ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
    ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
    ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
    ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);

    // Cifg parameters
    ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
    ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
    ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);

    // Projection parameters
    ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
    ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);

    // Peephole parameters
    ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
    ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
    ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);

    // Layer normalisation parameters
    ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
    ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
    ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
    ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);

    // First add mandatory/basic parameters
    if (m_BasicParameters.m_InputToForgetWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputToForgetWeights.GetTensorInfo(),
                                              managedInputToForgetWeights.Map()));
    }
    if (m_BasicParameters.m_InputToCellWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputToCellWeights.GetTensorInfo(),
                                              managedInputToCellWeights.Map()));
    }
    if (m_BasicParameters.m_InputToOutputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputToOutputWeights.GetTensorInfo(),
                                              managedInputToOutputWeights.Map()));
    }
    if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
                managedRecurrentToForgetWeights.GetTensorInfo(),
                managedRecurrentToForgetWeights.Map()));
    }
    if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
                managedRecurrentToCellWeights.GetTensorInfo(),
                managedRecurrentToCellWeights.Map()));
    }
    if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
                managedRecurrentToOutputWeights.GetTensorInfo(),
                managedRecurrentToOutputWeights.Map()));
    }
    if (m_BasicParameters.m_ForgetGateBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedForgetGateBias.GetTensorInfo(),
                                              managedForgetGateBias.Map()));
    }
    if (m_BasicParameters.m_CellBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedCellBias.GetTensorInfo(),
                                              managedCellBias.Map()));
    }
    if (m_BasicParameters.m_OutputGateBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedOutputGateBias.GetTensorInfo(),
                                              managedOutputGateBias.Map()));
    }

    // Add cifg parameters
    if (m_CifgParameters.m_InputToInputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputToInputWeights.GetTensorInfo(),
                                              managedInputToInputWeights.Map()));
    }
    if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
                managedRecurrentToInputWeights.GetTensorInfo(),
                managedRecurrentToInputWeights.Map()));
    }
    if (m_CifgParameters.m_InputGateBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputGateBias.GetTensorInfo(),
                                              managedInputGateBias.Map()));
    }

    // Add peephole parameters
    if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedCellToInputWeights.GetTensorInfo(),
                                              managedCellToInputWeights.Map()));
    }
    if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedCellToForgetWeights.GetTensorInfo(),
                                              managedCellToForgetWeights.Map()));
    }
    if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedCellToOutputWeights.GetTensorInfo(),
                                              managedCellToOutputWeights.Map()));
    }

    // Add projection parameters
    if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedProjectionWeights.GetTensorInfo(),
                                              managedProjectionWeights.Map()));
    }
    if (m_ProjectionParameters.m_ProjectionBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedProjectionBias.GetTensorInfo(),
                                              managedProjectionBias.Map()));
    }

    // Add norm parameters
    if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputLayerNormWeights.GetTensorInfo(),
                                              managedInputLayerNormWeights.Map()));
    }
    if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedForgetLayerNormWeights.GetTensorInfo(),
                                              managedForgetLayerNormWeights.Map()));
    }
    if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedCellLayerNormWeights.GetTensorInfo(),
                                              managedCellLayerNormWeights.Map()));
    }
    if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedOutputLayerNormWeights.GetTensorInfo(),
                                              managedOutputLayerNormWeights.Map()));
    }
    strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
}

} // namespace armnn
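ExecuteStrategy hands every populated constant tensor, the descriptor, and the layer name to an IStrategy implementation. A minimal editor's sketch of such a strategy follows (the class name is invented and the header names are from memory; the ExecuteStrategy signature follows armnn::IStrategy):

#include <armnn/INetwork.hpp>
#include <armnn/IStrategy.hpp>
#include <iostream>
#include <vector>

// Counts and reports the constant tensors exposed by each visited layer,
// e.g. the weights, biases and normalisation vectors of a QLstm layer.
class ConstantReportingStrategy : public armnn::IStrategy
{
public:
    void ExecuteStrategy(const armnn::IConnectableLayer* /*layer*/,
                         const armnn::BaseDescriptor& /*descriptor*/,
                         const std::vector<armnn::ConstTensor>& constants,
                         const char* name,
                         const armnn::LayerBindingId /*id*/ = 0) override
    {
        std::cout << (name ? name : "<unnamed>") << ": "
                  << constants.size() << " constant tensor(s)\n";
    }
};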