ArmNN 21.11 - LstmLayer.cpp
//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "LstmLayer.hpp"

#include "LayerCloneBase.hpp"

#include <armnn/LstmParams.hpp>
#include <armnn/TypesUtils.hpp>

namespace armnn
{

LstmLayer::LstmLayer(const LstmDescriptor& param, const char* name)
    : LayerWithParameters(3, 4, LayerType::Lstm, param, name)
{
}
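// Added note (not in the original file): the 3/4 arguments passed to LayerWithParameters above
// give the layer three input slots and four output slots. This matches the usual ArmNN LSTM
// layout of {input, outputStateIn, cellStateIn} on the input side and
// {scratchBuffer, outputStateOut, cellStateOut, output} on the output side, which is also the
// ordering that InferOutputShapes() below assumes.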

std::unique_ptr<IWorkload> LstmLayer::CreateWorkload(const IWorkloadFactory& factory) const
{
    LstmQueueDescriptor descriptor;

    // Basic parameters
    descriptor.m_InputToForgetWeights = m_BasicParameters.m_InputToForgetWeights.get();
    descriptor.m_InputToCellWeights = m_BasicParameters.m_InputToCellWeights.get();
    descriptor.m_InputToOutputWeights = m_BasicParameters.m_InputToOutputWeights.get();
    descriptor.m_RecurrentToForgetWeights = m_BasicParameters.m_RecurrentToForgetWeights.get();
    descriptor.m_RecurrentToCellWeights = m_BasicParameters.m_RecurrentToCellWeights.get();
    descriptor.m_RecurrentToOutputWeights = m_BasicParameters.m_RecurrentToOutputWeights.get();
    descriptor.m_ForgetGateBias = m_BasicParameters.m_ForgetGateBias.get();
    descriptor.m_CellBias = m_BasicParameters.m_CellBias.get();
    descriptor.m_OutputGateBias = m_BasicParameters.m_OutputGateBias.get();

    // Cifg parameters
    if (!m_Param.m_CifgEnabled)
    {
        descriptor.m_InputToInputWeights = m_CifgParameters.m_InputToInputWeights.get();
        descriptor.m_RecurrentToInputWeights = m_CifgParameters.m_RecurrentToInputWeights.get();
        descriptor.m_InputGateBias = m_CifgParameters.m_InputGateBias.get();
    }

    // Projection parameters
    if (m_Param.m_ProjectionEnabled)
    {
        descriptor.m_ProjectionWeights = m_ProjectionParameters.m_ProjectionWeights.get();
        descriptor.m_ProjectionBias = m_ProjectionParameters.m_ProjectionBias.get();
    }

    // Peephole parameters
    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            descriptor.m_CellToInputWeights = m_PeepholeParameters.m_CellToInputWeights.get();
        }
        descriptor.m_CellToForgetWeights = m_PeepholeParameters.m_CellToForgetWeights.get();
        descriptor.m_CellToOutputWeights = m_PeepholeParameters.m_CellToOutputWeights.get();
    }

    // Layer normalisation parameters
    if (m_Param.m_LayerNormEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            descriptor.m_InputLayerNormWeights = m_LayerNormParameters.m_InputLayerNormWeights.get();
        }
        descriptor.m_ForgetLayerNormWeights = m_LayerNormParameters.m_ForgetLayerNormWeights.get();
        descriptor.m_CellLayerNormWeights = m_LayerNormParameters.m_CellLayerNormWeights.get();
        descriptor.m_OutputLayerNormWeights = m_LayerNormParameters.m_OutputLayerNormWeights.get();
    }

    SetAdditionalInfo(descriptor);

    return factory.CreateLstm(descriptor, PrepInfoAndDesc(descriptor));
}
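// Illustrative sketch (added commentary, not part of the original file): an LSTM layer is
// normally created through the public INetwork API rather than constructed directly, and the
// descriptor flags used above (m_CifgEnabled, m_ProjectionEnabled, m_PeepholeEnabled,
// m_LayerNormEnabled) decide which optional LstmInputParams tensors must be supplied. Roughly:
//
//     armnn::LstmDescriptor desc;
//     desc.m_CifgEnabled       = false;   // input-gate tensors must then be provided
//     desc.m_PeepholeEnabled   = false;
//     desc.m_ProjectionEnabled = false;
//     desc.m_LayerNormEnabled  = false;
//
//     armnn::LstmInputParams params;      // point the ConstTensor members at the weights/biases
//     armnn::INetworkPtr net = armnn::INetwork::Create();
//     armnn::IConnectableLayer* lstm = net->AddLstmLayer(desc, params, "lstm");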

LstmLayer* LstmLayer::Clone(Graph& graph) const
{
    auto layer = CloneBase<LstmLayer>(graph, m_Param, GetName());

    layer->m_BasicParameters.m_InputToForgetWeights = m_BasicParameters.m_InputToForgetWeights ?
        m_BasicParameters.m_InputToForgetWeights : nullptr;
    layer->m_BasicParameters.m_InputToCellWeights = m_BasicParameters.m_InputToCellWeights ?
        m_BasicParameters.m_InputToCellWeights : nullptr;
    layer->m_BasicParameters.m_InputToOutputWeights = m_BasicParameters.m_InputToOutputWeights ?
        m_BasicParameters.m_InputToOutputWeights : nullptr;
    layer->m_BasicParameters.m_RecurrentToForgetWeights = m_BasicParameters.m_RecurrentToForgetWeights ?
        m_BasicParameters.m_RecurrentToForgetWeights : nullptr;
    layer->m_BasicParameters.m_RecurrentToCellWeights = m_BasicParameters.m_RecurrentToCellWeights ?
        m_BasicParameters.m_RecurrentToCellWeights : nullptr;
    layer->m_BasicParameters.m_RecurrentToOutputWeights = m_BasicParameters.m_RecurrentToOutputWeights ?
        m_BasicParameters.m_RecurrentToOutputWeights : nullptr;
    layer->m_BasicParameters.m_ForgetGateBias = m_BasicParameters.m_ForgetGateBias ?
        m_BasicParameters.m_ForgetGateBias : nullptr;
    layer->m_BasicParameters.m_CellBias = m_BasicParameters.m_CellBias ?
        m_BasicParameters.m_CellBias : nullptr;
    layer->m_BasicParameters.m_OutputGateBias = m_BasicParameters.m_OutputGateBias ?
        m_BasicParameters.m_OutputGateBias : nullptr;

    if (!m_Param.m_CifgEnabled)
    {
        layer->m_CifgParameters.m_InputToInputWeights = m_CifgParameters.m_InputToInputWeights ?
            m_CifgParameters.m_InputToInputWeights : nullptr;
        layer->m_CifgParameters.m_RecurrentToInputWeights = m_CifgParameters.m_RecurrentToInputWeights ?
            m_CifgParameters.m_RecurrentToInputWeights : nullptr;
        layer->m_CifgParameters.m_InputGateBias = m_CifgParameters.m_InputGateBias ?
            m_CifgParameters.m_InputGateBias : nullptr;
    }

    if (m_Param.m_ProjectionEnabled)
    {
        layer->m_ProjectionParameters.m_ProjectionWeights = m_ProjectionParameters.m_ProjectionWeights ?
            m_ProjectionParameters.m_ProjectionWeights : nullptr;
        layer->m_ProjectionParameters.m_ProjectionBias = m_ProjectionParameters.m_ProjectionBias ?
            m_ProjectionParameters.m_ProjectionBias : nullptr;
    }

    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            layer->m_PeepholeParameters.m_CellToInputWeights = m_PeepholeParameters.m_CellToInputWeights ?
                m_PeepholeParameters.m_CellToInputWeights : nullptr;
        }
        layer->m_PeepholeParameters.m_CellToForgetWeights = m_PeepholeParameters.m_CellToForgetWeights ?
            m_PeepholeParameters.m_CellToForgetWeights : nullptr;
        layer->m_PeepholeParameters.m_CellToOutputWeights = m_PeepholeParameters.m_CellToOutputWeights ?
            m_PeepholeParameters.m_CellToOutputWeights : nullptr;
    }

    if (m_Param.m_LayerNormEnabled)
    {
        layer->m_LayerNormParameters.m_InputLayerNormWeights = m_LayerNormParameters.m_InputLayerNormWeights ?
            m_LayerNormParameters.m_InputLayerNormWeights : nullptr;
        layer->m_LayerNormParameters.m_ForgetLayerNormWeights = m_LayerNormParameters.m_ForgetLayerNormWeights ?
            m_LayerNormParameters.m_ForgetLayerNormWeights : nullptr;
        layer->m_LayerNormParameters.m_CellLayerNormWeights = m_LayerNormParameters.m_CellLayerNormWeights ?
            m_LayerNormParameters.m_CellLayerNormWeights : nullptr;
        layer->m_LayerNormParameters.m_OutputLayerNormWeights = m_LayerNormParameters.m_OutputLayerNormWeights ?
            m_LayerNormParameters.m_OutputLayerNormWeights : nullptr;
    }

    return std::move(layer);
}
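// Added note: Clone() copies the std::shared_ptr handles of the constant tensors rather than
// duplicating the underlying buffers, so the cloned layer shares the same weight and bias
// storage as the original layer.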

std::vector<TensorShape> LstmLayer::InferOutputShapes(const std::vector<TensorShape>& inputShapes) const
{
    ARMNN_ASSERT(inputShapes.size() == 3);

    // Get input values for validation
    unsigned int batchSize = inputShapes[0][0];
    unsigned int outputSize = inputShapes[1][1];
    unsigned int numUnits = inputShapes[2][1];

    std::vector<TensorShape> outShapes;
    outShapes.push_back(TensorShape({batchSize, numUnits * (m_Param.m_CifgEnabled ? 3 : 4)}));
    outShapes.push_back(TensorShape({batchSize, outputSize}));
    outShapes.push_back(TensorShape({batchSize, numUnits}));
    outShapes.push_back(TensorShape({batchSize, outputSize}));

    return outShapes;
}
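// Worked example of the shape rule above (added commentary): with inputShapes
// {2, 10} (input), {2, 16} (output state in) and {2, 20} (cell state in), batchSize = 2,
// outputSize = 16 and numUnits = 20, so the inferred output shapes are
// {2, 80} (scratch buffer, 4 * numUnits with CIFG disabled, 3 * numUnits otherwise),
// {2, 16}, {2, 20} and {2, 16}.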

void LstmLayer::ValidateTensorShapesFromInputs()
{
    VerifyLayerConnections(3, CHECK_LOCATION());

    const TensorShape& outputShape = GetOutputSlot(0).GetTensorInfo().GetShape();

    VerifyShapeInferenceType(outputShape, m_ShapeInferenceMethod);

    auto inferredShapes = InferOutputShapes( {
        GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape(),
        GetInputSlot(1).GetConnection()->GetTensorInfo().GetShape(),
        GetInputSlot(2).GetConnection()->GetTensorInfo().GetShape()
    });

    ARMNN_ASSERT(inferredShapes.size() == 4);

    // Check if the weights are nullptr
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToForgetWeights != nullptr,
                     "LstmLayer: m_BasicParameters.m_InputToForgetWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToCellWeights != nullptr,
                     "LstmLayer: m_BasicParameters.m_InputToCellWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_InputToOutputWeights != nullptr,
                     "LstmLayer: m_BasicParameters.m_InputToOutputWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToForgetWeights != nullptr,
                     "LstmLayer: m_BasicParameters.m_RecurrentToForgetWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToCellWeights != nullptr,
                     "LstmLayer: m_BasicParameters.m_RecurrentToCellWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_RecurrentToOutputWeights != nullptr,
                     "LstmLayer: m_BasicParameters.m_RecurrentToOutputWeights should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_ForgetGateBias != nullptr,
                     "LstmLayer: m_BasicParameters.m_ForgetGateBias should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_CellBias != nullptr,
                     "LstmLayer: m_BasicParameters.m_CellBias should not be null.");
    ARMNN_ASSERT_MSG(m_BasicParameters.m_OutputGateBias != nullptr,
                     "LstmLayer: m_BasicParameters.m_OutputGateBias should not be null.");

    if (!m_Param.m_CifgEnabled)
    {
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputToInputWeights != nullptr,
                         "LstmLayer: m_CifgParameters.m_InputToInputWeights should not be null.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_RecurrentToInputWeights != nullptr,
                         "LstmLayer: m_CifgParameters.m_RecurrentToInputWeights should not be null.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputGateBias != nullptr,
                         "LstmLayer: m_CifgParameters.m_InputGateBias should not be null.");

        ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "LstmLayer");
    }
    else
    {
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputToInputWeights == nullptr,
            "LstmLayer: m_CifgParameters.m_InputToInputWeights should not have a value when CIFG is enabled.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_RecurrentToInputWeights == nullptr,
            "LstmLayer: m_CifgParameters.m_RecurrentToInputWeights should not have a value when CIFG is enabled.");
        ARMNN_ASSERT_MSG(m_CifgParameters.m_InputGateBias == nullptr,
            "LstmLayer: m_CifgParameters.m_InputGateBias should not have a value when CIFG is enabled.");

        ValidateAndCopyShape(outputShape, inferredShapes[0], m_ShapeInferenceMethod, "LstmLayer");
    }

    if (m_Param.m_ProjectionEnabled)
    {
        ARMNN_ASSERT_MSG(m_ProjectionParameters.m_ProjectionWeights != nullptr,
                         "LstmLayer: m_ProjectionParameters.m_ProjectionWeights should not be null.");
    }

    if (m_Param.m_PeepholeEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToInputWeights != nullptr,
                             "LstmLayer: m_PeepholeParameters.m_CellToInputWeights should not be null "
                             "when Peephole is enabled and CIFG is disabled.");
        }
        ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToForgetWeights != nullptr,
                         "LstmLayer: m_PeepholeParameters.m_CellToForgetWeights should not be null.");
        ARMNN_ASSERT_MSG(m_PeepholeParameters.m_CellToOutputWeights != nullptr,
                         "LstmLayer: m_PeepholeParameters.m_CellToOutputWeights should not be null.");
    }

    ValidateAndCopyShape(
        GetOutputSlot(1).GetTensorInfo().GetShape(), inferredShapes[1], m_ShapeInferenceMethod, "LstmLayer", 1);
    ValidateAndCopyShape(
        GetOutputSlot(2).GetTensorInfo().GetShape(), inferredShapes[2], m_ShapeInferenceMethod, "LstmLayer", 2);
    ValidateAndCopyShape(
        GetOutputSlot(3).GetTensorInfo().GetShape(), inferredShapes[3], m_ShapeInferenceMethod, "LstmLayer", 3);

    if (m_Param.m_LayerNormEnabled)
    {
        if (!m_Param.m_CifgEnabled)
        {
            ARMNN_ASSERT_MSG(m_LayerNormParameters.m_InputLayerNormWeights != nullptr,
                             "LstmLayer: m_LayerNormParameters.m_inputLayerNormWeights should not be null.");
        }
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr,
                         "LstmLayer: m_LayerNormParameters.m_forgetLayerNormWeights should not be null.");
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_CellLayerNormWeights != nullptr,
                         "LstmLayer: m_LayerNormParameters.m_cellLayerNormWeights should not be null.");
        ARMNN_ASSERT_MSG(m_LayerNormParameters.m_OutputLayerNormWeights != nullptr,
                         "LstmLayer: m_LayerNormParameters.m_outputLayerNormWeights should not be null.");
    }
}
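// Added summary: the checks above enforce that the nine basic weight/bias tensors are always
// present, that the input-gate tensors are present exactly when CIFG is disabled, and that the
// optional projection, peephole and layer-normalisation tensors are present whenever the
// corresponding descriptor flag is set; the shapes of all four output slots are then validated
// (or inferred) against InferOutputShapes().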

Layer::ConstantTensors LstmLayer::GetConstantTensorsByRef()
{
    return {m_BasicParameters.m_InputToForgetWeights,
            m_BasicParameters.m_InputToCellWeights,
            m_BasicParameters.m_InputToOutputWeights,
            m_BasicParameters.m_RecurrentToForgetWeights,
            m_BasicParameters.m_RecurrentToCellWeights,
            m_BasicParameters.m_RecurrentToOutputWeights,
            m_BasicParameters.m_ForgetGateBias,
            m_BasicParameters.m_CellBias,
            m_BasicParameters.m_OutputGateBias,

            // Cifg parameters
            m_CifgParameters.m_InputToInputWeights,
            m_CifgParameters.m_RecurrentToInputWeights,
            m_CifgParameters.m_InputGateBias,

            // Projection parameters
            m_ProjectionParameters.m_ProjectionWeights,
            m_ProjectionParameters.m_ProjectionBias,

            // Peephole parameters
            m_PeepholeParameters.m_CellToInputWeights,
            m_PeepholeParameters.m_CellToForgetWeights,
            m_PeepholeParameters.m_CellToOutputWeights,

            // Layer normalisation parameters
            m_LayerNormParameters.m_InputLayerNormWeights,
            m_LayerNormParameters.m_ForgetLayerNormWeights,
            m_LayerNormParameters.m_CellLayerNormWeights,
            m_LayerNormParameters.m_OutputLayerNormWeights};
}
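// Added note (an assumption about intent, not stated in this file): these references expose
// every constant tensor the layer may own, presumably so that graph-level passes such as weight
// conversion can inspect or replace the handles in place; empty shared_ptr entries simply
// correspond to optional parameters that are not used.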

ARMNN_NO_DEPRECATE_WARN_BEGIN
void LstmLayer::Accept(ILayerVisitor& visitor) const
{
    LstmInputParams inputParams;
    ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
    ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
    ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
    ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
    ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
    ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
    ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);

    // Cifg parameters
    ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
    ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
    ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);

    // Projection parameters
    ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
    ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);

    // Peephole parameters
    ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
    ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
    ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);

    // Layer normalisation parameters
    ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
    ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
    ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
    ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);

    ConstTensor inputToInputWeightsTensor;
    if (m_CifgParameters.m_InputToInputWeights != nullptr)
    {
        ConstTensor inputToInputWeightsTensorCopy(managedInputToInputWeights.GetTensorInfo(),
                                                  managedInputToInputWeights.Map());
        inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
        inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
    }
    ConstTensor inputToForgetWeightsTensor;
    if (m_BasicParameters.m_InputToForgetWeights != nullptr)
    {
        ConstTensor inputToForgetWeightsTensorCopy(managedInputToForgetWeights.GetTensorInfo(),
                                                   managedInputToForgetWeights.Map());
        inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
        inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
    }
    ConstTensor inputToCellWeightsTensor;
    if (m_BasicParameters.m_InputToCellWeights != nullptr)
    {
        ConstTensor inputToCellWeightsTensorCopy(managedInputToCellWeights.GetTensorInfo(),
                                                 managedInputToCellWeights.Map());
        inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
        inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
    }
    ConstTensor inputToOutputWeightsTensor;
    if (m_BasicParameters.m_InputToOutputWeights != nullptr)
    {
        ConstTensor inputToOutputWeightsTensorCopy(managedInputToOutputWeights.GetTensorInfo(),
                                                   managedInputToOutputWeights.Map());
        inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
        inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
    }
    ConstTensor recurrentToInputWeightsTensor;
    if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
    {
        ConstTensor recurrentToInputWeightsTensorCopy(
            managedRecurrentToInputWeights.GetTensorInfo(),
            managedRecurrentToInputWeights.Map());
        recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
        inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
    }
    ConstTensor recurrentToForgetWeightsTensor;
    if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
    {
        ConstTensor recurrentToForgetWeightsTensorCopy(
            managedRecurrentToForgetWeights.GetTensorInfo(),
            managedRecurrentToForgetWeights.Map());
        recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
        inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
    }
    ConstTensor recurrentToCellWeightsTensor;
    if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
    {
        ConstTensor recurrentToCellWeightsTensorCopy(
            managedRecurrentToCellWeights.GetTensorInfo(),
            managedRecurrentToCellWeights.Map());
        recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
        inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
    }
    ConstTensor recurrentToOutputWeightsTensor;
    if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
    {
        ConstTensor recurrentToOutputWeightsTensorCopy(
            managedRecurrentToOutputWeights.GetTensorInfo(),
            managedRecurrentToOutputWeights.Map());
        recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
        inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
    }
    ConstTensor cellToInputWeightsTensor;
    if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
    {
        ConstTensor cellToInputWeightsTensorCopy(managedCellToInputWeights.GetTensorInfo(),
                                                 managedCellToInputWeights.Map());
        cellToInputWeightsTensor = cellToInputWeightsTensorCopy;
        inputParams.m_CellToInputWeights = &cellToInputWeightsTensor;
    }
    ConstTensor cellToForgetWeightsTensor;
    if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
    {
        ConstTensor cellToForgetWeightsTensorCopy(managedCellToForgetWeights.GetTensorInfo(),
                                                  managedCellToForgetWeights.Map());
        cellToForgetWeightsTensor = cellToForgetWeightsTensorCopy;
        inputParams.m_CellToForgetWeights = &cellToForgetWeightsTensor;
    }
    ConstTensor cellToOutputWeightsTensor;
    if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
    {
        ConstTensor cellToOutputWeightsTensorCopy(managedCellToOutputWeights.GetTensorInfo(),
                                                  managedCellToOutputWeights.Map());
        cellToOutputWeightsTensor = cellToOutputWeightsTensorCopy;
        inputParams.m_CellToOutputWeights = &cellToOutputWeightsTensor;
    }
    ConstTensor inputGateBiasTensor;
    if (m_CifgParameters.m_InputGateBias != nullptr)
    {
        ConstTensor inputGateBiasTensorCopy(managedInputGateBias.GetTensorInfo(),
                                            managedInputGateBias.Map());
        inputGateBiasTensor = inputGateBiasTensorCopy;
        inputParams.m_InputGateBias = &inputGateBiasTensor;
    }
    ConstTensor forgetGateBiasTensor;
    if (m_BasicParameters.m_ForgetGateBias != nullptr)
    {
        ConstTensor forgetGateBiasTensorCopy(managedForgetGateBias.GetTensorInfo(),
                                             managedForgetGateBias.Map());
        forgetGateBiasTensor = forgetGateBiasTensorCopy;
        inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
    }
    ConstTensor cellBiasTensor;
    if (m_BasicParameters.m_CellBias != nullptr)
    {
        ConstTensor cellBiasTensorCopy(managedCellBias.GetTensorInfo(),
                                       managedCellBias.Map());
        cellBiasTensor = cellBiasTensorCopy;
        inputParams.m_CellBias = &cellBiasTensor;
    }
    ConstTensor outputGateBias;
    if (m_BasicParameters.m_OutputGateBias != nullptr)
    {
        ConstTensor outputGateBiasCopy(managedOutputGateBias.GetTensorInfo(),
                                       managedOutputGateBias.Map());
        outputGateBias = outputGateBiasCopy;
        inputParams.m_OutputGateBias = &outputGateBias;
    }
    ConstTensor projectionWeightsTensor;
    if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
    {
        ConstTensor projectionWeightsTensorCopy(managedProjectionWeights.GetTensorInfo(),
                                                managedProjectionWeights.Map());
        projectionWeightsTensor = projectionWeightsTensorCopy;
        inputParams.m_ProjectionWeights = &projectionWeightsTensor;
    }
    ConstTensor projectionBiasTensor;
    if (m_ProjectionParameters.m_ProjectionBias != nullptr)
    {
        ConstTensor projectionBiasTensorCopy(managedProjectionBias.GetTensorInfo(),
                                             managedProjectionBias.Map());
        projectionBiasTensor = projectionBiasTensorCopy;
        inputParams.m_ProjectionBias = &projectionBiasTensor;
    }
    ConstTensor inputLayerNormTensor;
    if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
    {
        ConstTensor inputLayerNormTensorCopy(managedInputLayerNormWeights.GetTensorInfo(),
                                             managedInputLayerNormWeights.Map());
        inputLayerNormTensor = inputLayerNormTensorCopy;
        inputParams.m_InputLayerNormWeights = &inputLayerNormTensor;
    }
    ConstTensor forgetLayerNormTensor;
    if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
    {
        ConstTensor forgetLayerNormTensorCopy(managedForgetLayerNormWeights.GetTensorInfo(),
                                              managedForgetLayerNormWeights.Map());
        forgetLayerNormTensor = forgetLayerNormTensorCopy;
        inputParams.m_ForgetLayerNormWeights = &forgetLayerNormTensor;
    }
    ConstTensor cellLayerNormTensor;
    if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
    {
        ConstTensor cellLayerNormTensorCopy(managedCellLayerNormWeights.GetTensorInfo(),
                                            managedCellLayerNormWeights.Map());
        cellLayerNormTensor = cellLayerNormTensorCopy;
        inputParams.m_CellLayerNormWeights = &cellLayerNormTensor;
    }
    ConstTensor outputLayerNormTensor;
    if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
    {
        ConstTensor outputLayerNormTensorCopy(managedOutputLayerNormWeights.GetTensorInfo(),
                                              managedOutputLayerNormWeights.Map());
        outputLayerNormTensor = outputLayerNormTensorCopy;
        inputParams.m_OutputLayerNormWeights = &outputLayerNormTensor;
    }

    visitor.VisitLstmLayer(this, GetParameters(), inputParams, GetName());
}
ARMNN_NO_DEPRECATE_WARN_END

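// Added note: as in Accept() above, the ManagedConstTensorHandle wrappers used below map each
// constant tensor for the duration of the call and unmap it again when the handle goes out of
// scope, so the ConstTensor views handed to the visitor or strategy are only valid inside the
// function.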
void LstmLayer::ExecuteStrategy(IStrategy& strategy) const
{
    std::vector<ConstTensor> constTensors;

    LstmDescriptor descriptor = GetParameters();

    ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
    ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
    ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
    ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
    ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
    ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
    ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);

    // Cifg parameters
    ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
    ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
    ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);

    // Projection parameters
    ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
    ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);

    // Peephole parameters
    ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
    ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
    ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);

    // Layer normalisation parameters
    ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
    ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
    ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
    ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);

    // First add mandatory/basic parameters
    if (m_BasicParameters.m_InputToForgetWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputToForgetWeights.GetTensorInfo(),
                                              managedInputToForgetWeights.Map()));
    }
    if (m_BasicParameters.m_InputToCellWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputToCellWeights.GetTensorInfo(),
                                              managedInputToCellWeights.Map()));
    }
    if (m_BasicParameters.m_InputToOutputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedInputToOutputWeights.GetTensorInfo(),
                                              managedInputToOutputWeights.Map()));
    }
    if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
            managedRecurrentToForgetWeights.GetTensorInfo(),
            managedRecurrentToForgetWeights.Map()));
    }
    if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
            managedRecurrentToCellWeights.GetTensorInfo(),
            managedRecurrentToCellWeights.Map()));
    }
    if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
    {
        constTensors.emplace_back(ConstTensor(
            managedRecurrentToOutputWeights.GetTensorInfo(),
            managedRecurrentToOutputWeights.Map()));
    }
    if (m_BasicParameters.m_ForgetGateBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedForgetGateBias.GetTensorInfo(),
                                              managedForgetGateBias.Map()));
    }
    if (m_BasicParameters.m_CellBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedCellBias.GetTensorInfo(),
                                              managedCellBias.Map()));
    }
    if (m_BasicParameters.m_OutputGateBias != nullptr)
    {
        constTensors.emplace_back(ConstTensor(managedOutputGateBias.GetTensorInfo(),
                                              managedOutputGateBias.Map()));
    }

    // Add cifg parameters
    if (!descriptor.m_CifgEnabled)
    {
        if (m_CifgParameters.m_InputToInputWeights != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedInputToInputWeights.GetTensorInfo(),
                                                  managedInputToInputWeights.Map()));
        }
        if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
        {
            constTensors.emplace_back(ConstTensor(
                managedRecurrentToInputWeights.GetTensorInfo(),
                managedRecurrentToInputWeights.Map()));
        }
        if (m_CifgParameters.m_InputGateBias != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedInputGateBias.GetTensorInfo(),
                                                  managedInputGateBias.Map()));
        }
    }

    // Add peephole parameters
    if (descriptor.m_PeepholeEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
            {
                constTensors.emplace_back(ConstTensor(managedCellToInputWeights.GetTensorInfo(),
                                                      managedCellToInputWeights.Map()));
            }
        }
        if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedCellToForgetWeights.GetTensorInfo(),
                                                  managedCellToForgetWeights.Map()));
        }
        if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedCellToOutputWeights.GetTensorInfo(),
                                                  managedCellToOutputWeights.Map()));
        }
    }

    // Add projection parameters
    if (descriptor.m_ProjectionEnabled)
    {
        if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedProjectionWeights.GetTensorInfo(),
                                                  managedProjectionWeights.Map()));
        }
        if (m_ProjectionParameters.m_ProjectionBias != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedProjectionBias.GetTensorInfo(),
                                                  managedProjectionBias.Map()));
        }
    }

    // Add norm parameters
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
            {
                constTensors.emplace_back(ConstTensor(managedInputLayerNormWeights.GetTensorInfo(),
                                                      managedInputLayerNormWeights.Map()));
            }
        }
        if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedForgetLayerNormWeights.GetTensorInfo(),
                                                  managedForgetLayerNormWeights.Map()));
        }
        if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedCellLayerNormWeights.GetTensorInfo(),
                                                  managedCellLayerNormWeights.Map()));
        }
        if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
        {
            constTensors.emplace_back(ConstTensor(managedOutputLayerNormWeights.GetTensorInfo(),
                                                  managedOutputLayerNormWeights.Map()));
        }
    }

    strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
}

} // namespace armnn