<div class="header">
  <div class="headertitle">
<div class="title">LstmTestImpl.cpp</div>  </div>
</div><!--header-->
<div class="contents">
<a href="_lstm_test_impl_8cpp.xhtml">Go to the documentation of this file.</a><div class="fragment"><div class="line"><a name="l00001"></a><span class="lineno">    1</span>&#160;<span class="comment">//</span></div><div class="line"><a name="l00002"></a><span class="lineno">    2</span>&#160;<span class="comment">// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.</span></div><div class="line"><a name="l00003"></a><span class="lineno">    3</span>&#160;<span class="comment">// SPDX-License-Identifier: MIT</span></div><div class="line"><a name="l00004"></a><span class="lineno">    4</span>&#160;<span class="comment">//</span></div><div class="line"><a name="l00005"></a><span class="lineno">    5</span>&#160;</div><div class="line"><a name="l00006"></a><span class="lineno">    6</span>&#160;<span class="preprocessor">#include &quot;<a class="code" href="_lstm_test_impl_8hpp.xhtml">LstmTestImpl.hpp</a>&quot;</span></div><div class="line"><a name="l00007"></a><span class="lineno">    7</span>&#160;</div><div class="line"><a name="l00008"></a><span class="lineno">    8</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_quantize_helper_8hpp.xhtml">QuantizeHelper.hpp</a>&gt;</span></div><div class="line"><a name="l00009"></a><span class="lineno">    9</span>&#160;</div><div class="line"><a name="l00010"></a><span class="lineno">   10</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_numeric_cast_8hpp.xhtml">armnn/utility/NumericCast.hpp</a>&gt;</span></div><div class="line"><a name="l00011"></a><span class="lineno">   11</span>&#160;</div><div class="line"><a name="l00012"></a><span class="lineno">   12</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_cpu_tensor_handle_8hpp.xhtml">backendsCommon/CpuTensorHandle.hpp</a>&gt;</span></div><div class="line"><a name="l00013"></a><span class="lineno">   13</span>&#160;</div><div class="line"><a name="l00014"></a><span class="lineno">   14</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_tensor_copy_utils_8hpp.xhtml">backendsCommon/test/TensorCopyUtils.hpp</a>&gt;</span></div><div class="line"><a name="l00015"></a><span class="lineno">   15</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_workload_test_utils_8hpp.xhtml">backendsCommon/test/WorkloadTestUtils.hpp</a>&gt;</span></div><div class="line"><a name="l00016"></a><span class="lineno">   16</span>&#160;</div><div class="line"><a name="l00017"></a><span class="lineno">   17</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_decoders_8hpp.xhtml">reference/workloads/Decoders.hpp</a>&gt;</span></div><div class="line"><a name="l00018"></a><span class="lineno">   18</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_encoders_8hpp.xhtml">reference/workloads/Encoders.hpp</a>&gt;</span></div><div class="line"><a name="l00019"></a><span class="lineno">   19</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_lstm_utils_8hpp.xhtml">reference/workloads/LstmUtils.hpp</a>&gt;</span></div><div class="line"><a name="l00020"></a><span class="lineno">   20</span>&#160;</div><div class="line"><a name="l00021"></a><span class="lineno">   21</span>&#160;<span class="preprocessor">#include &lt;<a class="code" href="_tensor_helpers_8hpp.xhtml">test/TensorHelpers.hpp</a>&gt;</span></div><div class="line"><a name="l00022"></a><span class="lineno">   22</span>&#160;</div><div class="line"><a name="l00023"></a><span 
class="lineno">   23</span>&#160;<span class="preprocessor">#include &lt;boost/multi_array.hpp&gt;</span></div><div class="line"><a name="l00024"></a><span class="lineno">   24</span>&#160;</div><div class="line"><a name="l00025"></a><span class="lineno">   25</span>&#160;<span class="keyword">namespace</span></div><div class="line"><a name="l00026"></a><span class="lineno">   26</span>&#160;{</div><div class="line"><a name="l00027"></a><span class="lineno">   27</span>&#160;</div><div class="line"><a name="l00028"></a><span class="lineno">   28</span>&#160;<span class="keyword">template</span>&lt;armnn::DataType ArmnnType, <span class="keyword">typename</span> T = armnn::ResolveType&lt;ArmnnType&gt;&gt;</div><div class="line"><a name="l00029"></a><span class="lineno">   29</span>&#160;<span class="keywordtype">void</span> LstmUtilsVectorBatchVectorAddTestImpl(</div><div class="line"><a name="l00030"></a><span class="lineno">   30</span>&#160;        boost::multi_array&lt;float, 1&gt;&amp; vec,</div><div class="line"><a name="l00031"></a><span class="lineno">   31</span>&#160;        boost::multi_array&lt;float, 2&gt;&amp; batchVec,</div><div class="line"><a name="l00032"></a><span class="lineno">   32</span>&#160;        uint32_t vSize,</div><div class="line"><a name="l00033"></a><span class="lineno">   33</span>&#160;        uint32_t nBatch,</div><div class="line"><a name="l00034"></a><span class="lineno">   34</span>&#160;        boost::multi_array&lt;float, 2&gt;&amp; expectedOutput )</div><div class="line"><a name="l00035"></a><span class="lineno">   35</span>&#160;{</div><div class="line"><a name="l00036"></a><span class="lineno">   36</span>&#160;    <span class="keywordtype">float</span> qScale = 0.0f;</div><div class="line"><a name="l00037"></a><span class="lineno">   37</span>&#160;    int32_t qOffset = 0;</div><div class="line"><a name="l00038"></a><span class="lineno">   38</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo({nBatch, vSize}, ArmnnType,  qScale, qOffset );</div><div class="line"><a name="l00039"></a><span class="lineno">   39</span>&#160;</div><div class="line"><a name="l00040"></a><span class="lineno">   40</span>&#160;    <span class="comment">// Make encoder and decoder</span></div><div class="line"><a name="l00041"></a><span class="lineno">   41</span>&#160;    std::unique_ptr&lt;armnn::Decoder&lt;float&gt;&gt; vecDecoder = armnn::MakeDecoder&lt;float&gt;(tensorInfo, vec.data());</div><div class="line"><a name="l00042"></a><span class="lineno">   42</span>&#160;    std::unique_ptr&lt;armnn::Decoder&lt;float&gt;&gt; batchVecDecoder = armnn::MakeDecoder&lt;float&gt;(tensorInfo, batchVec.data());</div><div class="line"><a name="l00043"></a><span class="lineno">   43</span>&#160;    std::unique_ptr&lt;armnn::Encoder&lt;float&gt;&gt; batchVecEncoder = armnn::MakeEncoder&lt;float&gt;(tensorInfo, batchVec.data());</div><div class="line"><a name="l00044"></a><span class="lineno">   44</span>&#160;</div><div class="line"><a name="l00045"></a><span class="lineno">   45</span>&#160;    <a class="code" href="_lstm_utils_8cpp.xhtml#a389c4bbafd0fff7060cbb183f20a2134">VectorBatchVectorAdd</a>(*vecDecoder, vSize, *batchVecDecoder, nBatch, *batchVecEncoder);</div><div class="line"><a name="l00046"></a><span class="lineno">   46</span>&#160;</div><div class="line"><a name="l00047"></a><span class="lineno">   47</span>&#160;    <span class="comment">// check shape and compare values</span></div><div class="line"><a 
name="l00048"></a><span class="lineno">   48</span>&#160;    BOOST_TEST(<a class="code" href="_tensor_helpers_8hpp.xhtml#aa5a4b75c5fa1d312b4f3615b2315ff58">CompareTensors</a>(batchVec, expectedOutput));</div><div class="line"><a name="l00049"></a><span class="lineno">   49</span>&#160;</div><div class="line"><a name="l00050"></a><span class="lineno">   50</span>&#160;    <span class="comment">// check if iterator is back at start position</span></div><div class="line"><a name="l00051"></a><span class="lineno">   51</span>&#160;    batchVecEncoder-&gt;Set(1.0f);</div><div class="line"><a name="l00052"></a><span class="lineno">   52</span>&#160;    BOOST_TEST(batchVec[0][0] == 1.0f);</div><div class="line"><a name="l00053"></a><span class="lineno">   53</span>&#160;}</div><div class="line"><a name="l00054"></a><span class="lineno">   54</span>&#160;</div><div class="line"><a name="l00055"></a><span class="lineno">   55</span>&#160;<span class="keyword">template</span>&lt;armnn::DataType ArmnnType, <span class="keyword">typename</span> T = armnn::ResolveType&lt;ArmnnType&gt;&gt;</div><div class="line"><a name="l00056"></a><span class="lineno">   56</span>&#160;<span class="keywordtype">void</span> LstmUtilsZeroVectorTestImpl(</div><div class="line"><a name="l00057"></a><span class="lineno">   57</span>&#160;        boost::multi_array&lt;float, 1&gt;&amp; input,</div><div class="line"><a name="l00058"></a><span class="lineno">   58</span>&#160;        uint32_t vSize,</div><div class="line"><a name="l00059"></a><span class="lineno">   59</span>&#160;        boost::multi_array&lt;float, 1&gt;&amp; expectedOutput)</div><div class="line"><a name="l00060"></a><span class="lineno">   60</span>&#160;{</div><div class="line"><a name="l00061"></a><span class="lineno">   61</span>&#160;    <span class="keywordtype">float</span> qScale = 0.0f;</div><div class="line"><a name="l00062"></a><span class="lineno">   62</span>&#160;    int32_t qOffset = 0;</div><div class="line"><a name="l00063"></a><span class="lineno">   63</span>&#160;</div><div class="line"><a name="l00064"></a><span class="lineno">   64</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo({vSize}, ArmnnType,  qScale, qOffset );</div><div class="line"><a name="l00065"></a><span class="lineno">   65</span>&#160;</div><div class="line"><a name="l00066"></a><span class="lineno">   66</span>&#160;    <span class="comment">// Make encoder for input</span></div><div class="line"><a name="l00067"></a><span class="lineno">   67</span>&#160;    std::unique_ptr&lt;armnn::Encoder&lt;float&gt;&gt; outputEncoder = armnn::MakeEncoder&lt;float&gt;(tensorInfo, input.data());</div><div class="line"><a name="l00068"></a><span class="lineno">   68</span>&#160;</div><div class="line"><a name="l00069"></a><span class="lineno">   69</span>&#160;    <span class="comment">// call ZeroVector</span></div><div class="line"><a name="l00070"></a><span class="lineno">   70</span>&#160;    <a class="code" href="_lstm_utils_8cpp.xhtml#a4c20bc573b70e89327b334f924da97b5">ZeroVector</a>(*outputEncoder, vSize);</div><div class="line"><a name="l00071"></a><span class="lineno">   71</span>&#160;</div><div class="line"><a name="l00072"></a><span class="lineno">   72</span>&#160;    <span class="comment">// check shape and compare values</span></div><div class="line"><a name="l00073"></a><span class="lineno">   73</span>&#160;    BOOST_TEST(<a class="code" 
href="_tensor_helpers_8hpp.xhtml#aa5a4b75c5fa1d312b4f3615b2315ff58">CompareTensors</a>(input, expectedOutput));</div><div class="line"><a name="l00074"></a><span class="lineno">   74</span>&#160;</div><div class="line"><a name="l00075"></a><span class="lineno">   75</span>&#160;    <span class="comment">// check if iterator is back at start position</span></div><div class="line"><a name="l00076"></a><span class="lineno">   76</span>&#160;    outputEncoder-&gt;Set(1.0f);</div><div class="line"><a name="l00077"></a><span class="lineno">   77</span>&#160;    BOOST_TEST(input[0] == 1.0f);</div><div class="line"><a name="l00078"></a><span class="lineno">   78</span>&#160;</div><div class="line"><a name="l00079"></a><span class="lineno">   79</span>&#160;}</div><div class="line"><a name="l00080"></a><span class="lineno">   80</span>&#160;</div><div class="line"><a name="l00081"></a><span class="lineno">   81</span>&#160;<span class="keyword">template</span>&lt;armnn::DataType ArmnnType, <span class="keyword">typename</span> T = armnn::ResolveType&lt;ArmnnType&gt;&gt;</div><div class="line"><a name="l00082"></a><span class="lineno">   82</span>&#160;<span class="keywordtype">void</span> LstmUtilsMeanStddevNormalizationTestImpl(</div><div class="line"><a name="l00083"></a><span class="lineno">   83</span>&#160;        boost::multi_array&lt;float, 2&gt;&amp; input,</div><div class="line"><a name="l00084"></a><span class="lineno">   84</span>&#160;        uint32_t vSize,</div><div class="line"><a name="l00085"></a><span class="lineno">   85</span>&#160;        uint32_t nBatch,</div><div class="line"><a name="l00086"></a><span class="lineno">   86</span>&#160;        boost::multi_array&lt;float, 2&gt;&amp; expectedOutput)</div><div class="line"><a name="l00087"></a><span class="lineno">   87</span>&#160;{</div><div class="line"><a name="l00088"></a><span class="lineno">   88</span>&#160;    <span class="keywordtype">float</span> qScale = 0.0f;</div><div class="line"><a name="l00089"></a><span class="lineno">   89</span>&#160;    int32_t qOffset = 0;</div><div class="line"><a name="l00090"></a><span class="lineno">   90</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo({nBatch, vSize}, ArmnnType,  qScale, qOffset );</div><div class="line"><a name="l00091"></a><span class="lineno">   91</span>&#160;</div><div class="line"><a name="l00092"></a><span class="lineno">   92</span>&#160;    <span class="comment">// Make encoder and decoder for input</span></div><div class="line"><a name="l00093"></a><span class="lineno">   93</span>&#160;    std::unique_ptr&lt;armnn::Decoder&lt;float&gt;&gt; inputDecoder = armnn::MakeDecoder&lt;float&gt;(tensorInfo, input.data());</div><div class="line"><a name="l00094"></a><span class="lineno">   94</span>&#160;    std::unique_ptr&lt;armnn::Encoder&lt;float&gt;&gt; outputEncoder = armnn::MakeEncoder&lt;float&gt;(tensorInfo, input.data());</div><div class="line"><a name="l00095"></a><span class="lineno">   95</span>&#160;</div><div class="line"><a name="l00096"></a><span class="lineno">   96</span>&#160;    <a class="code" href="_lstm_utils_8cpp.xhtml#a0ed27dd6d6125a06bf654080f4184360">MeanStddevNormalization</a>(*inputDecoder, *outputEncoder, vSize, nBatch, 1e-8f);</div><div class="line"><a name="l00097"></a><span class="lineno">   97</span>&#160;</div><div class="line"><a name="l00098"></a><span class="lineno">   98</span>&#160;    <span class="comment">// check shape and compare values</span></div><div class="line"><a 
name="l00099"></a><span class="lineno">   99</span>&#160;    BOOST_TEST(<a class="code" href="_tensor_helpers_8hpp.xhtml#aa5a4b75c5fa1d312b4f3615b2315ff58">CompareTensors</a>(input, expectedOutput));</div><div class="line"><a name="l00100"></a><span class="lineno">  100</span>&#160;</div><div class="line"><a name="l00101"></a><span class="lineno">  101</span>&#160;    <span class="comment">// check if iterator is back at start position</span></div><div class="line"><a name="l00102"></a><span class="lineno">  102</span>&#160;    outputEncoder-&gt;Set(1.0f);</div><div class="line"><a name="l00103"></a><span class="lineno">  103</span>&#160;    BOOST_TEST(input[0][0] == 1.0f);</div><div class="line"><a name="l00104"></a><span class="lineno">  104</span>&#160;}</div><div class="line"><a name="l00105"></a><span class="lineno">  105</span>&#160;</div><div class="line"><a name="l00106"></a><span class="lineno">  106</span>&#160;<span class="keyword">template</span>&lt;armnn::DataType ArmnnType, <span class="keyword">typename</span> T = armnn::ResolveType&lt;ArmnnType&gt;&gt;</div><div class="line"><a name="l00107"></a><span class="lineno">  107</span>&#160;<span class="keywordtype">void</span> LstmUtilsVectorBatchVectorCwiseProductTestImpl(</div><div class="line"><a name="l00108"></a><span class="lineno">  108</span>&#160;        boost::multi_array&lt;float, 1&gt;&amp; vec,</div><div class="line"><a name="l00109"></a><span class="lineno">  109</span>&#160;        boost::multi_array&lt;float, 2&gt;&amp; batchVec,</div><div class="line"><a name="l00110"></a><span class="lineno">  110</span>&#160;        uint32_t vSize,</div><div class="line"><a name="l00111"></a><span class="lineno">  111</span>&#160;        uint32_t nBatch,</div><div class="line"><a name="l00112"></a><span class="lineno">  112</span>&#160;        boost::multi_array&lt;float, 2&gt;&amp; expectedOutput)</div><div class="line"><a name="l00113"></a><span class="lineno">  113</span>&#160;{</div><div class="line"><a name="l00114"></a><span class="lineno">  114</span>&#160;    <span class="keywordtype">float</span> qScale = 0.0f;</div><div class="line"><a name="l00115"></a><span class="lineno">  115</span>&#160;    int32_t qOffset = 0;</div><div class="line"><a name="l00116"></a><span class="lineno">  116</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo({nBatch, vSize}, ArmnnType,  qScale, qOffset );</div><div class="line"><a name="l00117"></a><span class="lineno">  117</span>&#160;</div><div class="line"><a name="l00118"></a><span class="lineno">  118</span>&#160;    <span class="comment">// Make encoder and decoder</span></div><div class="line"><a name="l00119"></a><span class="lineno">  119</span>&#160;    std::unique_ptr&lt;armnn::Decoder&lt;float&gt;&gt; vecDecoder = armnn::MakeDecoder&lt;float&gt;(tensorInfo, vec.data());</div><div class="line"><a name="l00120"></a><span class="lineno">  120</span>&#160;    std::unique_ptr&lt;armnn::Decoder&lt;float&gt;&gt; batchVecDecoder = armnn::MakeDecoder&lt;float&gt;(tensorInfo, batchVec.data());</div><div class="line"><a name="l00121"></a><span class="lineno">  121</span>&#160;    std::unique_ptr&lt;armnn::Encoder&lt;float&gt;&gt; batchVecEncoder = armnn::MakeEncoder&lt;float&gt;(tensorInfo, batchVec.data());</div><div class="line"><a name="l00122"></a><span class="lineno">  122</span>&#160;</div><div class="line"><a name="l00123"></a><span class="lineno">  123</span>&#160;    <a class="code" 
href="_lstm_utils_8cpp.xhtml#a1d7ad9698b02282a57fdb17b3af745f9">VectorBatchVectorCwiseProduct</a>(*vecDecoder, vSize, *batchVecDecoder, nBatch, *batchVecEncoder);</div><div class="line"><a name="l00124"></a><span class="lineno">  124</span>&#160;</div><div class="line"><a name="l00125"></a><span class="lineno">  125</span>&#160;    <span class="comment">// check shape and compare values</span></div><div class="line"><a name="l00126"></a><span class="lineno">  126</span>&#160;    BOOST_TEST(<a class="code" href="_tensor_helpers_8hpp.xhtml#aa5a4b75c5fa1d312b4f3615b2315ff58">CompareTensors</a>(batchVec, expectedOutput));</div><div class="line"><a name="l00127"></a><span class="lineno">  127</span>&#160;</div><div class="line"><a name="l00128"></a><span class="lineno">  128</span>&#160;    <span class="comment">// check if iterator is back at start position</span></div><div class="line"><a name="l00129"></a><span class="lineno">  129</span>&#160;    batchVecEncoder-&gt;Set(1.0f);</div><div class="line"><a name="l00130"></a><span class="lineno">  130</span>&#160;    BOOST_TEST(batchVec[0][0] == 1.0f);</div><div class="line"><a name="l00131"></a><span class="lineno">  131</span>&#160;}</div><div class="line"><a name="l00132"></a><span class="lineno">  132</span>&#160;</div><div class="line"><a name="l00133"></a><span class="lineno">  133</span>&#160;<span class="comment">// Lstm Layer tests:</span></div><div class="line"><a name="l00134"></a><span class="lineno">  134</span>&#160;<span class="comment">// *********************************** //</span></div><div class="line"><a name="l00135"></a><span class="lineno">  135</span>&#160;<span class="keyword">template</span>&lt;armnn::DataType ArmnnType, <span class="keyword">typename</span> T = armnn::ResolveType&lt;ArmnnType&gt;&gt;</div><div class="line"><a name="l00136"></a><span class="lineno">  136</span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a></div><div class="line"><a name="l00137"></a><span class="lineno">  137</span>&#160;LstmNoCifgNoPeepholeNoProjectionTestImpl(</div><div class="line"><a name="l00138"></a><span class="lineno">  138</span>&#160;        <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l00139"></a><span class="lineno">  139</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l00140"></a><span class="lineno">  140</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory,</div><div class="line"><a name="l00141"></a><span class="lineno">  141</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;T, 2&gt;&amp; input,</div><div class="line"><a name="l00142"></a><span class="lineno">  142</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;T, 2&gt;&amp; outputExpected,</div><div class="line"><a name="l00143"></a><span class="lineno">  143</span>&#160;        <span class="keywordtype">float</span> qScale = 0.0f,</div><div class="line"><a name="l00144"></a><span class="lineno">  144</span>&#160;        int32_t qOffset = 0,</div><div class="line"><a name="l00145"></a><span class="lineno">  145</span>&#160;        <a 
class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> constantDataType = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>)</div><div class="line"><a name="l00146"></a><span class="lineno">  146</span>&#160;{</div><div class="line"><a name="l00147"></a><span class="lineno">  147</span>&#160;    <a class="code" href="namespacearmnn.xhtml#a44affeeb090c3c6a3062830562672e84">IgnoreUnused</a>(memoryManager);</div><div class="line"><a name="l00148"></a><span class="lineno">  148</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> batchSize = <a class="code" href="namespacearmnn.xhtml#a375ca3cff9f1b005d1412dc5f3cf5b6e">armnn::numeric_cast</a>&lt;<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span>&gt;(input.shape()[0]);</div><div class="line"><a name="l00149"></a><span class="lineno">  149</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> inputSize = <a class="code" href="namespacearmnn.xhtml#a375ca3cff9f1b005d1412dc5f3cf5b6e">armnn::numeric_cast</a>&lt;<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span>&gt;(input.shape()[1]);</div><div class="line"><a name="l00150"></a><span class="lineno">  150</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> outputSize = <a class="code" href="namespacearmnn.xhtml#a375ca3cff9f1b005d1412dc5f3cf5b6e">armnn::numeric_cast</a>&lt;<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span>&gt;(outputExpected.shape()[1]);</div><div class="line"><a name="l00151"></a><span class="lineno">  151</span>&#160;    <span class="comment">// cellSize and outputSize have the same size when there is no projection.</span></div><div class="line"><a name="l00152"></a><span class="lineno">  152</span>&#160;    <span class="keywordtype">unsigned</span> numUnits = outputSize;</div><div class="line"><a name="l00153"></a><span class="lineno">  153</span>&#160;</div><div class="line"><a name="l00154"></a><span class="lineno">  154</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputTensorInfo({batchSize , inputSize}, ArmnnType,  qScale, qOffset );</div><div class="line"><a name="l00155"></a><span class="lineno">  155</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateInTensorInfo({batchSize , numUnits}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00156"></a><span class="lineno">  156</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateInTensorInfo({batchSize , outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00157"></a><span class="lineno">  157</span>&#160;</div><div class="line"><a name="l00158"></a><span class="lineno">  158</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> scratchBufferTensorInfo({batchSize, numUnits * 4}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00159"></a><span class="lineno">  159</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateOutTensorInfo({batchSize, numUnits}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00160"></a><span class="lineno">  160</span>&#160;    <a class="code" 
href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateOutTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00161"></a><span class="lineno">  161</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00162"></a><span class="lineno">  162</span>&#160;</div><div class="line"><a name="l00163"></a><span class="lineno">  163</span>&#160;    <a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a> ret(outputTensorInfo);</div><div class="line"><a name="l00164"></a><span class="lineno">  164</span>&#160;</div><div class="line"><a name="l00165"></a><span class="lineno">  165</span>&#160;    std::vector&lt;T&gt; inputVector;</div><div class="line"><a name="l00166"></a><span class="lineno">  166</span>&#160;    inputVector.assign(input.data(), input.data() + (batchSize * inputSize));</div><div class="line"><a name="l00167"></a><span class="lineno">  167</span>&#160;    <span class="keyword">auto</span> inputTensor = MakeTensor&lt;T,2&gt;(inputTensorInfo, inputVector);</div><div class="line"><a name="l00168"></a><span class="lineno">  168</span>&#160;</div><div class="line"><a name="l00169"></a><span class="lineno">  169</span>&#160;    std::vector&lt;T&gt; cellStateInVector(batchSize * numUnits, T());</div><div class="line"><a name="l00170"></a><span class="lineno">  170</span>&#160;    <span class="keyword">auto</span> cellStateInTensor = MakeTensor&lt;T,2&gt;(cellStateInTensorInfo, cellStateInVector);</div><div class="line"><a name="l00171"></a><span class="lineno">  171</span>&#160;</div><div class="line"><a name="l00172"></a><span class="lineno">  172</span>&#160;    std::vector&lt;T&gt; outputStateInVector(batchSize * outputSize, T());</div><div class="line"><a name="l00173"></a><span class="lineno">  173</span>&#160;    <span class="keyword">auto</span> outputStateInTensor = MakeTensor&lt;T,2&gt;(outputStateInTensorInfo, outputStateInVector);</div><div class="line"><a name="l00174"></a><span class="lineno">  174</span>&#160;</div><div class="line"><a name="l00175"></a><span class="lineno">  175</span>&#160;    std::vector&lt;T&gt; scratchBufferVector(batchSize * numUnits * 4, T());</div><div class="line"><a name="l00176"></a><span class="lineno">  176</span>&#160;    <span class="keyword">auto</span> scratchBufferTensor = MakeTensor&lt;T,2&gt;(scratchBufferTensorInfo, scratchBufferVector);</div><div class="line"><a name="l00177"></a><span class="lineno">  177</span>&#160;</div><div class="line"><a name="l00178"></a><span class="lineno">  178</span>&#160;    std::vector&lt;T&gt; outputStateOutVector(batchSize * outputSize, T());</div><div class="line"><a name="l00179"></a><span class="lineno">  179</span>&#160;    <span class="keyword">auto</span> outputStateOutTensor = MakeTensor&lt;T,2&gt;(outputStateOutTensorInfo, outputStateOutVector);</div><div class="line"><a name="l00180"></a><span class="lineno">  180</span>&#160;</div><div class="line"><a name="l00181"></a><span class="lineno">  181</span>&#160;    std::vector&lt;T&gt; cellStateOutVector(batchSize * numUnits, T());</div><div class="line"><a name="l00182"></a><span class="lineno">  182</span>&#160;    <span class="keyword">auto</span> cellStateOutTensor = MakeTensor&lt;T,2&gt;(cellStateOutTensorInfo, cellStateOutVector);</div><div class="line"><a name="l00183"></a><span class="lineno">  
183</span>&#160;</div><div class="line"><a name="l00184"></a><span class="lineno">  184</span>&#160;    std::vector&lt;T&gt; outputVector;</div><div class="line"><a name="l00185"></a><span class="lineno">  185</span>&#160;    outputVector.assign(outputExpected.data(), outputExpected.data() + (batchSize * outputSize));</div><div class="line"><a name="l00186"></a><span class="lineno">  186</span>&#160;    ret.outputExpected = MakeTensor&lt;T, 2&gt;(outputTensorInfo, outputVector);</div><div class="line"><a name="l00187"></a><span class="lineno">  187</span>&#160;</div><div class="line"><a name="l00188"></a><span class="lineno">  188</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; inputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(inputTensorInfo);</div><div class="line"><a name="l00189"></a><span class="lineno">  189</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateInHandle =</div><div class="line"><a name="l00190"></a><span class="lineno">  190</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateInTensorInfo);</div><div class="line"><a name="l00191"></a><span class="lineno">  191</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateInHandle =</div><div class="line"><a name="l00192"></a><span class="lineno">  192</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInTensorInfo);</div><div class="line"><a name="l00193"></a><span class="lineno">  193</span>&#160;</div><div class="line"><a name="l00194"></a><span class="lineno">  194</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; scratchHandle =</div><div class="line"><a name="l00195"></a><span class="lineno">  195</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(scratchBufferTensorInfo);</div><div class="line"><a name="l00196"></a><span class="lineno">  196</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateOutHandle =</div><div class="line"><a name="l00197"></a><span class="lineno">  197</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateOutTensorInfo);</div><div class="line"><a name="l00198"></a><span class="lineno">  198</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateOutHandle =</div><div class="line"><a name="l00199"></a><span class="lineno">  199</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateOutTensorInfo);</div><div class="line"><a name="l00200"></a><span class="lineno">  200</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputTensorInfo);</div><div class="line"><a name="l00201"></a><span class="lineno">  201</span>&#160;</div><div class="line"><a name="l00202"></a><span class="lineno">  202</span>&#160;    <a class="code" 
href="structarmnn_1_1_lstm_queue_descriptor.xhtml">armnn::LstmQueueDescriptor</a> data;</div><div class="line"><a name="l00203"></a><span class="lineno">  203</span>&#160;    <a class="code" href="structarmnn_1_1_workload_info.xhtml">armnn::WorkloadInfo</a> <a class="code" href="namespacearmnn.xhtml#a4dc0adc6737b5944e7671bee71788407acaf9b6b99962bf5c2264824231d7a40c">info</a>;</div><div class="line"><a name="l00204"></a><span class="lineno">  204</span>&#160;</div><div class="line"><a name="l00205"></a><span class="lineno">  205</span>&#160;    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());</div><div class="line"><a name="l00206"></a><span class="lineno">  206</span>&#160;    AddInputToWorkload(data, info, outputStateInTensorInfo, outputStateInHandle.get());</div><div class="line"><a name="l00207"></a><span class="lineno">  207</span>&#160;    AddInputToWorkload(data, info, cellStateInTensorInfo, cellStateInHandle.get());</div><div class="line"><a name="l00208"></a><span class="lineno">  208</span>&#160;</div><div class="line"><a name="l00209"></a><span class="lineno">  209</span>&#160;    AddOutputToWorkload(data, info, scratchBufferTensorInfo, scratchHandle.get());</div><div class="line"><a name="l00210"></a><span class="lineno">  210</span>&#160;    AddOutputToWorkload(data, info, outputStateOutTensorInfo, outputStateOutHandle.get());</div><div class="line"><a name="l00211"></a><span class="lineno">  211</span>&#160;    AddOutputToWorkload(data, info, cellStateOutTensorInfo, cellStateOutHandle.get());</div><div class="line"><a name="l00212"></a><span class="lineno">  212</span>&#160;    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());</div><div class="line"><a name="l00213"></a><span class="lineno">  213</span>&#160;</div><div class="line"><a name="l00214"></a><span class="lineno">  214</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo4({numUnits}, constantDataType , qScale, qOffset);</div><div class="line"><a name="l00215"></a><span class="lineno">  215</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo8({numUnits, 2}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l00216"></a><span class="lineno">  216</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo16({numUnits, 4}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l00217"></a><span class="lineno">  217</span>&#160;</div><div class="line"><a name="l00218"></a><span class="lineno">  218</span>&#160;    <span class="keyword">auto</span> inputToInputWeights = MakeTensor&lt;float, 2&gt;(tensorInfo8, {-0.45018822f, -0.02338299f, -0.0870589f,</div><div class="line"><a name="l00219"></a><span class="lineno">  219</span>&#160;                                                                  -0.34550029f, 0.04266912f, -0.15680569f,</div><div class="line"><a name="l00220"></a><span class="lineno">  220</span>&#160;                                                                  -0.34856534f, 0.43890524f});</div><div class="line"><a name="l00221"></a><span class="lineno">  221</span>&#160;</div><div class="line"><a name="l00222"></a><span class="lineno">  222</span>&#160;    <span class="keyword">auto</span> inputToForgetWeights = MakeTensor&lt;float, 2&gt;(tensorInfo8, {0.09701663f, 0.20334584f, -0.50592935f,</div><div class="line"><a name="l00223"></a><span class="lineno">  223</span>&#160;           
                                                        -0.31343272f, -0.40032279f, 0.44781327f,</div><div class="line"><a name="l00224"></a><span class="lineno">  224</span>&#160;                                                                   0.01387155f, -0.35593212f});</div><div class="line"><a name="l00225"></a><span class="lineno">  225</span>&#160;</div><div class="line"><a name="l00226"></a><span class="lineno">  226</span>&#160;    <span class="keyword">auto</span> inputToCellWeights = MakeTensor&lt;float, 2&gt;(tensorInfo8, {-0.50013041f, 0.1370284f, 0.11810488f, 0.2013163f,</div><div class="line"><a name="l00227"></a><span class="lineno">  227</span>&#160;                                                                 -0.20583314f, 0.44344562f, 0.22077113f,</div><div class="line"><a name="l00228"></a><span class="lineno">  228</span>&#160;                                                                 -0.29909778f});</div><div class="line"><a name="l00229"></a><span class="lineno">  229</span>&#160;</div><div class="line"><a name="l00230"></a><span class="lineno">  230</span>&#160;    <span class="keyword">auto</span> inputToOutputWeights = MakeTensor&lt;float, 2&gt;(tensorInfo8, {-0.25065863f, -0.28290087f, 0.04613829f,</div><div class="line"><a name="l00231"></a><span class="lineno">  231</span>&#160;                                                                   0.40525138f, 0.44272184f, 0.03897077f,</div><div class="line"><a name="l00232"></a><span class="lineno">  232</span>&#160;                                                                   -0.1556896f, 0.19487578f});</div><div class="line"><a name="l00233"></a><span class="lineno">  233</span>&#160;</div><div class="line"><a name="l00234"></a><span class="lineno">  234</span>&#160;    <span class="keyword">auto</span> recurrentToInputWeights = MakeTensor&lt;float, 2&gt;(tensorInfo16, {-0.0063535f, -0.2042388f, 0.31454784f,</div><div class="line"><a name="l00235"></a><span class="lineno">  235</span>&#160;                                                                       -0.35746509f, 0.28902304f, 0.08183324f,</div><div class="line"><a name="l00236"></a><span class="lineno">  236</span>&#160;                                                                       -0.16555229f, 0.02286911f, -0.13566875f,</div><div class="line"><a name="l00237"></a><span class="lineno">  237</span>&#160;                                                                       0.03034258f, 0.48091322f, -0.12528998f,</div><div class="line"><a name="l00238"></a><span class="lineno">  238</span>&#160;                                                                       0.24077177f, -0.51332325f, -0.33502164f,</div><div class="line"><a name="l00239"></a><span class="lineno">  239</span>&#160;                                                                       0.10629296f});</div><div class="line"><a name="l00240"></a><span class="lineno">  240</span>&#160;</div><div class="line"><a name="l00241"></a><span class="lineno">  241</span>&#160;    <span class="keyword">auto</span> recurrentToForgetWeights = MakeTensor&lt;float, 2&gt;(tensorInfo16, {-0.48684245f, -0.06655136f, 0.42224967f,</div><div class="line"><a name="l00242"></a><span class="lineno">  242</span>&#160;                                                                        0.2112639f, 0.27654213f, 0.20864892f,</div><div class="line"><a name="l00243"></a><span class="lineno">  243</span>&#160;                                                                        
-0.07646349f, 0.45877004f, 0.00141793f,</div><div class="line"><a name="l00244"></a><span class="lineno">  244</span>&#160;                                                                        -0.14609534f, 0.36447752f, 0.09196436f,</div><div class="line"><a name="l00245"></a><span class="lineno">  245</span>&#160;                                                                        0.28053468f, 0.01560611f, -0.20127171f,</div><div class="line"><a name="l00246"></a><span class="lineno">  246</span>&#160;                                                                        -0.01140004f});</div><div class="line"><a name="l00247"></a><span class="lineno">  247</span>&#160;</div><div class="line"><a name="l00248"></a><span class="lineno">  248</span>&#160;    <span class="keyword">auto</span> recurrentToCellWeights = MakeTensor&lt;float, 2&gt;(tensorInfo16, {-0.3407414f, 0.24443203f, -0.2078532f,</div><div class="line"><a name="l00249"></a><span class="lineno">  249</span>&#160;                                                                      0.26320225f, 0.05695659f, -0.00123841f,</div><div class="line"><a name="l00250"></a><span class="lineno">  250</span>&#160;                                                                      -0.4744786f, -0.35869038f, -0.06418842f,</div><div class="line"><a name="l00251"></a><span class="lineno">  251</span>&#160;                                                                      -0.13502428f, -0.501764f, 0.22830659f,</div><div class="line"><a name="l00252"></a><span class="lineno">  252</span>&#160;                                                                      -0.46367589f, 0.26016325f, -0.03894562f,</div><div class="line"><a name="l00253"></a><span class="lineno">  253</span>&#160;                                                                      -0.16368064f});</div><div class="line"><a name="l00254"></a><span class="lineno">  254</span>&#160;</div><div class="line"><a name="l00255"></a><span class="lineno">  255</span>&#160;    <span class="keyword">auto</span> recurrentToOutputWeights = MakeTensor&lt;float, 2&gt;(tensorInfo16, {0.43385774f, -0.17194885f, 0.2718237f,</div><div class="line"><a name="l00256"></a><span class="lineno">  256</span>&#160;                                                                        0.09215671f, 0.24107647f, -0.39835793f,</div><div class="line"><a name="l00257"></a><span class="lineno">  257</span>&#160;                                                                        0.18212086f, 0.01301402f, 0.48572797f,</div><div class="line"><a name="l00258"></a><span class="lineno">  258</span>&#160;                                                                        -0.50656658f, 0.20047462f, -0.20607421f,</div><div class="line"><a name="l00259"></a><span class="lineno">  259</span>&#160;                                                                        -0.51818722f, -0.15390486f, 0.0468148f,</div><div class="line"><a name="l00260"></a><span class="lineno">  260</span>&#160;                                                                        0.39922136f});</div><div class="line"><a name="l00261"></a><span class="lineno">  261</span>&#160;</div><div class="line"><a name="l00262"></a><span class="lineno">  262</span>&#160;    <span class="keyword">auto</span> cellToInputWeights = MakeTensor&lt;float, 1&gt;(tensorInfo4, {0., 0., 0., 0.});</div><div class="line"><a name="l00263"></a><span class="lineno">  263</span>&#160;</div><div class="line"><a name="l00264"></a><span class="lineno">  
264</span>&#160;    <span class="keyword">auto</span> inputGateBias = MakeTensor&lt;float, 1&gt;(tensorInfo4, {0., 0., 0., 0.});</div><div class="line"><a name="l00265"></a><span class="lineno">  265</span>&#160;</div><div class="line"><a name="l00266"></a><span class="lineno">  266</span>&#160;    <span class="keyword">auto</span> forgetGateBias = MakeTensor&lt;float, 1&gt;(tensorInfo4, {1., 1., 1., 1.});</div><div class="line"><a name="l00267"></a><span class="lineno">  267</span>&#160;</div><div class="line"><a name="l00268"></a><span class="lineno">  268</span>&#160;    <span class="keyword">auto</span> cellBias = MakeTensor&lt;float, 1&gt;(tensorInfo4, {0., 0., 0., 0.});</div><div class="line"><a name="l00269"></a><span class="lineno">  269</span>&#160;</div><div class="line"><a name="l00270"></a><span class="lineno">  270</span>&#160;    <span class="keyword">auto</span> outputGateBias = MakeTensor&lt;float, 1&gt;(tensorInfo4, {0., 0., 0., 0.});</div><div class="line"><a name="l00271"></a><span class="lineno">  271</span>&#160;</div><div class="line"><a name="l00272"></a><span class="lineno">  272</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToInputWeightsTensor(tensorInfo8);</div><div class="line"><a name="l00273"></a><span class="lineno">  273</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToForgetWeightsTensor(tensorInfo8);</div><div class="line"><a name="l00274"></a><span class="lineno">  274</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToCellWeightsTensor(tensorInfo8);</div><div class="line"><a name="l00275"></a><span class="lineno">  275</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToOutputWeightsTensor(tensorInfo8);</div><div class="line"><a name="l00276"></a><span class="lineno">  276</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToInputWeightsTensor(tensorInfo16);</div><div class="line"><a name="l00277"></a><span class="lineno">  277</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToForgetWeightsTensor(tensorInfo16);</div><div class="line"><a name="l00278"></a><span class="lineno">  278</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToCellWeightsTensor(tensorInfo16);</div><div class="line"><a name="l00279"></a><span class="lineno">  279</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToOutputWeightsTensor(tensorInfo16);</div><div class="line"><a name="l00280"></a><span class="lineno">  280</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellToInputWeightsTensor(tensorInfo4);</div><div class="line"><a name="l00281"></a><span class="lineno">  281</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputGateBiasTensor(tensorInfo4);</div><div class="line"><a name="l00282"></a><span class="lineno">  282</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> 
forgetGateBiasTensor(tensorInfo4);</div><div class="line"><a name="l00283"></a><span class="lineno">  283</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellBiasTensor(tensorInfo4);</div><div class="line"><a name="l00284"></a><span class="lineno">  284</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputGateBiasTensor(tensorInfo4);</div><div class="line"><a name="l00285"></a><span class="lineno">  285</span>&#160;</div><div class="line"><a name="l00286"></a><span class="lineno">  286</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToInputWeightsTensor, &amp;inputToInputWeights[0][0]);</div><div class="line"><a name="l00287"></a><span class="lineno">  287</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToForgetWeightsTensor, &amp;inputToForgetWeights[0][0]);</div><div class="line"><a name="l00288"></a><span class="lineno">  288</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToCellWeightsTensor, &amp;inputToCellWeights[0][0]);</div><div class="line"><a name="l00289"></a><span class="lineno">  289</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToOutputWeightsTensor, &amp;inputToOutputWeights[0][0]);</div><div class="line"><a name="l00290"></a><span class="lineno">  290</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToInputWeightsTensor, &amp;recurrentToInputWeights[0][0]);</div><div class="line"><a name="l00291"></a><span class="lineno">  291</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToForgetWeightsTensor, &amp;recurrentToForgetWeights[0][0]);</div><div class="line"><a name="l00292"></a><span class="lineno">  292</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToCellWeightsTensor, &amp;recurrentToCellWeights[0][0]);</div><div class="line"><a name="l00293"></a><span class="lineno">  293</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToOutputWeightsTensor, &amp;recurrentToOutputWeights[0][0]);</div><div class="line"><a name="l00294"></a><span class="lineno">  294</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellToInputWeightsTensor, &amp;cellToInputWeights[0]);</div><div class="line"><a name="l00295"></a><span class="lineno">  295</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputGateBiasTensor, &amp;inputGateBias[0]);</div><div class="line"><a name="l00296"></a><span class="lineno">  296</span>&#160;    <a class="code" 
href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetGateBiasTensor, &amp;forgetGateBias[0]);</div><div class="line"><a name="l00297"></a><span class="lineno">  297</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellBiasTensor, &amp;cellBias[0]);</div><div class="line"><a name="l00298"></a><span class="lineno">  298</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputGateBiasTensor, &amp;outputGateBias[0]);</div><div class="line"><a name="l00299"></a><span class="lineno">  299</span>&#160;</div><div class="line"><a name="l00300"></a><span class="lineno">  300</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a08a1932be591c315a512a877d38b22df">m_InputToInputWeights</a> = &amp;inputToInputWeightsTensor;</div><div class="line"><a name="l00301"></a><span class="lineno">  301</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">m_InputToForgetWeights</a> = &amp;inputToForgetWeightsTensor;</div><div class="line"><a name="l00302"></a><span class="lineno">  302</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a28ad98d17603fd8b12e046f8ece58970">m_InputToCellWeights</a> = &amp;inputToCellWeightsTensor;</div><div class="line"><a name="l00303"></a><span class="lineno">  303</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a83dc9086b2e4a4e4cadb66bd874df798">m_InputToOutputWeights</a> = &amp;inputToOutputWeightsTensor;</div><div class="line"><a name="l00304"></a><span class="lineno">  304</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a98d377149071d8842d610cc0734d1cfe">m_RecurrentToInputWeights</a> = &amp;recurrentToInputWeightsTensor;</div><div class="line"><a name="l00305"></a><span class="lineno">  305</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a45d73e66cbb2b65049e4016c20657ccf">m_RecurrentToForgetWeights</a> = &amp;recurrentToForgetWeightsTensor;</div><div class="line"><a name="l00306"></a><span class="lineno">  306</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">m_RecurrentToCellWeights</a> = &amp;recurrentToCellWeightsTensor;</div><div class="line"><a name="l00307"></a><span class="lineno">  307</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">m_RecurrentToOutputWeights</a> = &amp;recurrentToOutputWeightsTensor;</div><div class="line"><a name="l00308"></a><span class="lineno">  308</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#acb3aade8fae984f7293e222dcbe66030">m_InputGateBias</a> = &amp;inputGateBiasTensor;</div><div class="line"><a name="l00309"></a><span class="lineno">  309</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">m_ForgetGateBias</a> = &amp;forgetGateBiasTensor;</div><div class="line"><a name="l00310"></a><span class="lineno">  310</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">m_CellBias</a> = &amp;cellBiasTensor;</div><div 
class="line"><a name="l00311"></a><span class="lineno">  311</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">m_OutputGateBias</a> = &amp;outputGateBiasTensor;</div><div class="line"><a name="l00312"></a><span class="lineno">  312</span>&#160;</div><div class="line"><a name="l00313"></a><span class="lineno">  313</span>&#160;    <span class="comment">// Flags to set test configuration</span></div><div class="line"><a name="l00314"></a><span class="lineno">  314</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_lstm_descriptor.xhtml#ae1b07ed928036004bd257169e5aeeef4">m_ActivationFunc</a> = 4;</div><div class="line"><a name="l00315"></a><span class="lineno">  315</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_lstm_descriptor.xhtml#ad474e5c51a0b194ef32e812b86c0cbdb">m_CifgEnabled</a> = <span class="keyword">false</span>;</div><div class="line"><a name="l00316"></a><span class="lineno">  316</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_lstm_descriptor.xhtml#a2837b4396f20c956952d1a7286cab5f8">m_PeepholeEnabled</a> = <span class="keyword">false</span>;</div><div class="line"><a name="l00317"></a><span class="lineno">  317</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_lstm_descriptor.xhtml#a6c9de81fc65b3c4924cab11907075a17">m_ProjectionEnabled</a> = <span class="keyword">false</span>;</div><div class="line"><a name="l00318"></a><span class="lineno">  318</span>&#160;</div><div class="line"><a name="l00319"></a><span class="lineno">  319</span>&#160;    std::unique_ptr&lt;armnn::IWorkload&gt; workload = workloadFactory.<a class="code" href="classarmnn_1_1_i_workload_factory.xhtml#ab6bd7aaf685d4e956d780f8655a6f174">CreateLstm</a>(data, info);</div><div class="line"><a name="l00320"></a><span class="lineno">  320</span>&#160;    inputHandle-&gt;Allocate();</div><div class="line"><a name="l00321"></a><span class="lineno">  321</span>&#160;    outputStateInHandle-&gt;Allocate();</div><div class="line"><a name="l00322"></a><span class="lineno">  322</span>&#160;    cellStateInHandle-&gt;Allocate();</div><div class="line"><a name="l00323"></a><span class="lineno">  323</span>&#160;</div><div class="line"><a name="l00324"></a><span class="lineno">  324</span>&#160;    scratchHandle-&gt;Allocate();</div><div class="line"><a name="l00325"></a><span class="lineno">  325</span>&#160;    outputStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l00326"></a><span class="lineno">  326</span>&#160;    cellStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l00327"></a><span class="lineno">  327</span>&#160;    outputHandle-&gt;Allocate();</div><div class="line"><a name="l00328"></a><span class="lineno">  328</span>&#160;</div><div class="line"><a name="l00329"></a><span class="lineno">  329</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(inputHandle.get(), 
&amp;inputTensor[0][0]);</div><div class="line"><a name="l00330"></a><span class="lineno">  330</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(outputStateInHandle.get(), &amp;outputStateInTensor[0][0]);</div><div class="line"><a name="l00331"></a><span class="lineno">  331</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(cellStateInHandle.get(), &amp;cellStateInTensor[0][0]);</div><div class="line"><a name="l00332"></a><span class="lineno">  332</span>&#160;</div><div class="line"><a name="l00333"></a><span class="lineno">  333</span>&#160;    workload-&gt;Execute();</div><div class="line"><a name="l00334"></a><span class="lineno">  334</span>&#160;</div><div class="line"><a name="l00335"></a><span class="lineno">  335</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#a99b626c58a926dc7d6df78d22ec186c8">CopyDataFromITensorHandle</a>(&amp;ret.output[0][0], outputHandle.get());</div><div class="line"><a name="l00336"></a><span class="lineno">  336</span>&#160;</div><div class="line"><a name="l00337"></a><span class="lineno">  337</span>&#160;    <span class="keywordflow">return</span> ret;</div><div class="line"><a name="l00338"></a><span class="lineno">  338</span>&#160;}</div><div class="line"><a name="l00339"></a><span class="lineno">  339</span>&#160;</div><div class="line"><a name="l00340"></a><span class="lineno">  340</span>&#160;<span class="keyword">template</span>&lt;armnn::DataType ArmnnType, <span class="keyword">typename</span> T = armnn::ResolveType&lt;ArmnnType&gt;&gt;</div><div class="line"><a name="l00341"></a><span class="lineno">  341</span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a></div><div class="line"><a name="l00342"></a><span class="lineno">  342</span>&#160;LstmLayerNoCifgWithPeepholeWithProjectionTestImpl(<a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l00343"></a><span class="lineno">  343</span>&#160;                                                  <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l00344"></a><span class="lineno">  344</span>&#160;                                                  <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory,</div><div class="line"><a name="l00345"></a><span class="lineno">  345</span>&#160;                                                  <span class="keyword">const</span> boost::multi_array&lt;T, 2&gt;&amp; input,</div><div class="line"><a name="l00346"></a><span class="lineno">  346</span>&#160;                                                  <span class="keyword">const</span> boost::multi_array&lt;T, 2&gt;&amp; outputExpected,</div><div class="line"><a name="l00347"></a><span class="lineno">  347</span>&#160;                                                  <span class="keywordtype">float</span> qScale = 0.0f,</div><div class="line"><a name="l00348"></a><span class="lineno">  348</span>&#160;                                                  int32_t qOffset = 0,</div><div class="line"><a 
name="l00349"></a><span class="lineno">  349</span>&#160;                                                  <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> constantDataType = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>)</div><div class="line"><a name="l00350"></a><span class="lineno">  350</span>&#160;{</div><div class="line"><a name="l00351"></a><span class="lineno">  351</span>&#160;    <a class="code" href="namespacearmnn.xhtml#a44affeeb090c3c6a3062830562672e84">IgnoreUnused</a>(memoryManager);</div><div class="line"><a name="l00352"></a><span class="lineno">  352</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> batchSize = 2;</div><div class="line"><a name="l00353"></a><span class="lineno">  353</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> outputSize = 16;</div><div class="line"><a name="l00354"></a><span class="lineno">  354</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> inputSize = 5;</div><div class="line"><a name="l00355"></a><span class="lineno">  355</span>&#160;    <span class="keywordtype">unsigned</span> numUnits = 20;</div><div class="line"><a name="l00356"></a><span class="lineno">  356</span>&#160;</div><div class="line"><a name="l00357"></a><span class="lineno">  357</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputTensorInfo({batchSize , inputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00358"></a><span class="lineno">  358</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateInTensorInfo({batchSize , numUnits}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00359"></a><span class="lineno">  359</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateInTensorInfo({batchSize , outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00360"></a><span class="lineno">  360</span>&#160;</div><div class="line"><a name="l00361"></a><span class="lineno">  361</span>&#160;    <span class="comment">// Scratch buffer size without CIFG [batchSize, numUnits * 4]</span></div><div class="line"><a name="l00362"></a><span class="lineno">  362</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> scratchBufferTensorInfo({batchSize, numUnits * 4}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00363"></a><span class="lineno">  363</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateOutTensorInfo({batchSize, numUnits}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00364"></a><span class="lineno">  364</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateOutTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00365"></a><span class="lineno">  365</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l00366"></a><span class="lineno">  366</span>&#160;</div><div class="line"><a name="l00367"></a><span class="lineno">  367</span>&#160;    <a class="code" 
href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a> ret(outputTensorInfo);</div><div class="line"><a name="l00368"></a><span class="lineno">  368</span>&#160;</div><div class="line"><a name="l00369"></a><span class="lineno">  369</span>&#160;    std::vector&lt;T&gt; inputVector;</div><div class="line"><a name="l00370"></a><span class="lineno">  370</span>&#160;    inputVector.assign(input.data(), input.data() + (batchSize * inputSize));</div><div class="line"><a name="l00371"></a><span class="lineno">  371</span>&#160;    <span class="keyword">auto</span> inputTensor = MakeTensor&lt;T,2&gt;(inputTensorInfo, inputVector);</div><div class="line"><a name="l00372"></a><span class="lineno">  372</span>&#160;</div><div class="line"><a name="l00373"></a><span class="lineno">  373</span>&#160;    std::vector&lt;T&gt; cellStateInVector(batchSize * numUnits, T());</div><div class="line"><a name="l00374"></a><span class="lineno">  374</span>&#160;    <span class="keyword">auto</span> cellStateInTensor = MakeTensor&lt;T,2&gt;(cellStateInTensorInfo, cellStateInVector);</div><div class="line"><a name="l00375"></a><span class="lineno">  375</span>&#160;</div><div class="line"><a name="l00376"></a><span class="lineno">  376</span>&#160;    std::vector&lt;T&gt; outputStateInVector(batchSize * outputSize, T());</div><div class="line"><a name="l00377"></a><span class="lineno">  377</span>&#160;    <span class="keyword">auto</span> outputStateInTensor = MakeTensor&lt;T,2&gt;(outputStateInTensorInfo, outputStateInVector);</div><div class="line"><a name="l00378"></a><span class="lineno">  378</span>&#160;</div><div class="line"><a name="l00379"></a><span class="lineno">  379</span>&#160;    std::vector&lt;T&gt; scratchBufferVector(batchSize * numUnits * 4, T());</div><div class="line"><a name="l00380"></a><span class="lineno">  380</span>&#160;    <span class="keyword">auto</span> scratchBufferTensor = MakeTensor&lt;T,2&gt;(scratchBufferTensorInfo, scratchBufferVector);</div><div class="line"><a name="l00381"></a><span class="lineno">  381</span>&#160;</div><div class="line"><a name="l00382"></a><span class="lineno">  382</span>&#160;    std::vector&lt;T&gt; outputStateOutVector(batchSize * outputSize, T());</div><div class="line"><a name="l00383"></a><span class="lineno">  383</span>&#160;    <span class="keyword">auto</span> outputStateOutTensor = MakeTensor&lt;T,2&gt;(outputStateOutTensorInfo, outputStateOutVector);</div><div class="line"><a name="l00384"></a><span class="lineno">  384</span>&#160;</div><div class="line"><a name="l00385"></a><span class="lineno">  385</span>&#160;    std::vector&lt;T&gt; cellStateOutVector(batchSize * numUnits, T());</div><div class="line"><a name="l00386"></a><span class="lineno">  386</span>&#160;    <span class="keyword">auto</span> cellStateOutTensor = MakeTensor&lt;T,2&gt;(cellStateOutTensorInfo, cellStateOutVector);</div><div class="line"><a name="l00387"></a><span class="lineno">  387</span>&#160;</div><div class="line"><a name="l00388"></a><span class="lineno">  388</span>&#160;    std::vector&lt;T&gt; outputVector;</div><div class="line"><a name="l00389"></a><span class="lineno">  389</span>&#160;    outputVector.assign(outputExpected.data(), outputExpected.data() + (batchSize * outputSize));</div><div class="line"><a name="l00390"></a><span class="lineno">  390</span>&#160;    ret.outputExpected = MakeTensor&lt;T, 2&gt;(outputTensorInfo, outputVector);</div><div class="line"><a name="l00391"></a><span class="lineno">  391</span>&#160;</div><div 
class="line"><a name="l00392"></a><span class="lineno">  392</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; inputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(inputTensorInfo);</div><div class="line"><a name="l00393"></a><span class="lineno">  393</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateInHandle =</div><div class="line"><a name="l00394"></a><span class="lineno">  394</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateInTensorInfo);</div><div class="line"><a name="l00395"></a><span class="lineno">  395</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateInHandle =</div><div class="line"><a name="l00396"></a><span class="lineno">  396</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInTensorInfo);</div><div class="line"><a name="l00397"></a><span class="lineno">  397</span>&#160;</div><div class="line"><a name="l00398"></a><span class="lineno">  398</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; scratchHandle =</div><div class="line"><a name="l00399"></a><span class="lineno">  399</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(scratchBufferTensorInfo);</div><div class="line"><a name="l00400"></a><span class="lineno">  400</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateOutHandle =</div><div class="line"><a name="l00401"></a><span class="lineno">  401</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateOutTensorInfo);</div><div class="line"><a name="l00402"></a><span class="lineno">  402</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateOutHandle =</div><div class="line"><a name="l00403"></a><span class="lineno">  403</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateOutTensorInfo);</div><div class="line"><a name="l00404"></a><span class="lineno">  404</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputTensorInfo);</div><div class="line"><a name="l00405"></a><span class="lineno">  405</span>&#160;</div><div class="line"><a name="l00406"></a><span class="lineno">  406</span>&#160;    <a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml">armnn::LstmQueueDescriptor</a> data;</div><div class="line"><a name="l00407"></a><span class="lineno">  407</span>&#160;    <a class="code" href="structarmnn_1_1_workload_info.xhtml">armnn::WorkloadInfo</a> info;</div><div class="line"><a name="l00408"></a><span class="lineno">  408</span>&#160;</div><div class="line"><a name="l00409"></a><span class="lineno">  409</span>&#160;    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());</div><div class="line"><a name="l00410"></a><span class="lineno">  410</span>&#160;    
AddInputToWorkload(data, info, outputStateInTensorInfo, outputStateInHandle.get());</div><div class="line"><a name="l00411"></a><span class="lineno">  411</span>&#160;    AddInputToWorkload(data, info, cellStateInTensorInfo, cellStateInHandle.get());</div><div class="line"><a name="l00412"></a><span class="lineno">  412</span>&#160;</div><div class="line"><a name="l00413"></a><span class="lineno">  413</span>&#160;    AddOutputToWorkload(data, info, scratchBufferTensorInfo, scratchHandle.get());</div><div class="line"><a name="l00414"></a><span class="lineno">  414</span>&#160;    AddOutputToWorkload(data, info, outputStateOutTensorInfo, outputStateOutHandle.get());</div><div class="line"><a name="l00415"></a><span class="lineno">  415</span>&#160;    AddOutputToWorkload(data, info, cellStateOutTensorInfo, cellStateOutHandle.get());</div><div class="line"><a name="l00416"></a><span class="lineno">  416</span>&#160;    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());</div><div class="line"><a name="l00417"></a><span class="lineno">  417</span>&#160;</div><div class="line"><a name="l00418"></a><span class="lineno">  418</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo16({outputSize}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l00419"></a><span class="lineno">  419</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo20({numUnits}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l00420"></a><span class="lineno">  420</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo20x5({numUnits, inputSize}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l00421"></a><span class="lineno">  421</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo20x16({numUnits, outputSize}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l00422"></a><span class="lineno">  422</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfo16x20({outputSize, numUnits}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l00423"></a><span class="lineno">  423</span>&#160;</div><div class="line"><a name="l00424"></a><span class="lineno">  424</span>&#160;    <span class="keyword">auto</span> inputToInputWeights =</div><div class="line"><a name="l00425"></a><span class="lineno">  425</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo20x5, {0.021393683f,0.06124551f,  0.046905167f,-0.014657677f,-0.03149463f,</div><div class="line"><a name="l00426"></a><span class="lineno">  426</span>&#160;                                                  0.09171803f, 0.14647801f,0.10797193f,   -0.0057968358f,0.0019193048f,</div><div class="line"><a name="l00427"></a><span class="lineno">  427</span>&#160;                                                  -0.2726754f, 0.10154029f, -0.018539885f, 0.080349885f, -0.10262385f,</div><div class="line"><a name="l00428"></a><span class="lineno">  428</span>&#160;                                                  -0.022599787f,-0.09121155f, -0.008675967f, -0.045206103f,-0.0821282f,</div><div class="line"><a name="l00429"></a><span class="lineno">  429</span>&#160;                                                  -0.008045952f,0.015478081f, 0.055217247f,  0.038719587f, 0.044153627f,</div><div class="line"><a 
name="l00430"></a><span class="lineno">  430</span>&#160;                                                  -0.06453243f,0.05031825f, -0.046935108f, -0.008164439f, 0.014574226f,</div><div class="line"><a name="l00431"></a><span class="lineno">  431</span>&#160;                                                  -0.1671009f,   -0.15519552f, -0.16819797f,-0.13971269f,-0.11953059f,</div><div class="line"><a name="l00432"></a><span class="lineno">  432</span>&#160;                                                  0.25005487f, -0.22790983f, 0.009855087f,  -0.028140958f, -0.11200698f,</div><div class="line"><a name="l00433"></a><span class="lineno">  433</span>&#160;                                                  0.11295408f, -0.0035217577f, 0.054485075f,  0.05184695f, 0.064711206f,</div><div class="line"><a name="l00434"></a><span class="lineno">  434</span>&#160;                                                  0.10989193f,   0.11674786f,  0.03490607f, 0.07727357f, 0.11390585f,</div><div class="line"><a name="l00435"></a><span class="lineno">  435</span>&#160;                                                  -0.1863375f,  -0.1034451f, -0.13945189f, -0.049401227f, -0.18767063f,</div><div class="line"><a name="l00436"></a><span class="lineno">  436</span>&#160;                                                  0.042483903f, 0.14233552f, 0.13832581f, 0.18350165f,    0.14545603f,</div><div class="line"><a name="l00437"></a><span class="lineno">  437</span>&#160;                                                  -0.028545704f,0.024939531f,0.050929718f,0.0076203286f,-0.0029723682f,</div><div class="line"><a name="l00438"></a><span class="lineno">  438</span>&#160;                                                  -0.042484224f, -0.11827596f, -0.09171104f,  -0.10808628f,-0.16327988f,</div><div class="line"><a name="l00439"></a><span class="lineno">  439</span>&#160;                                                  -0.2273378f,   -0.0993647f, -0.017155107f,0.0023917493f,0.049272764f,</div><div class="line"><a name="l00440"></a><span class="lineno">  440</span>&#160;                                                  0.0038534778f, 0.054764505f,   0.089753784f, 0.06947234f, 0.08014476f,</div><div class="line"><a name="l00441"></a><span class="lineno">  441</span>&#160;                                                  -0.04544234f, -0.0497073f,-0.07135631f,  -0.048929106f,-0.004042012f,</div><div class="line"><a name="l00442"></a><span class="lineno">  442</span>&#160;                                                  -0.009284026f, 0.018042054f, 0.0036860977f,-0.07427302f, -0.11434604f,</div><div class="line"><a name="l00443"></a><span class="lineno">  443</span>&#160;                                                  -0.018995456f, 0.031487543f, 0.012834908f,0.019977754f,0.044256654f,</div><div class="line"><a name="l00444"></a><span class="lineno">  444</span>&#160;                                                  -0.39292613f,  -0.18519334f, -0.11651281f,-0.06809892f, 0.011373677f</div><div class="line"><a name="l00445"></a><span class="lineno">  445</span>&#160;            });</div><div class="line"><a name="l00446"></a><span class="lineno">  446</span>&#160;</div><div class="line"><a name="l00447"></a><span class="lineno">  447</span>&#160;    <span class="keyword">auto</span> inputToForgetWeights =</div><div class="line"><a name="l00448"></a><span class="lineno">  448</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo20x5, {-0.0018401089f, -0.004852237f,0.03698424f, 
0.014181704f,0.028273236f,</div><div class="line"><a name="l00449"></a><span class="lineno">  449</span>&#160;                                                   -0.016726194f, -0.05249759f,-0.10204261f, 0.00861066f,-0.040979505f,</div><div class="line"><a name="l00450"></a><span class="lineno">  450</span>&#160;                                                   -0.009899187f,0.01923892f,-0.028177269f, -0.08535103f,-0.14585495f,</div><div class="line"><a name="l00451"></a><span class="lineno">  451</span>&#160;                                                   0.10662567f,-0.01909731f,-0.017883534f,-0.0047269356f,-0.045103323f,</div><div class="line"><a name="l00452"></a><span class="lineno">  452</span>&#160;                                                   0.0030784295f,0.076784775f,0.07463696f, 0.094531395f,0.0814421f,</div><div class="line"><a name="l00453"></a><span class="lineno">  453</span>&#160;                                                   -0.12257899f, -0.033945758f,-0.031303465f, 0.045630626f,0.06843887f,</div><div class="line"><a name="l00454"></a><span class="lineno">  454</span>&#160;                                                   -0.13492945f, -0.012480007f,-0.0811829f, -0.07224499f,-0.09628791f,</div><div class="line"><a name="l00455"></a><span class="lineno">  455</span>&#160;                                                   0.045100946f,0.0012300825f, 0.013964662f, 0.099372394f,0.02543059f,</div><div class="line"><a name="l00456"></a><span class="lineno">  456</span>&#160;                                                   0.06958324f,    0.034257296f, 0.0482646f, 0.06267997f,0.052625068f,</div><div class="line"><a name="l00457"></a><span class="lineno">  457</span>&#160;                                                   0.12784666f,    0.07077897f,  0.025725935f, 0.04165009f,0.07241905f,</div><div class="line"><a name="l00458"></a><span class="lineno">  458</span>&#160;                                                   0.018668644f, -0.037377294f,-0.06277783f,-0.08833636f,-0.040120605f,</div><div class="line"><a name="l00459"></a><span class="lineno">  459</span>&#160;                                                   -0.011405586f,-0.007808335f,-0.010301386f,-0.005102167f,0.027717464f,</div><div class="line"><a name="l00460"></a><span class="lineno">  460</span>&#160;                                                   0.05483423f, 0.11449111f, 0.11289652f,0.10939839f, 0.13396506f,</div><div class="line"><a name="l00461"></a><span class="lineno">  461</span>&#160;                                                   -0.08402166f,-0.01901462f,  -0.044678304f,-0.07720565f,0.014350063f,</div><div class="line"><a name="l00462"></a><span class="lineno">  462</span>&#160;                                                   -0.11757958f, -0.0652038f, -0.08185733f,-0.076754324f,-0.092614375f,</div><div class="line"><a name="l00463"></a><span class="lineno">  463</span>&#160;                                                   0.10405491f, 0.052960336f, 0.035755895f,0.035839386f,-0.012540553f,</div><div class="line"><a name="l00464"></a><span class="lineno">  464</span>&#160;                                                   0.036881298f,   0.02913376f,  0.03420159f,0.05448447f,-0.054523353f,</div><div class="line"><a name="l00465"></a><span class="lineno">  465</span>&#160;                                                   0.02582715f, 0.02327355f, -0.011857179f,-0.0011980024f,-0.034641717f,</div><div class="line"><a name="l00466"></a><span class="lineno">  466</span>&#160;      
                                             -0.026125094f,-0.17582615f,-0.15923657f,-0.27486774f,-0.0006143371f,</div><div class="line"><a name="l00467"></a><span class="lineno">  467</span>&#160;                                                   0.0001771948f,  -8.470171e-05f, 0.02651807f,0.045790765f,0.06956496f</div><div class="line"><a name="l00468"></a><span class="lineno">  468</span>&#160;            });</div><div class="line"><a name="l00469"></a><span class="lineno">  469</span>&#160;</div><div class="line"><a name="l00470"></a><span class="lineno">  470</span>&#160;    <span class="keyword">auto</span> inputToCellWeights =</div><div class="line"><a name="l00471"></a><span class="lineno">  471</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo20x5, {-0.04580283f,   -0.09549462f,   -0.032418985f,  -0.06454633f,</div><div class="line"><a name="l00472"></a><span class="lineno">  472</span>&#160;                                                  -0.043528453f,  0.043018587f,   -0.049152344f,  -0.12418144f,</div><div class="line"><a name="l00473"></a><span class="lineno">  473</span>&#160;                                                  -0.078985475f,  -0.07596889f,   0.019484362f,   -0.11434962f,</div><div class="line"><a name="l00474"></a><span class="lineno">  474</span>&#160;                                                  -0.0074034138f, -0.06314844f,   -0.092981495f,  0.0062155537f,</div><div class="line"><a name="l00475"></a><span class="lineno">  475</span>&#160;                                                  -0.025034338f,  -0.0028890965f, 0.048929527f,   0.06235075f,</div><div class="line"><a name="l00476"></a><span class="lineno">  476</span>&#160;                                                  0.10665918f,    -0.032036792f,  -0.08505916f,   -0.10843358f,</div><div class="line"><a name="l00477"></a><span class="lineno">  477</span>&#160;                                                  -0.13002433f,   -0.036816437f,  -0.02130134f,   -0.016518239f,</div><div class="line"><a name="l00478"></a><span class="lineno">  478</span>&#160;                                                  0.0047691227f,  -0.0025825808f, 0.066017866f,   0.029991534f,</div><div class="line"><a name="l00479"></a><span class="lineno">  479</span>&#160;                                                  -0.10652836f,   -0.1037554f,    -0.13056071f,   -0.03266643f,</div><div class="line"><a name="l00480"></a><span class="lineno">  480</span>&#160;                                                  -0.033702414f,  -0.006473424f,  -0.04611692f,   0.014419339f,</div><div class="line"><a name="l00481"></a><span class="lineno">  481</span>&#160;                                                  -0.025174323f,  0.0396852f,     0.081777506f,   0.06157468f,</div><div class="line"><a name="l00482"></a><span class="lineno">  482</span>&#160;                                                  0.10210095f,    -0.009658194f,  0.046511717f,   0.03603906f,</div><div class="line"><a name="l00483"></a><span class="lineno">  483</span>&#160;                                                  0.0069369148f,  0.015960095f,   -0.06507666f,   0.09551598f,</div><div class="line"><a name="l00484"></a><span class="lineno">  484</span>&#160;                                                  0.053568836f,   0.06408714f,    0.12835667f,    -0.008714329f,</div><div class="line"><a name="l00485"></a><span class="lineno">  485</span>&#160;                                                  -0.20211966f,   -0.12093674f,   
0.029450472f,   0.2849013f,</div><div class="line"><a name="l00486"></a><span class="lineno">  486</span>&#160;                                                  -0.029227901f,  0.1164364f,     -0.08560263f,   0.09941786f,</div><div class="line"><a name="l00487"></a><span class="lineno">  487</span>&#160;                                                  -0.036999565f,  -0.028842626f,  -0.0033637602f, -0.017012902f,</div><div class="line"><a name="l00488"></a><span class="lineno">  488</span>&#160;                                                  -0.09720865f,   -0.11193351f,   -0.029155117f,  -0.017936034f,</div><div class="line"><a name="l00489"></a><span class="lineno">  489</span>&#160;                                                  -0.009768936f,  -0.04223324f,   -0.036159635f,  0.06505112f,</div><div class="line"><a name="l00490"></a><span class="lineno">  490</span>&#160;                                                  -0.021742892f,  -0.023377212f,  -0.07221364f,   -0.06430552f,</div><div class="line"><a name="l00491"></a><span class="lineno">  491</span>&#160;                                                  0.05453865f,    0.091149814f,   0.06387331f,    0.007518393f,</div><div class="line"><a name="l00492"></a><span class="lineno">  492</span>&#160;                                                  0.055960953f,   0.069779344f,   0.046411168f,   0.10509911f,</div><div class="line"><a name="l00493"></a><span class="lineno">  493</span>&#160;                                                  0.07463894f,    0.0075130584f,  0.012850982f,   0.04555431f,</div><div class="line"><a name="l00494"></a><span class="lineno">  494</span>&#160;                                                  0.056955688f,   0.06555285f,    0.050801456f,   -0.009862683f,</div><div class="line"><a name="l00495"></a><span class="lineno">  495</span>&#160;                                                  0.00826772f,    -0.026555609f,  -0.0073611983f, -0.0014897042f</div><div class="line"><a name="l00496"></a><span class="lineno">  496</span>&#160;            });</div><div class="line"><a name="l00497"></a><span class="lineno">  497</span>&#160;</div><div class="line"><a name="l00498"></a><span class="lineno">  498</span>&#160;    <span class="keyword">auto</span> inputToOutputWeights =</div><div class="line"><a name="l00499"></a><span class="lineno">  499</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo20x5, {-0.0998932f,   -0.07201956f, -0.052803773f,-0.15629593f,-0.15001918f,</div><div class="line"><a name="l00500"></a><span class="lineno">  500</span>&#160;                                                  -0.07650751f,0.02359855f, -0.075155355f, -0.08037709f,  -0.15093534f,</div><div class="line"><a name="l00501"></a><span class="lineno">  501</span>&#160;                                                  0.029517552f, -0.04751393f, 0.010350531f,-0.02664851f, -0.016839722f,</div><div class="line"><a name="l00502"></a><span class="lineno">  502</span>&#160;                                                  -0.023121163f, 0.0077019283f, 0.012851257f, -0.05040649f,-0.0129761f,</div><div class="line"><a name="l00503"></a><span class="lineno">  503</span>&#160;                                                  -0.021737747f,-0.038305793f,-0.06870586f, -0.01481247f,-0.001285394f,</div><div class="line"><a name="l00504"></a><span class="lineno">  504</span>&#160;                                                  0.10124236f,  0.083122835f, 0.053313006f,-0.062235646f,-0.075637154f,</div><div class="line"><a 
name="l00505"></a><span class="lineno">  505</span>&#160;                                                  -0.027833903f, 0.029774971f,  0.1130802f, 0.09218906f, 0.09506135f,</div><div class="line"><a name="l00506"></a><span class="lineno">  506</span>&#160;                                                  -0.086665764f,-0.037162706f,-0.038880914f,-0.035832845f,-0.014481564f,</div><div class="line"><a name="l00507"></a><span class="lineno">  507</span>&#160;                                                  -0.09825003f,-0.12048569f,-0.097665586f,-0.05287633f, -0.0964047f,</div><div class="line"><a name="l00508"></a><span class="lineno">  508</span>&#160;                                                  -0.11366429f,  0.035777505f,  0.13568819f, 0.052451383f,0.050649304f,</div><div class="line"><a name="l00509"></a><span class="lineno">  509</span>&#160;                                                  0.05798951f, -0.021852335f,-0.099848844f,0.014740475f,-0.078897946f,</div><div class="line"><a name="l00510"></a><span class="lineno">  510</span>&#160;                                                  0.04974699f, 0.014160473f,  0.06973932f,    0.04964942f, 0.033364646f,</div><div class="line"><a name="l00511"></a><span class="lineno">  511</span>&#160;                                                  0.08190124f,   0.025535367f, 0.050893165f, 0.048514254f,0.06945813f,</div><div class="line"><a name="l00512"></a><span class="lineno">  512</span>&#160;                                                  -0.078907564f,-0.06707616f,  -0.11844508f, -0.09986688f,-0.07509403f,</div><div class="line"><a name="l00513"></a><span class="lineno">  513</span>&#160;                                                  0.06263226f,   0.14925587f,   0.20188436f, 0.12098451f,0.14639415f,</div><div class="line"><a name="l00514"></a><span class="lineno">  514</span>&#160;                                                  0.0015017595f, -0.014267382f, -0.03417257f,0.012711468f,0.0028300495f,</div><div class="line"><a name="l00515"></a><span class="lineno">  515</span>&#160;                                                  -0.024758482f, -0.05098548f,-0.0821182f, 0.014225672f,  0.021544158f,</div><div class="line"><a name="l00516"></a><span class="lineno">  516</span>&#160;                                                  0.08949725f,  0.07505268f, -0.0020780868f, 0.04908258f,0.06476295f,</div><div class="line"><a name="l00517"></a><span class="lineno">  517</span>&#160;                                                  -0.022907063f,0.027562456f,0.040185735f, 0.019567577f,-0.015598739f,</div><div class="line"><a name="l00518"></a><span class="lineno">  518</span>&#160;                                                  -0.049097303f, -0.017121866f, -0.083368234f,-0.02332002f,-0.0840956f</div><div class="line"><a name="l00519"></a><span class="lineno">  519</span>&#160;            });</div><div class="line"><a name="l00520"></a><span class="lineno">  520</span>&#160;</div><div class="line"><a name="l00521"></a><span class="lineno">  521</span>&#160;    <span class="keyword">auto</span> inputGateBias =</div><div class="line"><a name="l00522"></a><span class="lineno">  522</span>&#160;            MakeTensor&lt;float, 1&gt;(tensorInfo20, {0.02234832f,  0.14757581f,   0.18176508f,  0.10380666f,  0.053110216f,</div><div class="line"><a name="l00523"></a><span class="lineno">  523</span>&#160;                                                -0.06928846f, -0.13942584f,  -0.11816189f, 0.19483899f,  0.03652339f,</div><div 
class="line"><a name="l00524"></a><span class="lineno">  524</span>&#160;                                                -0.10250295f, 0.036714908f,  -0.18426876f, 0.036065217f, 0.21810818f,</div><div class="line"><a name="l00525"></a><span class="lineno">  525</span>&#160;                                                0.02383196f,  -0.043370757f, 0.08690144f,  -0.04444982f, 0.00030581196f</div><div class="line"><a name="l00526"></a><span class="lineno">  526</span>&#160;            });</div><div class="line"><a name="l00527"></a><span class="lineno">  527</span>&#160;</div><div class="line"><a name="l00528"></a><span class="lineno">  528</span>&#160;    <span class="keyword">auto</span> forgetGateBias =</div><div class="line"><a name="l00529"></a><span class="lineno">  529</span>&#160;            MakeTensor&lt;float, 1&gt;(tensorInfo20, {0.035185695f, -0.042891346f, -0.03032477f, 0.23027696f,</div><div class="line"><a name="l00530"></a><span class="lineno">  530</span>&#160;                                                0.11098921f,  0.15378423f,   0.09263801f,  0.09790885f,</div><div class="line"><a name="l00531"></a><span class="lineno">  531</span>&#160;                                                0.09508917f,  0.061199076f,  0.07665568f,  -0.015443159f,</div><div class="line"><a name="l00532"></a><span class="lineno">  532</span>&#160;                                                -0.03499149f, 0.046190713f,  0.08895977f,  0.10899629f,</div><div class="line"><a name="l00533"></a><span class="lineno">  533</span>&#160;                                                0.40694186f,  0.06030037f,   0.012413437f, -0.06108739f</div><div class="line"><a name="l00534"></a><span class="lineno">  534</span>&#160;            });</div><div class="line"><a name="l00535"></a><span class="lineno">  535</span>&#160;</div><div class="line"><a name="l00536"></a><span class="lineno">  536</span>&#160;    <span class="keyword">auto</span> cellBias =</div><div class="line"><a name="l00537"></a><span class="lineno">  537</span>&#160;            MakeTensor&lt;float, 1&gt;(tensorInfo20, {-0.024379363f, 0.0055531194f, 0.23377132f,   0.033463873f,</div><div class="line"><a name="l00538"></a><span class="lineno">  538</span>&#160;                                                -0.1483596f,   -0.10639995f,  -0.091433935f, 0.058573797f,</div><div class="line"><a name="l00539"></a><span class="lineno">  539</span>&#160;                                                -0.06809782f,  -0.07889636f,  -0.043246906f, -0.09829136f,</div><div class="line"><a name="l00540"></a><span class="lineno">  540</span>&#160;                                                -0.4279842f,   0.034901652f,  0.18797937f,   0.0075234566f,</div><div class="line"><a name="l00541"></a><span class="lineno">  541</span>&#160;                                                0.016178843f,  0.1749513f,    0.13975595f,   0.92058027f</div><div class="line"><a name="l00542"></a><span class="lineno">  542</span>&#160;            });</div><div class="line"><a name="l00543"></a><span class="lineno">  543</span>&#160;</div><div class="line"><a name="l00544"></a><span class="lineno">  544</span>&#160;    <span class="keyword">auto</span> outputGateBias =</div><div class="line"><a name="l00545"></a><span class="lineno">  545</span>&#160;            MakeTensor&lt;float, 1&gt;(tensorInfo20, {0.046159424f,  -0.0012809046f, 0.03563469f, 0.12648113f, 0.027195795f,</div><div class="line"><a name="l00546"></a><span class="lineno">  546</span>&#160;               
                                 0.35373217f,   -0.018957434f,  0.008907322f, -0.0762701f, 0.12018895f,</div><div class="line"><a name="l00547"></a><span class="lineno">  547</span>&#160;                                                0.04216877f,   0.0022856654f,  0.040952638f,  0.3147856f,  0.08225149f,</div><div class="line"><a name="l00548"></a><span class="lineno">  548</span>&#160;                                                -0.057416286f, -0.14995944f,   -0.008040261f, 0.13208859f, 0.029760877f</div><div class="line"><a name="l00549"></a><span class="lineno">  549</span>&#160;            });</div><div class="line"><a name="l00550"></a><span class="lineno">  550</span>&#160;</div><div class="line"><a name="l00551"></a><span class="lineno">  551</span>&#160;    <span class="keyword">auto</span> recurrentToInputWeights =</div><div class="line"><a name="l00552"></a><span class="lineno">  552</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo20x16, {-0.001374326f,   -0.078856036f,   0.10672688f,    0.029162422f,</div><div class="line"><a name="l00553"></a><span class="lineno">  553</span>&#160;                                                   -0.11585556f,    0.02557986f,     -0.13446963f,   -0.035785314f,</div><div class="line"><a name="l00554"></a><span class="lineno">  554</span>&#160;                                                   -0.01244275f,    0.025961924f,    -0.02337298f,   -0.044228926f,</div><div class="line"><a name="l00555"></a><span class="lineno">  555</span>&#160;                                                   -0.055839065f,   -0.046598054f,   -0.010546039f,  -0.06900766f,</div><div class="line"><a name="l00556"></a><span class="lineno">  556</span>&#160;                                                   0.027239809f,    0.022582639f,    -0.013296484f,  -0.05459212f,</div><div class="line"><a name="l00557"></a><span class="lineno">  557</span>&#160;                                                   0.08981f,        -0.045407712f,   0.08682226f,    -0.06867011f,</div><div class="line"><a name="l00558"></a><span class="lineno">  558</span>&#160;                                                   -0.14390695f,    -0.02916037f,    0.000996957f,   0.091420636f,</div><div class="line"><a name="l00559"></a><span class="lineno">  559</span>&#160;                                                   0.14283475f,     -0.07390571f,    -0.06402044f,   0.062524505f,</div><div class="line"><a name="l00560"></a><span class="lineno">  560</span>&#160;                                                   -0.093129106f,   0.04860203f,     -0.08364217f,   -0.08119002f,</div><div class="line"><a name="l00561"></a><span class="lineno">  561</span>&#160;                                                   0.009352075f,    0.22920375f,     0.0016303885f,  0.11583097f,</div><div class="line"><a name="l00562"></a><span class="lineno">  562</span>&#160;                                                   -0.13732095f,    0.012405723f,    -0.07551853f,   0.06343048f,</div><div class="line"><a name="l00563"></a><span class="lineno">  563</span>&#160;                                                   0.12162708f,     -0.031923793f,   -0.014335606f,  0.01790974f,</div><div class="line"><a name="l00564"></a><span class="lineno">  564</span>&#160;                                                   -0.10650317f,    -0.0724401f,     0.08554849f,    -0.05727212f,</div><div class="line"><a name="l00565"></a><span class="lineno">  565</span>&#160;                                                   
0.06556731f,     -0.042729504f,   -0.043227166f,  0.011683251f,</div><div class="line"><a name="l00566"></a><span class="lineno">  566</span>&#160;                                                   -0.013082158f,   -0.029302018f,   -0.010899579f,  -0.062036745f,</div><div class="line"><a name="l00567"></a><span class="lineno">  567</span>&#160;                                                   -0.022509435f,   -0.00964907f,    -0.01567329f,   0.04260106f,</div><div class="line"><a name="l00568"></a><span class="lineno">  568</span>&#160;                                                   -0.07787477f,    -0.11576462f,    0.017356863f,   0.048673786f,</div><div class="line"><a name="l00569"></a><span class="lineno">  569</span>&#160;                                                   -0.017577527f,   -0.05527947f,    -0.082487635f,  -0.040137455f,</div><div class="line"><a name="l00570"></a><span class="lineno">  570</span>&#160;                                                   -0.10820036f,    -0.04666372f,    0.022746278f,   -0.07851417f,</div><div class="line"><a name="l00571"></a><span class="lineno">  571</span>&#160;                                                   0.01068115f,     0.032956902f,    0.022433773f,   0.0026891115f,</div><div class="line"><a name="l00572"></a><span class="lineno">  572</span>&#160;                                                   0.08944216f,     -0.0685835f,     0.010513544f,   0.07228705f,</div><div class="line"><a name="l00573"></a><span class="lineno">  573</span>&#160;                                                   0.02032331f,     -0.059686817f,   -0.0005566496f, -0.086984694f,</div><div class="line"><a name="l00574"></a><span class="lineno">  574</span>&#160;                                                   0.040414046f,    -0.1380399f,     0.094208956f,   -0.05722982f,</div><div class="line"><a name="l00575"></a><span class="lineno">  575</span>&#160;                                                   0.012092817f,    -0.04989123f,    -0.086576f,     -0.003399834f,</div><div class="line"><a name="l00576"></a><span class="lineno">  576</span>&#160;                                                   -0.04696032f,    -0.045747425f,   0.10091314f,    0.048676282f,</div><div class="line"><a name="l00577"></a><span class="lineno">  577</span>&#160;                                                   -0.029037097f,   0.031399418f,    -0.0040285117f, 0.047237843f,</div><div class="line"><a name="l00578"></a><span class="lineno">  578</span>&#160;                                                   0.09504992f,     0.041799378f,    -0.049185462f,  -0.031518843f,</div><div class="line"><a name="l00579"></a><span class="lineno">  579</span>&#160;                                                   -0.10516937f,    0.026374253f,    0.10058866f,    -0.0033195973f,</div><div class="line"><a name="l00580"></a><span class="lineno">  580</span>&#160;                                                   -0.041975245f,   0.0073591834f,   0.0033782164f,  -0.004325073f,</div><div class="line"><a name="l00581"></a><span class="lineno">  581</span>&#160;                                                   -0.10167381f,    0.042500053f,    -0.01447153f,   0.06464186f,</div><div class="line"><a name="l00582"></a><span class="lineno">  582</span>&#160;                                                   -0.017142897f,   0.03312627f,     0.009205989f,   0.024138335f,</div><div class="line"><a name="l00583"></a><span class="lineno">  583</span>&#160;                                    
               -0.011337001f,   0.035530265f,    -0.010912711f,  0.0706555f,</div><div class="line"><a name="l00584"></a><span class="lineno">  584</span>&#160;                                                   -0.005894094f,   0.051841937f,    -0.1401738f,    -0.02351249f,</div><div class="line"><a name="l00585"></a><span class="lineno">  585</span>&#160;                                                   0.0365468f,      0.07590991f,     0.08838724f,    0.021681072f,</div><div class="line"><a name="l00586"></a><span class="lineno">  586</span>&#160;                                                   -0.10086113f,    0.019608743f,    -0.06195883f,   0.077335775f,</div><div class="line"><a name="l00587"></a><span class="lineno">  587</span>&#160;                                                   0.023646897f,    -0.095322326f,   0.02233014f,    0.09756986f,</div><div class="line"><a name="l00588"></a><span class="lineno">  588</span>&#160;                                                   -0.048691444f,   -0.009579111f,   0.07595467f,    0.11480546f,</div><div class="line"><a name="l00589"></a><span class="lineno">  589</span>&#160;                                                   -0.09801813f,    0.019894179f,    0.08502348f,    0.004032281f,</div><div class="line"><a name="l00590"></a><span class="lineno">  590</span>&#160;                                                   0.037211012f,    0.068537936f,    -0.048005626f,  -0.091520436f,</div><div class="line"><a name="l00591"></a><span class="lineno">  591</span>&#160;                                                   -0.028379958f,   -0.01556313f,    0.06554592f,    -0.045599163f,</div><div class="line"><a name="l00592"></a><span class="lineno">  592</span>&#160;                                                   -0.01672207f,    -0.020169014f,   -0.011877351f,  -0.20212261f,</div><div class="line"><a name="l00593"></a><span class="lineno">  593</span>&#160;                                                   0.010889619f,    0.0047078193f,   0.038385306f,   0.08540671f,</div><div class="line"><a name="l00594"></a><span class="lineno">  594</span>&#160;                                                   -0.017140968f,   -0.0035865551f,  0.016678626f,   0.005633034f,</div><div class="line"><a name="l00595"></a><span class="lineno">  595</span>&#160;                                                   0.015963363f,    0.00871737f,     0.060130805f,   0.028611384f,</div><div class="line"><a name="l00596"></a><span class="lineno">  596</span>&#160;                                                   0.10109069f,     -0.015060172f,   -0.07894427f,   0.06401885f,</div><div class="line"><a name="l00597"></a><span class="lineno">  597</span>&#160;                                                   0.011584063f,    -0.024466386f,   0.0047652307f,  -0.09041358f,</div><div class="line"><a name="l00598"></a><span class="lineno">  598</span>&#160;                                                   0.030737216f,    -0.0046374933f,  0.14215417f,    -0.11823516f,</div><div class="line"><a name="l00599"></a><span class="lineno">  599</span>&#160;                                                   0.019899689f,    0.006106124f,    -0.027092824f,  0.0786356f,</div><div class="line"><a name="l00600"></a><span class="lineno">  600</span>&#160;                                                   0.05052217f,     -0.058925f,      -0.011402121f,  -0.024987547f,</div><div class="line"><a name="l00601"></a><span class="lineno">  601</span>&#160;                                
                   -0.0013661642f,  -0.06832946f,    -0.015667673f,  -0.1083353f,</div><div class="line"><a name="l00602"></a><span class="lineno">  602</span>&#160;                                                   -0.00096863037f, -0.06988685f,    -0.053350925f,  -0.027275559f,</div><div class="line"><a name="l00603"></a><span class="lineno">  603</span>&#160;                                                   -0.033664223f,   -0.07978348f,    -0.025200296f,  -0.017207067f,</div><div class="line"><a name="l00604"></a><span class="lineno">  604</span>&#160;                                                   -0.058403496f,   -0.055697463f,   0.005798788f,   0.12965427f,</div><div class="line"><a name="l00605"></a><span class="lineno">  605</span>&#160;                                                   -0.062582195f,   0.0013350133f,   -0.10482091f,   0.0379771f,</div><div class="line"><a name="l00606"></a><span class="lineno">  606</span>&#160;                                                   0.072521195f,    -0.0029455067f,  -0.13797039f,   -0.03628521f,</div><div class="line"><a name="l00607"></a><span class="lineno">  607</span>&#160;                                                   0.013806405f,    -0.017858358f,   -0.01008298f,   -0.07700066f,</div><div class="line"><a name="l00608"></a><span class="lineno">  608</span>&#160;                                                   -0.017081132f,   0.019358726f,    0.0027079724f,  0.004635139f,</div><div class="line"><a name="l00609"></a><span class="lineno">  609</span>&#160;                                                   0.062634714f,    -0.02338735f,    -0.039547626f,  -0.02050681f,</div><div class="line"><a name="l00610"></a><span class="lineno">  610</span>&#160;                                                   0.03385117f,     -0.083611414f,   0.002862572f,   -0.09421313f,</div><div class="line"><a name="l00611"></a><span class="lineno">  611</span>&#160;                                                   0.058618143f,    -0.08598433f,    0.00972939f,    0.023867095f,</div><div class="line"><a name="l00612"></a><span class="lineno">  612</span>&#160;                                                   -0.053934585f,   -0.023203006f,   0.07452513f,    -0.048767887f,</div><div class="line"><a name="l00613"></a><span class="lineno">  613</span>&#160;                                                   -0.07314807f,    -0.056307215f,   -0.10433547f,   -0.06440842f,</div><div class="line"><a name="l00614"></a><span class="lineno">  614</span>&#160;                                                   0.04328182f,     0.04389765f,     -0.020006588f,  -0.09076438f,</div><div class="line"><a name="l00615"></a><span class="lineno">  615</span>&#160;                                                   -0.11652589f,    -0.021705797f,   0.03345259f,    -0.010329105f,</div><div class="line"><a name="l00616"></a><span class="lineno">  616</span>&#160;                                                   -0.025767034f,   0.013057034f,    -0.07316461f,   -0.10145612f,</div><div class="line"><a name="l00617"></a><span class="lineno">  617</span>&#160;                                                   0.06358255f,     0.18531723f,     0.07759293f,    0.12006465f,</div><div class="line"><a name="l00618"></a><span class="lineno">  618</span>&#160;                                                   0.1305557f,      0.058638252f,    -0.03393652f,   0.09622831f,</div><div class="line"><a name="l00619"></a><span class="lineno">  619</span>&#160;                         
                          -0.16253184f,    -2.4580743e-06f, 0.079869635f,   -0.070196845f,</div><div class="line"><a name="l00620"></a><span class="lineno">  620</span>&#160;                                                   -0.005644518f,   0.06857898f,     -0.12598175f,   -0.035084512f,</div><div class="line"><a name="l00621"></a><span class="lineno">  621</span>&#160;                                                   0.03156317f,     -0.12794146f,    -0.031963028f,  0.04692781f,</div><div class="line"><a name="l00622"></a><span class="lineno">  622</span>&#160;                                                   0.030070418f,    0.0071660685f,   -0.095516115f,  -0.004643372f,</div><div class="line"><a name="l00623"></a><span class="lineno">  623</span>&#160;                                                   0.040170413f,    -0.062104587f,   -0.0037324072f, 0.0554317f,</div><div class="line"><a name="l00624"></a><span class="lineno">  624</span>&#160;                                                   0.08184801f,     -0.019164372f,   0.06791302f,    0.034257166f,</div><div class="line"><a name="l00625"></a><span class="lineno">  625</span>&#160;                                                   -0.10307039f,    0.021943003f,    0.046745934f,   0.0790918f,</div><div class="line"><a name="l00626"></a><span class="lineno">  626</span>&#160;                                                   -0.0265588f,     -0.007824208f,   0.042546265f,   -0.00977924f,</div><div class="line"><a name="l00627"></a><span class="lineno">  627</span>&#160;                                                   -0.0002440307f,  -0.017384544f,   -0.017990116f,  0.12252321f,</div><div class="line"><a name="l00628"></a><span class="lineno">  628</span>&#160;                                                   -0.014512694f,   -0.08251313f,    0.08861942f,    0.13589665f,</div><div class="line"><a name="l00629"></a><span class="lineno">  629</span>&#160;                                                   0.026351685f,    0.012641483f,    0.07466548f,    0.044301085f,</div><div class="line"><a name="l00630"></a><span class="lineno">  630</span>&#160;                                                   -0.045414884f,   -0.051112458f,   0.03444247f,    -0.08502782f,</div><div class="line"><a name="l00631"></a><span class="lineno">  631</span>&#160;                                                   -0.04106223f,    -0.028126027f,   0.028473156f,   0.10467447f</div><div class="line"><a name="l00632"></a><span class="lineno">  632</span>&#160;            });</div><div class="line"><a name="l00633"></a><span class="lineno">  633</span>&#160;</div><div class="line"><a name="l00634"></a><span class="lineno">  634</span>&#160;    <span class="keyword">auto</span> recurrentToForgetWeights =</div><div class="line"><a name="l00635"></a><span class="lineno">  635</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo20x16, {-0.057784554f,  -0.026057621f,  -0.068447545f,   -0.022581743f,</div><div class="line"><a name="l00636"></a><span class="lineno">  636</span>&#160;                                                   0.14811787f,    0.10826372f,    0.09471067f,     0.03987225f,</div><div class="line"><a name="l00637"></a><span class="lineno">  637</span>&#160;                                                   -0.0039523416f, 0.00030638507f, 0.053185795f,    0.10572994f,</div><div class="line"><a name="l00638"></a><span class="lineno">  638</span>&#160;                                                   0.08414449f,    -0.022036452f,  
-0.00066928595f, -0.09203576f,</div><div class="line"><a name="l00639"></a><span class="lineno">  639</span>&#160;                                                   0.032950465f,   -0.10985798f,   -0.023809856f,   0.0021431844f,</div><div class="line"><a name="l00640"></a><span class="lineno">  640</span>&#160;                                                   -0.02196096f,   -0.00326074f,   0.00058621005f,  -0.074678116f,</div><div class="line"><a name="l00641"></a><span class="lineno">  641</span>&#160;                                                   -0.06193199f,   0.055729095f,   0.03736828f,     0.020123724f,</div><div class="line"><a name="l00642"></a><span class="lineno">  642</span>&#160;                                                   0.061878487f,   -0.04729229f,   0.034919553f,    -0.07585433f,</div><div class="line"><a name="l00643"></a><span class="lineno">  643</span>&#160;                                                   -0.04421272f,   -0.044019096f,  0.085488975f,    0.04058006f,</div><div class="line"><a name="l00644"></a><span class="lineno">  644</span>&#160;                                                   -0.06890133f,   -0.030951202f,  -0.024628663f,   -0.07672815f,</div><div class="line"><a name="l00645"></a><span class="lineno">  645</span>&#160;                                                   0.034293607f,   0.08556707f,    -0.05293577f,    -0.033561368f,</div><div class="line"><a name="l00646"></a><span class="lineno">  646</span>&#160;                                                   -0.04899627f,   0.0241671f,     0.015736353f,    -0.095442444f,</div><div class="line"><a name="l00647"></a><span class="lineno">  647</span>&#160;                                                   -0.029564252f,  0.016493602f,   -0.035026584f,   0.022337519f,</div><div class="line"><a name="l00648"></a><span class="lineno">  648</span>&#160;                                                   -0.026871363f,  0.004780428f,   0.0077918363f,   -0.03601621f,</div><div class="line"><a name="l00649"></a><span class="lineno">  649</span>&#160;                                                   0.016435321f,   -0.03263031f,   -0.09543275f,    -0.047392778f,</div><div class="line"><a name="l00650"></a><span class="lineno">  650</span>&#160;                                                   0.013454138f,   0.028934088f,   0.01685226f,     -0.086110644f,</div><div class="line"><a name="l00651"></a><span class="lineno">  651</span>&#160;                                                   -0.046250615f,  -0.01847454f,   0.047608484f,    0.07339695f,</div><div class="line"><a name="l00652"></a><span class="lineno">  652</span>&#160;                                                   0.034546845f,   -0.04881143f,   0.009128804f,    -0.08802852f,</div><div class="line"><a name="l00653"></a><span class="lineno">  653</span>&#160;                                                   0.03761666f,    0.008096139f,   -0.014454086f,   0.014361001f,</div><div class="line"><a name="l00654"></a><span class="lineno">  654</span>&#160;                                                   -0.023502491f,  -0.0011840804f, -0.07607001f,    0.001856849f,</div><div class="line"><a name="l00655"></a><span class="lineno">  655</span>&#160;                                                   -0.06509276f,   -0.006021153f,  -0.08570962f,    -0.1451793f,</div><div class="line"><a name="l00656"></a><span class="lineno">  656</span>&#160;                                                   0.060212336f,   0.055259194f,   
0.06974018f,     0.049454916f,</div><div class="line"><a name="l00657"></a><span class="lineno">  657</span>&#160;                                                   -0.027794661f,  -0.08077226f,   -0.016179763f,   0.1169753f,</div><div class="line"><a name="l00658"></a><span class="lineno">  658</span>&#160;                                                   0.17213494f,    -0.0056326236f, -0.053934924f,   -0.0124349f,</div><div class="line"><a name="l00659"></a><span class="lineno">  659</span>&#160;                                                   -0.11520337f,   0.05409887f,    0.088759385f,    0.0019655675f,</div><div class="line"><a name="l00660"></a><span class="lineno">  660</span>&#160;                                                   0.0042065294f,  0.03881498f,    0.019844765f,    0.041858196f,</div><div class="line"><a name="l00661"></a><span class="lineno">  661</span>&#160;                                                   -0.05695512f,   0.047233116f,   0.038937137f,    -0.06542224f,</div><div class="line"><a name="l00662"></a><span class="lineno">  662</span>&#160;                                                   0.014429736f,   -0.09719407f,   0.13908425f,     -0.05379757f,</div><div class="line"><a name="l00663"></a><span class="lineno">  663</span>&#160;                                                   0.012321099f,   0.082840554f,   -0.029899208f,   0.044217527f,</div><div class="line"><a name="l00664"></a><span class="lineno">  664</span>&#160;                                                   0.059855383f,   0.07711018f,    -0.045319796f,   0.0948846f,</div><div class="line"><a name="l00665"></a><span class="lineno">  665</span>&#160;                                                   -0.011724666f,  -0.0033288454f, -0.033542685f,   -0.04764985f,</div><div class="line"><a name="l00666"></a><span class="lineno">  666</span>&#160;                                                   -0.13873616f,   0.040668588f,   0.034832682f,    -0.015319203f,</div><div class="line"><a name="l00667"></a><span class="lineno">  667</span>&#160;                                                   -0.018715994f,  0.046002675f,   0.0599172f,      -0.043107376f,</div><div class="line"><a name="l00668"></a><span class="lineno">  668</span>&#160;                                                   0.0294216f,     -0.002314414f,  -0.022424703f,   0.0030315618f,</div><div class="line"><a name="l00669"></a><span class="lineno">  669</span>&#160;                                                   0.0014641669f,  0.0029166266f,  -0.11878115f,    0.013738511f,</div><div class="line"><a name="l00670"></a><span class="lineno">  670</span>&#160;                                                   0.12375372f,    -0.0006038222f, 0.029104086f,    0.087442465f,</div><div class="line"><a name="l00671"></a><span class="lineno">  671</span>&#160;                                                   0.052958444f,   0.07558703f,    0.04817258f,     0.044462286f,</div><div class="line"><a name="l00672"></a><span class="lineno">  672</span>&#160;                                                   -0.015213451f,  -0.08783778f,   -0.0561384f,     -0.003008196f,</div><div class="line"><a name="l00673"></a><span class="lineno">  673</span>&#160;                                                   0.047060397f,   -0.002058388f,  0.03429439f,     -0.018839769f,</div><div class="line"><a name="l00674"></a><span class="lineno">  674</span>&#160;                                                   0.024734668f,   0.024614193f,   
-0.042046934f,   0.09597743f,</div><div class="line"><a name="l00675"></a><span class="lineno">  675</span>&#160;                                                   -0.0043254104f, 0.04320769f,    0.0064070094f,   -0.0019131786f,</div><div class="line"><a name="l00676"></a><span class="lineno">  676</span>&#160;                                                   -0.02558259f,   -0.022822596f,  -0.023273505f,   -0.02464396f,</div><div class="line"><a name="l00677"></a><span class="lineno">  677</span>&#160;                                                   -0.10991725f,   -0.006240552f,  0.0074488563f,   0.024044557f,</div><div class="line"><a name="l00678"></a><span class="lineno">  678</span>&#160;                                                   0.04383914f,    -0.046476185f,  0.028658995f,    0.060410924f,</div><div class="line"><a name="l00679"></a><span class="lineno">  679</span>&#160;                                                   0.050786525f,   0.009452605f,   -0.0073054377f,  -0.024810238f,</div><div class="line"><a name="l00680"></a><span class="lineno">  680</span>&#160;                                                   0.0052906186f,  0.0066939713f,  -0.0020913032f,  0.014515517f,</div><div class="line"><a name="l00681"></a><span class="lineno">  681</span>&#160;                                                   0.015898481f,   0.021362653f,   -0.030262267f,   0.016587038f,</div><div class="line"><a name="l00682"></a><span class="lineno">  682</span>&#160;                                                   -0.011442813f,  0.041154444f,   -0.007631438f,   -0.03423484f,</div><div class="line"><a name="l00683"></a><span class="lineno">  683</span>&#160;                                                   -0.010977775f,  0.036152758f,   0.0066366293f,   0.11915515f,</div><div class="line"><a name="l00684"></a><span class="lineno">  684</span>&#160;                                                   0.02318443f,    -0.041350313f,  0.021485701f,    -0.10906167f,</div><div class="line"><a name="l00685"></a><span class="lineno">  685</span>&#160;                                                   -0.028218046f,  -0.00954771f,   0.020531068f,    -0.11995105f,</div><div class="line"><a name="l00686"></a><span class="lineno">  686</span>&#160;                                                   -0.03672871f,   0.024019798f,   0.014255957f,    -0.05221243f,</div><div class="line"><a name="l00687"></a><span class="lineno">  687</span>&#160;                                                   -0.00661567f,   -0.04630967f,   0.033188973f,    0.10107534f,</div><div class="line"><a name="l00688"></a><span class="lineno">  688</span>&#160;                                                   -0.014027541f,  0.030796422f,   -0.10270911f,    -0.035999842f,</div><div class="line"><a name="l00689"></a><span class="lineno">  689</span>&#160;                                                   0.15443139f,    0.07684145f,    0.036571592f,    -0.035900835f,</div><div class="line"><a name="l00690"></a><span class="lineno">  690</span>&#160;                                                   -0.0034699554f, 0.06209149f,    0.015920248f,    -0.031122351f,</div><div class="line"><a name="l00691"></a><span class="lineno">  691</span>&#160;                                                   -0.03858649f,   0.01849943f,    0.13872518f,     0.01503974f,</div><div class="line"><a name="l00692"></a><span class="lineno">  692</span>&#160;                                                   0.069941424f,   -0.06948533f,   
-0.0088794185f,  0.061282158f,</div><div class="line"><a name="l00693"></a><span class="lineno">  693</span>&#160;                                                   -0.047401894f,  0.03100163f,    -0.041533746f,   -0.10430945f,</div><div class="line"><a name="l00694"></a><span class="lineno">  694</span>&#160;                                                   0.044574402f,   -0.01425562f,   -0.024290353f,   0.034563623f,</div><div class="line"><a name="l00695"></a><span class="lineno">  695</span>&#160;                                                   0.05866852f,    0.023947537f,   -0.09445152f,    0.035450947f,</div><div class="line"><a name="l00696"></a><span class="lineno">  696</span>&#160;                                                   0.02247216f,    -0.0042998926f, 0.061146557f,    -0.10250651f,</div><div class="line"><a name="l00697"></a><span class="lineno">  697</span>&#160;                                                   0.020881841f,   -0.06747029f,   0.10062043f,     -0.0023941975f,</div><div class="line"><a name="l00698"></a><span class="lineno">  698</span>&#160;                                                   0.03532124f,    -0.016341697f,  0.09685456f,     -0.016764693f,</div><div class="line"><a name="l00699"></a><span class="lineno">  699</span>&#160;                                                   0.051808182f,   0.05875331f,    -0.04536488f,    0.001626336f,</div><div class="line"><a name="l00700"></a><span class="lineno">  700</span>&#160;                                                   -0.028892258f,  -0.01048663f,   -0.009793449f,   -0.017093895f,</div><div class="line"><a name="l00701"></a><span class="lineno">  701</span>&#160;                                                   0.010987891f,   0.02357273f,    -0.00010856845f, 0.0099760275f,</div><div class="line"><a name="l00702"></a><span class="lineno">  702</span>&#160;                                                   -0.001845119f,  -0.03551521f,   0.0018358806f,   0.05763657f,</div><div class="line"><a name="l00703"></a><span class="lineno">  703</span>&#160;                                                   -0.01769146f,   0.040995963f,   0.02235177f,     -0.060430344f,</div><div class="line"><a name="l00704"></a><span class="lineno">  704</span>&#160;                                                   0.11475477f,    -0.023854522f,  0.10071741f,     0.0686208f,</div><div class="line"><a name="l00705"></a><span class="lineno">  705</span>&#160;                                                   -0.014250481f,  0.034261297f,   0.047418304f,    0.08562733f,</div><div class="line"><a name="l00706"></a><span class="lineno">  706</span>&#160;                                                   -0.030519066f,  0.0060542435f,  0.014653856f,    -0.038836084f,</div><div class="line"><a name="l00707"></a><span class="lineno">  707</span>&#160;                                                   0.04096551f,    0.032249358f,   -0.08355519f,    -0.026823482f,</div><div class="line"><a name="l00708"></a><span class="lineno">  708</span>&#160;                                                   0.056386515f,   -0.010401743f,  -0.028396193f,   0.08507674f,</div><div class="line"><a name="l00709"></a><span class="lineno">  709</span>&#160;                                                   0.014410365f,   0.020995233f,   0.17040324f,     0.11511526f,</div><div class="line"><a name="l00710"></a><span class="lineno">  710</span>&#160;                                                   0.02459721f,    0.0066619175f,  
0.025853224f,    -0.023133837f,</div><div class="line"><a name="l00711"></a><span class="lineno">  711</span>&#160;                                                   -0.081302024f,  0.017264642f,   -0.009585969f,   0.09491168f,</div><div class="line"><a name="l00712"></a><span class="lineno">  712</span>&#160;                                                   -0.051313367f,  0.054532815f,   -0.014298593f,   0.10657464f,</div><div class="line"><a name="l00713"></a><span class="lineno">  713</span>&#160;                                                   0.007076659f,   0.10964551f,    0.0409152f,      0.008275321f,</div><div class="line"><a name="l00714"></a><span class="lineno">  714</span>&#160;                                                   -0.07283536f,   0.07937492f,    0.04192024f,     -0.1075027f</div><div class="line"><a name="l00715"></a><span class="lineno">  715</span>&#160;            });</div><div class="line"><a name="l00716"></a><span class="lineno">  716</span>&#160;</div><div class="line"><a name="l00717"></a><span class="lineno">  717</span>&#160;    <span class="keyword">auto</span> recurrentToCellWeights =</div><div class="line"><a name="l00718"></a><span class="lineno">  718</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo20x16, {-0.037322544f,   0.018592842f,   0.0056175636f,  -0.06253426f,</div><div class="line"><a name="l00719"></a><span class="lineno">  719</span>&#160;                                                   0.055647098f,    -0.05713207f,   -0.05626563f,   0.005559383f,</div><div class="line"><a name="l00720"></a><span class="lineno">  720</span>&#160;                                                   0.03375411f,     -0.025757805f,  -0.088049285f,  0.06017052f,</div><div class="line"><a name="l00721"></a><span class="lineno">  721</span>&#160;                                                   -0.06570978f,    0.007384076f,   0.035123326f,   -0.07920549f,</div><div class="line"><a name="l00722"></a><span class="lineno">  722</span>&#160;                                                   0.053676967f,    0.044480428f,   -0.07663568f,   0.0071805613f,</div><div class="line"><a name="l00723"></a><span class="lineno">  723</span>&#160;                                                   0.08089997f,     0.05143358f,    0.038261272f,   0.03339287f,</div><div class="line"><a name="l00724"></a><span class="lineno">  724</span>&#160;                                                   -0.027673481f,   0.044746667f,   0.028349208f,   0.020090483f,</div><div class="line"><a name="l00725"></a><span class="lineno">  725</span>&#160;                                                   -0.019443132f,   -0.030755889f,  -0.0040000007f, 0.04465846f,</div><div class="line"><a name="l00726"></a><span class="lineno">  726</span>&#160;                                                   -0.021585021f,   0.0031670958f,  0.0053199246f,  -0.056117613f,</div><div class="line"><a name="l00727"></a><span class="lineno">  727</span>&#160;                                                   -0.10893326f,    0.076739706f,   -0.08509834f,   -0.027997585f,</div><div class="line"><a name="l00728"></a><span class="lineno">  728</span>&#160;                                                   0.037871376f,    0.01449768f,    -0.09002357f,   -0.06111149f,</div><div class="line"><a name="l00729"></a><span class="lineno">  729</span>&#160;                                                   -0.046195522f,   0.0422062f,     -0.005683705f,  -0.1253618f,</div><div class="line"><a 
name="l00730"></a><span class="lineno">  730</span>&#160;                                                   -0.012925729f,   -0.04890792f,   0.06985068f,    0.037654128f,</div><div class="line"><a name="l00731"></a><span class="lineno">  731</span>&#160;                                                   0.03398274f,     -0.004781977f,  0.007032333f,   -0.031787455f,</div><div class="line"><a name="l00732"></a><span class="lineno">  732</span>&#160;                                                   0.010868644f,    -0.031489216f,  0.09525667f,    0.013939797f,</div><div class="line"><a name="l00733"></a><span class="lineno">  733</span>&#160;                                                   0.0058680447f,   0.0167067f,     0.02668468f,    -0.04797466f,</div><div class="line"><a name="l00734"></a><span class="lineno">  734</span>&#160;                                                   -0.048885044f,   -0.12722108f,   0.035304096f,   0.06554885f,</div><div class="line"><a name="l00735"></a><span class="lineno">  735</span>&#160;                                                   0.00972396f,     -0.039238118f,  -0.05159735f,   -0.11329045f,</div><div class="line"><a name="l00736"></a><span class="lineno">  736</span>&#160;                                                   0.1613692f,      -0.03750952f,   0.06529313f,    -0.071974665f,</div><div class="line"><a name="l00737"></a><span class="lineno">  737</span>&#160;                                                   -0.11769596f,    0.015524369f,   -0.0013754242f, -0.12446318f,</div><div class="line"><a name="l00738"></a><span class="lineno">  738</span>&#160;                                                   0.02786344f,     -0.014179351f,  0.005264273f,   0.14376344f,</div><div class="line"><a name="l00739"></a><span class="lineno">  739</span>&#160;                                                   0.015983658f,    0.03406988f,    -0.06939408f,   0.040699873f,</div><div class="line"><a name="l00740"></a><span class="lineno">  740</span>&#160;                                                   0.02111075f,     0.09669095f,    0.041345075f,   -0.08316494f,</div><div class="line"><a name="l00741"></a><span class="lineno">  741</span>&#160;                                                   -0.07684199f,    -0.045768797f,  0.032298047f,   -0.041805092f,</div><div class="line"><a name="l00742"></a><span class="lineno">  742</span>&#160;                                                   0.0119405f,      0.0061010392f,  0.12652606f,    0.0064572375f,</div><div class="line"><a name="l00743"></a><span class="lineno">  743</span>&#160;                                                   -0.024950314f,   0.11574242f,    0.04508852f,    -0.04335324f,</div><div class="line"><a name="l00744"></a><span class="lineno">  744</span>&#160;                                                   0.06760663f,     -0.027437469f,  0.07216407f,    0.06977076f,</div><div class="line"><a name="l00745"></a><span class="lineno">  745</span>&#160;                                                   -0.05438599f,    0.034033038f,   -0.028602652f,  0.05346137f,</div><div class="line"><a name="l00746"></a><span class="lineno">  746</span>&#160;                                                   0.043184172f,    -0.037189785f,  0.10420091f,    0.00882477f,</div><div class="line"><a name="l00747"></a><span class="lineno">  747</span>&#160;                                                   -0.054019816f,   -0.074273005f,  -0.030617684f,  -0.0028467078f,</div><div class="line"><a 
name="l00748"></a><span class="lineno">  748</span>&#160;                                                   0.024302477f,    -0.0038869337f, 0.005332455f,   0.0013399826f,</div><div class="line"><a name="l00749"></a><span class="lineno">  749</span>&#160;                                                   0.04361412f,     -0.007001822f,  0.09631092f,    -0.06702025f,</div><div class="line"><a name="l00750"></a><span class="lineno">  750</span>&#160;                                                   -0.042049985f,   -0.035070654f,  -0.04103342f,   -0.10273396f,</div><div class="line"><a name="l00751"></a><span class="lineno">  751</span>&#160;                                                   0.0544271f,      0.037184782f,   -0.13150354f,   -0.0058036847f,</div><div class="line"><a name="l00752"></a><span class="lineno">  752</span>&#160;                                                   -0.008264958f,   0.042035464f,   0.05891794f,    0.029673764f,</div><div class="line"><a name="l00753"></a><span class="lineno">  753</span>&#160;                                                   0.0063542654f,   0.044788733f,   0.054816857f,   0.062257513f,</div><div class="line"><a name="l00754"></a><span class="lineno">  754</span>&#160;                                                   -0.00093483756f, 0.048938446f,   -0.004952862f,  -0.007730018f,</div><div class="line"><a name="l00755"></a><span class="lineno">  755</span>&#160;                                                   -0.04043371f,    -0.017094059f,  0.07229206f,    -0.023670016f,</div><div class="line"><a name="l00756"></a><span class="lineno">  756</span>&#160;                                                   -0.052195564f,   -0.025616996f,  -0.01520939f,   0.045104615f,</div><div class="line"><a name="l00757"></a><span class="lineno">  757</span>&#160;                                                   -0.007376126f,   0.003533447f,   0.006570588f,   0.056037236f,</div><div class="line"><a name="l00758"></a><span class="lineno">  758</span>&#160;                                                   0.12436656f,     0.051817212f,   0.028532185f,   -0.08686856f,</div><div class="line"><a name="l00759"></a><span class="lineno">  759</span>&#160;                                                   0.11868599f,     0.07663395f,    -0.07323171f,   0.03463402f,</div><div class="line"><a name="l00760"></a><span class="lineno">  760</span>&#160;                                                   -0.050708205f,   -0.04458982f,   -0.11590894f,   0.021273347f,</div><div class="line"><a name="l00761"></a><span class="lineno">  761</span>&#160;                                                   0.1251325f,      -0.15313013f,   -0.12224372f,   0.17228661f,</div><div class="line"><a name="l00762"></a><span class="lineno">  762</span>&#160;                                                   0.023029093f,    0.086124025f,   0.006445803f,   -0.03496501f,</div><div class="line"><a name="l00763"></a><span class="lineno">  763</span>&#160;                                                   0.028332196f,    0.04449512f,    -0.042436164f,  -0.026587414f,</div><div class="line"><a name="l00764"></a><span class="lineno">  764</span>&#160;                                                   -0.006041347f,   -0.09292539f,   -0.05678812f,   0.03897832f,</div><div class="line"><a name="l00765"></a><span class="lineno">  765</span>&#160;                                                   0.09465633f,     0.008115513f,   -0.02171956f,   0.08304309f,</div><div class="line"><a 
name="l00766"></a><span class="lineno">  766</span>&#160;                                                   0.071401566f,    0.019622514f,   0.032163795f,   -0.004167056f,</div><div class="line"><a name="l00767"></a><span class="lineno">  767</span>&#160;                                                   0.02295182f,     0.030739572f,   0.056506045f,   0.004612461f,</div><div class="line"><a name="l00768"></a><span class="lineno">  768</span>&#160;                                                   0.06524936f,     0.059999723f,   0.046395954f,   -0.0045512207f,</div><div class="line"><a name="l00769"></a><span class="lineno">  769</span>&#160;                                                   -0.1335546f,     -0.030136576f,  0.11584653f,    -0.014678886f,</div><div class="line"><a name="l00770"></a><span class="lineno">  770</span>&#160;                                                   0.0020118146f,   -0.09688814f,   -0.0790206f,    0.039770417f,</div><div class="line"><a name="l00771"></a><span class="lineno">  771</span>&#160;                                                   -0.0329582f,     0.07922767f,    0.029322514f,   0.026405897f,</div><div class="line"><a name="l00772"></a><span class="lineno">  772</span>&#160;                                                   0.04207835f,     -0.07073373f,   0.063781224f,   0.0859677f,</div><div class="line"><a name="l00773"></a><span class="lineno">  773</span>&#160;                                                   -0.10925287f,    -0.07011058f,   0.048005477f,   0.03438226f,</div><div class="line"><a name="l00774"></a><span class="lineno">  774</span>&#160;                                                   -0.09606514f,    -0.006669445f,  -0.043381985f,  0.04240257f,</div><div class="line"><a name="l00775"></a><span class="lineno">  775</span>&#160;                                                   -0.06955775f,    -0.06769346f,   0.043903265f,   -0.026784198f,</div><div class="line"><a name="l00776"></a><span class="lineno">  776</span>&#160;                                                   -0.017840602f,   0.024307009f,   -0.040079936f,  -0.019946516f,</div><div class="line"><a name="l00777"></a><span class="lineno">  777</span>&#160;                                                   0.045318738f,    -0.12233574f,   0.026170589f,   0.0074471775f,</div><div class="line"><a name="l00778"></a><span class="lineno">  778</span>&#160;                                                   0.15978073f,     0.10185836f,    0.10298046f,    -0.015476589f,</div><div class="line"><a name="l00779"></a><span class="lineno">  779</span>&#160;                                                   -0.039390966f,   -0.072174534f,  0.0739445f,     -0.1211869f,</div><div class="line"><a name="l00780"></a><span class="lineno">  780</span>&#160;                                                   -0.0347889f,     -0.07943156f,   0.014809798f,   -0.12412325f,</div><div class="line"><a name="l00781"></a><span class="lineno">  781</span>&#160;                                                   -0.0030663363f,  0.039695457f,   0.0647603f,     -0.08291318f,</div><div class="line"><a name="l00782"></a><span class="lineno">  782</span>&#160;                                                   -0.018529687f,   -0.004423833f,  0.0037507233f,  0.084633216f,</div><div class="line"><a name="l00783"></a><span class="lineno">  783</span>&#160;                                                   -0.01514876f,    -0.056505352f,  -0.012800942f,  -0.06994386f,</div><div class="line"><a 
name="l00784"></a><span class="lineno">  784</span>&#160;                                                   0.012962922f,    -0.031234352f,  0.07029052f,    0.016418684f,</div><div class="line"><a name="l00785"></a><span class="lineno">  785</span>&#160;                                                   0.03618972f,     0.055686004f,   -0.08663945f,   -0.017404709f,</div><div class="line"><a name="l00786"></a><span class="lineno">  786</span>&#160;                                                   -0.054761406f,   0.029065743f,   0.052404847f,   0.020238016f,</div><div class="line"><a name="l00787"></a><span class="lineno">  787</span>&#160;                                                   0.0048197987f,   -0.0214882f,    0.07078733f,    0.013016777f,</div><div class="line"><a name="l00788"></a><span class="lineno">  788</span>&#160;                                                   0.06262858f,     0.009184685f,   0.020785125f,   -0.043904778f,</div><div class="line"><a name="l00789"></a><span class="lineno">  789</span>&#160;                                                   -0.0270329f,     -0.03299152f,   -0.060088247f,  -0.015162964f,</div><div class="line"><a name="l00790"></a><span class="lineno">  790</span>&#160;                                                   -0.001828936f,   0.12642565f,    -0.056757294f,  0.013586685f,</div><div class="line"><a name="l00791"></a><span class="lineno">  791</span>&#160;                                                   0.09232601f,     -0.035886683f,  0.06000002f,    0.05229691f,</div><div class="line"><a name="l00792"></a><span class="lineno">  792</span>&#160;                                                   -0.052580316f,   -0.082029596f,  -0.010794592f,  0.012947712f,</div><div class="line"><a name="l00793"></a><span class="lineno">  793</span>&#160;                                                   -0.036429964f,   -0.085508935f,  -0.13127148f,   -0.017744139f,</div><div class="line"><a name="l00794"></a><span class="lineno">  794</span>&#160;                                                   0.031502828f,    0.036232427f,   -0.031581745f,  0.023051167f,</div><div class="line"><a name="l00795"></a><span class="lineno">  795</span>&#160;                                                   -0.05325106f,    -0.03421577f,   0.028793324f,   -0.034633752f,</div><div class="line"><a name="l00796"></a><span class="lineno">  796</span>&#160;                                                   -0.009881397f,   -0.043551125f,  -0.018609839f,  0.0019097115f,</div><div class="line"><a name="l00797"></a><span class="lineno">  797</span>&#160;                                                   -0.008799762f,   0.056595087f,   0.0022273948f,  0.055752404f</div><div class="line"><a name="l00798"></a><span class="lineno">  798</span>&#160;            });</div><div class="line"><a name="l00799"></a><span class="lineno">  799</span>&#160;</div><div class="line"><a name="l00800"></a><span class="lineno">  800</span>&#160;    <span class="keyword">auto</span> recurrentToOutputWeights =</div><div class="line"><a name="l00801"></a><span class="lineno">  801</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo20x16, {0.025825322f, -0.05813119f, 0.09495884f,-0.045984812f, -0.01255415f,</div><div class="line"><a name="l00802"></a><span class="lineno">  802</span>&#160;                                                    -0.0026479573f,-0.08196161f,-0.054914974f,-0.0046604523f,</div><div class="line"><a name="l00803"></a><span class="lineno">  803</span>&#160;   
                                                -0.029587349f, -0.044576716f,  -0.07480124f,  -0.082868785f,</div><div class="line"><a name="l00804"></a><span class="lineno">  804</span>&#160;                                                   0.023254942f,    0.027502948f, -0.0039728214f, -0.08683098f,</div><div class="line"><a name="l00805"></a><span class="lineno">  805</span>&#160;                                                   -0.08116779f,  -0.014675607f,   -0.037924774f, -0.023314456f,</div><div class="line"><a name="l00806"></a><span class="lineno">  806</span>&#160;                                                   -0.007401714f, -0.09255757f,  0.029460307f,    -0.08829125f,</div><div class="line"><a name="l00807"></a><span class="lineno">  807</span>&#160;                                                    -0.005139627f,  -0.08989442f,  -0.0555066f,   0.13596267f,</div><div class="line"><a name="l00808"></a><span class="lineno">  808</span>&#160;                                                   -0.025062224f, -0.048351806f,  -0.03850004f,  0.07266485f,</div><div class="line"><a name="l00809"></a><span class="lineno">  809</span>&#160;                                                   -0.022414139f,   0.05940088f, 0.075114764f,   0.09597592f,</div><div class="line"><a name="l00810"></a><span class="lineno">  810</span>&#160;                                                   -0.010211725f, -0.0049794707f,  -0.011523867f, -0.025980417f,</div><div class="line"><a name="l00811"></a><span class="lineno">  811</span>&#160;                                                   0.072999895f,  0.11091378f,   -0.081685916f,   0.014416728f,</div><div class="line"><a name="l00812"></a><span class="lineno">  812</span>&#160;                                                    0.043229222f,   0.034178585f,  -0.07530371f,  0.035837382f,</div><div class="line"><a name="l00813"></a><span class="lineno">  813</span>&#160;                                                   -0.085607f, -0.007721233f,  -0.03287832f,  -0.043848954f,</div><div class="line"><a name="l00814"></a><span class="lineno">  814</span>&#160;                                                   -0.06404588f,    -0.06632928f, -0.073643476f,  0.008214239f,</div><div class="line"><a name="l00815"></a><span class="lineno">  815</span>&#160;                                                   -0.045984086f, 0.039764922f,    0.03474462f, 0.060612556f,</div><div class="line"><a name="l00816"></a><span class="lineno">  816</span>&#160;                                                   -0.080590084f, 0.049127717f,  0.04151091f,     -0.030063879f,</div><div class="line"><a name="l00817"></a><span class="lineno">  817</span>&#160;                                                    0.008801774f,   -0.023021035f, -0.019558564f, 0.05158114f,</div><div class="line"><a name="l00818"></a><span class="lineno">  818</span>&#160;                                                   -0.010947698f, -0.011825728f,  0.0075720972f, 0.0699727f,</div><div class="line"><a name="l00819"></a><span class="lineno">  819</span>&#160;                                                   -0.0039981045f,  0.069350146f, 0.08799282f,    0.016156472f,</div><div class="line"><a name="l00820"></a><span class="lineno">  820</span>&#160;                                                   0.035502106f,  0.11695009f,     0.006217345f, 0.13392477f,</div><div class="line"><a name="l00821"></a><span class="lineno">  821</span>&#160;                                                   
-0.037875112f, 0.025745004f,  0.08940699f,     -0.00924166f,</div><div class="line"><a name="l00822"></a><span class="lineno">  822</span>&#160;                                                    0.0046702605f,  -0.036598757f, -0.08811812f,  0.10522024f,</div><div class="line"><a name="l00823"></a><span class="lineno">  823</span>&#160;                                                   -0.032441203f, 0.008176899f,   -0.04454919f,  0.07058152f,</div><div class="line"><a name="l00824"></a><span class="lineno">  824</span>&#160;                                                   0.0067963637f,   0.039206743f, 0.03259838f,    0.03725492f,</div><div class="line"><a name="l00825"></a><span class="lineno">  825</span>&#160;                                                   -0.09515802f,  0.013326398f,    -0.052055415f, -0.025676316f,</div><div class="line"><a name="l00826"></a><span class="lineno">  826</span>&#160;                                                   0.03198509f,   -0.015951829f, -0.058556724f,   0.036879618f,</div><div class="line"><a name="l00827"></a><span class="lineno">  827</span>&#160;                                                    0.043357447f,   0.028362012f,  -0.05908629f,  0.0059240665f,</div><div class="line"><a name="l00828"></a><span class="lineno">  828</span>&#160;                                                   -0.04995891f, -0.019187413f,0.0276265f, -0.01628143f, 0.0025863599f,</div><div class="line"><a name="l00829"></a><span class="lineno">  829</span>&#160;                                                   0.08800015f, 0.035250366f,   -0.022165963f, -0.07328642f,</div><div class="line"><a name="l00830"></a><span class="lineno">  830</span>&#160;                                                   -0.009415526f,   -0.07455109f, 0.11690406f,    0.0363299f,</div><div class="line"><a name="l00831"></a><span class="lineno">  831</span>&#160;                                                   0.07411125f,   0.042103454f,    -0.009660886f, 0.019076364f,</div><div class="line"><a name="l00832"></a><span class="lineno">  832</span>&#160;                                                   0.018299393f, -0.046004917f, 0.08891175f,0.0431396f, -0.026327137f,</div><div class="line"><a name="l00833"></a><span class="lineno">  833</span>&#160;                                                   -0.051502608f, 0.08979574f,   -0.051670972f,   0.04940282f,</div><div class="line"><a name="l00834"></a><span class="lineno">  834</span>&#160;                                                    -0.07491107f,   -0.021240504f, 0.022596184f,  -0.034280192f,</div><div class="line"><a name="l00835"></a><span class="lineno">  835</span>&#160;                                                   0.060163025f, -0.058211457f,  -0.051837247f, -0.01349775f,</div><div class="line"><a name="l00836"></a><span class="lineno">  836</span>&#160;                                                   -0.04639988f,    -0.035936575f, -0.011681591f,  0.064818054f,</div><div class="line"><a name="l00837"></a><span class="lineno">  837</span>&#160;                                                   0.0073146066f, -0.021745546f,   -0.043124277f, -0.06471268f,</div><div class="line"><a name="l00838"></a><span class="lineno">  838</span>&#160;                                                   -0.07053354f,  -0.029321948f, -0.05330136f,    0.016933719f,</div><div class="line"><a name="l00839"></a><span class="lineno">  839</span>&#160;                                                    -0.053782392f,  0.13747959f,   
-0.1361751f,   -0.11569455f,</div><div class="line"><a name="l00840"></a><span class="lineno">  840</span>&#160;                                                   0.0033329215f, 0.05693899f,    -0.053219706f, 0.063698f,</div><div class="line"><a name="l00841"></a><span class="lineno">  841</span>&#160;                                                   0.07977434f,     -0.07924483f, 0.06936997f,    0.0034815092f,</div><div class="line"><a name="l00842"></a><span class="lineno">  842</span>&#160;                                                   -0.007305279f, -0.037325785f,   -0.07251102f, -0.033633437f,</div><div class="line"><a name="l00843"></a><span class="lineno">  843</span>&#160;                                                   -0.08677009f,  0.091591336f,  -0.14165086f,    0.021752775f,</div><div class="line"><a name="l00844"></a><span class="lineno">  844</span>&#160;                                                    0.019683983f,   0.0011612234f, -0.058154266f, 0.049996935f,</div><div class="line"><a name="l00845"></a><span class="lineno">  845</span>&#160;                                                   0.0288841f, -0.0024567875f, -0.14345716f, 0.010955264f,-0.10234828f,</div><div class="line"><a name="l00846"></a><span class="lineno">  846</span>&#160;                                                   0.1183656f, -0.0010731248f, -0.023590032f,-0.072285876f,-0.0724771f,</div><div class="line"><a name="l00847"></a><span class="lineno">  847</span>&#160;                                                   -0.026382286f, -0.0014920527f, 0.042667855f,  0.0018776858f,</div><div class="line"><a name="l00848"></a><span class="lineno">  848</span>&#160;                                                   0.02986552f,     0.009814309f, 0.0733756f,     0.12289186f,</div><div class="line"><a name="l00849"></a><span class="lineno">  849</span>&#160;                                                   0.018043943f,  -0.0458958f,     0.049412545f, 0.033632483f,</div><div class="line"><a name="l00850"></a><span class="lineno">  850</span>&#160;                                                   0.05495232f,   0.036686596f,  -0.013781798f,   -0.010036754f,</div><div class="line"><a name="l00851"></a><span class="lineno">  851</span>&#160;                                                    0.02576849f,    -0.08307328f,  0.010112348f,  0.042521734f,</div><div class="line"><a name="l00852"></a><span class="lineno">  852</span>&#160;                                                   -0.05869831f, -0.071689695f, 0.03876447f, -0.13275425f, -0.0352966f,</div><div class="line"><a name="l00853"></a><span class="lineno">  853</span>&#160;                                                   -0.023077697f, 0.10285965f,    0.084736146f,  0.15568255f,</div><div class="line"><a name="l00854"></a><span class="lineno">  854</span>&#160;                                                   -0.00040734606f, 0.027835453f, -0.10292561f,   -0.032401145f,</div><div class="line"><a name="l00855"></a><span class="lineno">  855</span>&#160;                                                   0.10053256f,   -0.026142767f,   -0.08271222f, -0.0030240538f,</div><div class="line"><a name="l00856"></a><span class="lineno">  856</span>&#160;                                                   -0.016368777f, 0.1070414f,    0.042672627f,    0.013456989f,</div><div class="line"><a name="l00857"></a><span class="lineno">  857</span>&#160;                                                    -0.0437609f,    -0.022309763f, 0.11576483f,   
0.04108048f,</div><div class="line"><a name="l00858"></a><span class="lineno">  858</span>&#160;                                                   0.061026827f, -0.0190714f,  -0.0869359f, 0.037901703f,  0.0610107f,</div><div class="line"><a name="l00859"></a><span class="lineno">  859</span>&#160;                                                   0.07202949f, 0.01675338f,    0.086139716f,  -0.08795751f,</div><div class="line"><a name="l00860"></a><span class="lineno">  860</span>&#160;                                                   -0.014898893f,   -0.023771819f, -0.01965048f,   0.007955471f,</div><div class="line"><a name="l00861"></a><span class="lineno">  861</span>&#160;                                                   -0.043740474f, 0.03346837f,     -0.10549954f, 0.090567775f,</div><div class="line"><a name="l00862"></a><span class="lineno">  862</span>&#160;                                                   0.042013682f,  -0.03176985f,  0.12569028f,     -0.02421228f,</div><div class="line"><a name="l00863"></a><span class="lineno">  863</span>&#160;                                                    -0.029526481f,  0.023851605f,  0.031539805f,  0.05292009f,</div><div class="line"><a name="l00864"></a><span class="lineno">  864</span>&#160;                                                   -0.02344001f, -0.07811758f,   -0.08834428f,  0.10094801f,</div><div class="line"><a name="l00865"></a><span class="lineno">  865</span>&#160;                                                   0.16594367f,     -0.06861939f, -0.021256343f,  -0.041093912f,</div><div class="line"><a name="l00866"></a><span class="lineno">  866</span>&#160;                                                   -0.06669611f,  0.035498552f,    0.021757556f, -0.09302526f,</div><div class="line"><a name="l00867"></a><span class="lineno">  867</span>&#160;                                                   -0.015403468f, -0.06614931f,  -0.051798206f,   -0.013874718f,</div><div class="line"><a name="l00868"></a><span class="lineno">  868</span>&#160;                                                    0.03630673f,    0.010412845f,  -0.08077351f,  0.046185967f,</div><div class="line"><a name="l00869"></a><span class="lineno">  869</span>&#160;                                                   0.0035662893f, 0.03541868f,    -0.094149634f, -0.034814864f,</div><div class="line"><a name="l00870"></a><span class="lineno">  870</span>&#160;                                                   0.003128424f,    -0.020674974f, -0.03944324f,   -0.008110165f,</div><div class="line"><a name="l00871"></a><span class="lineno">  871</span>&#160;                                                   -0.11113267f,  0.08484226f,     0.043586485f, 0.040582247f,</div><div class="line"><a name="l00872"></a><span class="lineno">  872</span>&#160;                                                   0.0968012f,    -0.065249965f, -0.028036479f,   0.0050708856f,</div><div class="line"><a name="l00873"></a><span class="lineno">  873</span>&#160;                                                    0.0017462453f,  0.0326779f,    0.041296225f,  0.09164146f,</div><div class="line"><a name="l00874"></a><span class="lineno">  874</span>&#160;                                                   -0.047743853f, -0.015952192f,  -0.034451712f, 0.084197424f,</div><div class="line"><a name="l00875"></a><span class="lineno">  875</span>&#160;                                                   -0.05347844f,    -0.11768019f, 0.085926116f,   -0.08251791f,</div><div class="line"><a 
name="l00876"></a><span class="lineno">  876</span>&#160;                                                   -0.045081906f, 0.0948852f,      0.068401024f, 0.024856757f,</div><div class="line"><a name="l00877"></a><span class="lineno">  877</span>&#160;                                                   0.06978981f,   -0.057309967f, -0.012775832f,   -0.0032452994f,</div><div class="line"><a name="l00878"></a><span class="lineno">  878</span>&#160;                                                    0.01977615f, -0.041040014f, -0.024264973f,0.063464895f, 0.05431621f</div><div class="line"><a name="l00879"></a><span class="lineno">  879</span>&#160;            });</div><div class="line"><a name="l00880"></a><span class="lineno">  880</span>&#160;</div><div class="line"><a name="l00881"></a><span class="lineno">  881</span>&#160;    <span class="keyword">auto</span> cellToInputWeights =</div><div class="line"><a name="l00882"></a><span class="lineno">  882</span>&#160;            MakeTensor&lt;float, 1&gt;(tensorInfo20, {0.040369894f, 0.030746894f,  0.24704495f,  0.018586371f, -0.037586458f,</div><div class="line"><a name="l00883"></a><span class="lineno">  883</span>&#160;                                                -0.15312155f, -0.11812848f,  -0.11465643f, 0.20259799f,   0.11418174f,</div><div class="line"><a name="l00884"></a><span class="lineno">  884</span>&#160;                                                -0.10116027f, -0.011334949f, 0.12411352f, -0.076769054f,-0.052169047f,</div><div class="line"><a name="l00885"></a><span class="lineno">  885</span>&#160;                                                0.21198851f,  -0.38871562f,  -0.09061183f, -0.09683246f,  -0.21929175f</div><div class="line"><a name="l00886"></a><span class="lineno">  886</span>&#160;            });</div><div class="line"><a name="l00887"></a><span class="lineno">  887</span>&#160;</div><div class="line"><a name="l00888"></a><span class="lineno">  888</span>&#160;</div><div class="line"><a name="l00889"></a><span class="lineno">  889</span>&#160;    <span class="keyword">auto</span> cellToForgetWeights =</div><div class="line"><a name="l00890"></a><span class="lineno">  890</span>&#160;            MakeTensor&lt;float, 1&gt;(tensorInfo20, {-0.01998659f,-0.15568835f,-0.24248174f,   -0.012770197f, 0.041331276f,</div><div class="line"><a name="l00891"></a><span class="lineno">  891</span>&#160;                                                -0.072311886f, -0.052123554f,-0.0066330447f,-0.043891653f,0.036225766f,</div><div class="line"><a name="l00892"></a><span class="lineno">  892</span>&#160;                                                -0.047248036f, 0.021479502f,0.033189066f, 0.11952997f,   -0.020432774f,</div><div class="line"><a name="l00893"></a><span class="lineno">  893</span>&#160;                                                0.64658105f,   -0.06650122f,  -0.03467612f,  0.095340036f, 0.23647355f</div><div class="line"><a name="l00894"></a><span class="lineno">  894</span>&#160;            });</div><div class="line"><a name="l00895"></a><span class="lineno">  895</span>&#160;</div><div class="line"><a name="l00896"></a><span class="lineno">  896</span>&#160;    <span class="keyword">auto</span> cellToOutputWeights =</div><div class="line"><a name="l00897"></a><span class="lineno">  897</span>&#160;            MakeTensor&lt;float, 1&gt;(tensorInfo20, {0.08286371f,  -0.08261836f, -0.51210177f, 0.002913762f, 0.17764764f,</div><div class="line"><a name="l00898"></a><span class="lineno">  898</span>&#160;      
                                          -0.5495371f,  -0.08460716f, -0.24552552f, 0.030037103f, 0.04123544f,</div><div class="line"><a name="l00899"></a><span class="lineno">  899</span>&#160;                                                -0.11940523f, 0.007358328f, 0.1890978f,   0.4833202f,   -0.34441817f,</div><div class="line"><a name="l00900"></a><span class="lineno">  900</span>&#160;                                                0.36312827f,  -0.26375428f, 0.1457655f,   -0.19724406f, 0.15548733f</div><div class="line"><a name="l00901"></a><span class="lineno">  901</span>&#160;            });</div><div class="line"><a name="l00902"></a><span class="lineno">  902</span>&#160;</div><div class="line"><a name="l00903"></a><span class="lineno">  903</span>&#160;    <span class="keyword">auto</span> projectionWeights =</div><div class="line"><a name="l00904"></a><span class="lineno">  904</span>&#160;            MakeTensor&lt;float, 2&gt;(tensorInfo16x20,</div><div class="line"><a name="l00905"></a><span class="lineno">  905</span>&#160;                                 {-0.009802181f,  0.09401916f,    0.0717386f,     -0.13895074f,  0.09641832f,</div><div class="line"><a name="l00906"></a><span class="lineno">  906</span>&#160;                                  0.060420845f,   0.08539281f,    0.054285463f,   0.061395317f,  0.034448683f,</div><div class="line"><a name="l00907"></a><span class="lineno">  907</span>&#160;                                  -0.042991187f,  0.019801661f,   -0.16840284f,   -0.015726732f, -0.23041931f,</div><div class="line"><a name="l00908"></a><span class="lineno">  908</span>&#160;                                  -0.024478018f,  -0.10959692f,   -0.013875541f,  0.18600968f,   -0.061274476f,</div><div class="line"><a name="l00909"></a><span class="lineno">  909</span>&#160;                                  0.0138165f,     -0.08160894f,   -0.07661644f,   0.032372914f,  0.16169067f,</div><div class="line"><a name="l00910"></a><span class="lineno">  910</span>&#160;                                  0.22465782f,    -0.03993472f,   -0.004017731f,  0.08633481f,   -0.28869787f,</div><div class="line"><a name="l00911"></a><span class="lineno">  911</span>&#160;                                  0.08682067f,    0.17240396f,    0.014975425f,   0.056431185f,  0.031037588f,</div><div class="line"><a name="l00912"></a><span class="lineno">  912</span>&#160;                                  0.16702051f,    0.0077946745f,  0.15140012f,    0.29405436f,   0.120285f,</div><div class="line"><a name="l00913"></a><span class="lineno">  913</span>&#160;                                  -0.188994f,     -0.027265169f,  0.043389652f,   -0.022061434f, 0.014777949f,</div><div class="line"><a name="l00914"></a><span class="lineno">  914</span>&#160;                                  -0.20203483f,   0.094781205f,   0.19100232f,    0.13987629f,   -0.036132768f,</div><div class="line"><a name="l00915"></a><span class="lineno">  915</span>&#160;                                  -0.06426278f,   -0.05108664f,   0.13221376f,    0.009441198f,  -0.16715929f,</div><div class="line"><a name="l00916"></a><span class="lineno">  916</span>&#160;                                  0.15859416f,    -0.040437475f,  0.050779544f,   -0.022187516f, 0.012166504f,</div><div class="line"><a name="l00917"></a><span class="lineno">  917</span>&#160;                                  0.027685808f,   -0.07675938f,   -0.0055694645f, -0.09444123f,  0.0046453946f,</div><div class="line"><a name="l00918"></a><span 
class="lineno">  918</span>&#160;                                  0.050794356f,   0.10770313f,    -0.20790008f,   -0.07149004f,  -0.11425117f,</div><div class="line"><a name="l00919"></a><span class="lineno">  919</span>&#160;                                  0.008225835f,   -0.035802525f,  0.14374903f,    0.15262283f,   0.048710253f,</div><div class="line"><a name="l00920"></a><span class="lineno">  920</span>&#160;                                  0.1847461f,     -0.007487823f,  0.11000021f,    -0.09542012f,  0.22619456f,</div><div class="line"><a name="l00921"></a><span class="lineno">  921</span>&#160;                                  -0.029149994f,  0.08527916f,    0.009043713f,   0.0042746216f, 0.016261552f,</div><div class="line"><a name="l00922"></a><span class="lineno">  922</span>&#160;                                  0.022461696f,   0.12689082f,    -0.043589946f,  -0.12035478f,  -0.08361797f,</div><div class="line"><a name="l00923"></a><span class="lineno">  923</span>&#160;                                  -0.050666027f,  -0.1248618f,    -0.1275799f,    -0.071875185f, 0.07377272f,</div><div class="line"><a name="l00924"></a><span class="lineno">  924</span>&#160;                                  0.09944291f,    -0.18897448f,   -0.1593054f,    -0.06526116f,  -0.040107165f,</div><div class="line"><a name="l00925"></a><span class="lineno">  925</span>&#160;                                  -0.004618631f,  -0.067624845f,  -0.007576253f,  0.10727444f,   0.041546922f,</div><div class="line"><a name="l00926"></a><span class="lineno">  926</span>&#160;                                  -0.20424393f,   0.06907816f,    0.050412357f,   0.00724631f,   0.039827548f,</div><div class="line"><a name="l00927"></a><span class="lineno">  927</span>&#160;                                  0.12449835f,    0.10747581f,    0.13708383f,    0.09134148f,   -0.12617786f,</div><div class="line"><a name="l00928"></a><span class="lineno">  928</span>&#160;                                  -0.06428341f,   0.09956831f,    0.1208086f,     -0.14676677f,  -0.0727722f,</div><div class="line"><a name="l00929"></a><span class="lineno">  929</span>&#160;                                  0.1126304f,     0.010139365f,   0.015571211f,   -0.038128063f, 0.022913318f,</div><div class="line"><a name="l00930"></a><span class="lineno">  930</span>&#160;                                  -0.042050496f,  0.16842307f,    -0.060597885f,  0.10531834f,   -0.06411776f,</div><div class="line"><a name="l00931"></a><span class="lineno">  931</span>&#160;                                  -0.07451711f,   -0.03410368f,   -0.13393489f,   0.06534304f,   0.003620307f,</div><div class="line"><a name="l00932"></a><span class="lineno">  932</span>&#160;                                  0.04490757f,    0.05970546f,    0.05197996f,    0.02839995f,   0.10434969f,</div><div class="line"><a name="l00933"></a><span class="lineno">  933</span>&#160;                                  -0.013699693f,  -0.028353551f,  -0.07260381f,   0.047201227f,  -0.024575593f,</div><div class="line"><a name="l00934"></a><span class="lineno">  934</span>&#160;                                  -0.036445823f,  0.07155557f,    0.009672501f,   -0.02328883f,  0.009533515f,</div><div class="line"><a name="l00935"></a><span class="lineno">  935</span>&#160;                                  -0.03606021f,   -0.07421458f,   -0.028082801f,  -0.2678904f,   -0.13221288f,</div><div class="line"><a name="l00936"></a><span class="lineno">  936</span>&#160;                                
  0.18419984f,    -0.13012612f,   -0.014588381f,  -0.035059117f, -0.04824723f,</div><div class="line"><a name="l00937"></a><span class="lineno">  937</span>&#160;                                  0.07830115f,    -0.056184657f,  0.03277091f,    0.025466874f,  0.14494097f,</div><div class="line"><a name="l00938"></a><span class="lineno">  938</span>&#160;                                  -0.12522776f,   -0.098633975f,  -0.10766018f,   -0.08317623f,  0.08594209f,</div><div class="line"><a name="l00939"></a><span class="lineno">  939</span>&#160;                                  0.07749552f,    0.039474737f,   0.1776665f,     -0.07409566f,  -0.0477268f,</div><div class="line"><a name="l00940"></a><span class="lineno">  940</span>&#160;                                  0.29323658f,    0.10801441f,    0.1154011f,     0.013952499f,  0.10739139f,</div><div class="line"><a name="l00941"></a><span class="lineno">  941</span>&#160;                                  0.10708251f,    -0.051456142f,  0.0074137426f,  -0.10430189f,  0.10034707f,</div><div class="line"><a name="l00942"></a><span class="lineno">  942</span>&#160;                                  0.045594677f,   0.0635285f,     -0.0715442f,    -0.089667566f, -0.10811871f,</div><div class="line"><a name="l00943"></a><span class="lineno">  943</span>&#160;                                  0.00026344223f, 0.08298446f,    -0.009525053f,  0.006585689f,  -0.24567553f,</div><div class="line"><a name="l00944"></a><span class="lineno">  944</span>&#160;                                  -0.09450807f,   0.09648481f,    0.026996298f,   -0.06419476f,  -0.04752702f,</div><div class="line"><a name="l00945"></a><span class="lineno">  945</span>&#160;                                  -0.11063944f,   -0.23441927f,   -0.17608605f,   -0.052156363f, 0.067035615f,</div><div class="line"><a name="l00946"></a><span class="lineno">  946</span>&#160;                                  0.19271925f,    -0.0032889997f, -0.043264326f,  0.09663576f,   -0.057112187f,</div><div class="line"><a name="l00947"></a><span class="lineno">  947</span>&#160;                                  -0.10100678f,   0.0628376f,     0.04447668f,    0.017961001f,  -0.10094388f,</div><div class="line"><a name="l00948"></a><span class="lineno">  948</span>&#160;                                  -0.10190601f,   0.18335468f,    0.10494553f,    -0.052095775f, -0.0026118709f,</div><div class="line"><a name="l00949"></a><span class="lineno">  949</span>&#160;                                  0.10539724f,    -0.04383912f,   -0.042349473f,  0.08438151f,   -0.1947263f,</div><div class="line"><a name="l00950"></a><span class="lineno">  950</span>&#160;                                  0.02251204f,    0.11216432f,    -0.10307853f,   0.17351969f,   -0.039091777f,</div><div class="line"><a name="l00951"></a><span class="lineno">  951</span>&#160;                                  0.08066188f,    -0.00561982f,   0.12633002f,    0.11335965f,   -0.0088127935f,</div><div class="line"><a name="l00952"></a><span class="lineno">  952</span>&#160;                                  -0.019777594f,  0.06864014f,    -0.059751723f,  0.016233567f,  -0.06894641f,</div><div class="line"><a name="l00953"></a><span class="lineno">  953</span>&#160;                                  -0.28651384f,   -0.004228674f,  0.019708522f,   -0.16305895f,  -0.07468996f,</div><div class="line"><a name="l00954"></a><span class="lineno">  954</span>&#160;                                  -0.0855457f,    0.099339016f,   -0.07580735f,   
-0.13775392f,  0.08434318f,</div><div class="line"><a name="l00955"></a><span class="lineno">  955</span>&#160;                                  0.08330512f,    -0.12131499f,   0.031935584f,   0.09180414f,   -0.08876437f,</div><div class="line"><a name="l00956"></a><span class="lineno">  956</span>&#160;                                  -0.08049874f,   0.008753825f,   0.03498998f,    0.030215185f,  0.03907079f,</div><div class="line"><a name="l00957"></a><span class="lineno">  957</span>&#160;                                  0.089751154f,   0.029194152f,   -0.03337423f,   -0.019092513f, 0.04331237f,</div><div class="line"><a name="l00958"></a><span class="lineno">  958</span>&#160;                                  0.04299654f,    -0.036394123f,  -0.12915532f,   0.09793732f,   0.07512415f,</div><div class="line"><a name="l00959"></a><span class="lineno">  959</span>&#160;                                  -0.11319543f,   -0.032502122f,  0.15661901f,    0.07671967f,   -0.005491124f,</div><div class="line"><a name="l00960"></a><span class="lineno">  960</span>&#160;                                  -0.19379048f,   -0.218606f,     0.21448623f,    0.017840758f,  0.1416943f,</div><div class="line"><a name="l00961"></a><span class="lineno">  961</span>&#160;                                  -0.07051762f,   0.19488361f,    0.02664691f,    -0.18104725f,  -0.09334311f,</div><div class="line"><a name="l00962"></a><span class="lineno">  962</span>&#160;                                  0.15026465f,    -0.15493552f,   -0.057762887f,  -0.11604192f,  -0.262013f,</div><div class="line"><a name="l00963"></a><span class="lineno">  963</span>&#160;                                  -0.01391798f,   0.012185008f,   0.11156489f,    -0.07483202f,  0.06693364f,</div><div class="line"><a name="l00964"></a><span class="lineno">  964</span>&#160;                                  -0.26151478f,   0.046425626f,   0.036540434f,   -0.16435726f,  0.17338543f,</div><div class="line"><a name="l00965"></a><span class="lineno">  965</span>&#160;                                  -0.21401681f,   -0.11385144f,   -0.08283257f,   -0.069031075f, 0.030635102f,</div><div class="line"><a name="l00966"></a><span class="lineno">  966</span>&#160;                                  0.010969227f,   0.11109743f,    0.010919218f,   0.027526086f,  0.13519906f,</div><div class="line"><a name="l00967"></a><span class="lineno">  967</span>&#160;                                  0.01891392f,    -0.046839405f,  -0.040167913f,  0.017953383f,  -0.09700955f,</div><div class="line"><a name="l00968"></a><span class="lineno">  968</span>&#160;                                  0.0061885654f,  -0.07000971f,   0.026893595f,   -0.038844477f, 0.14543656f</div><div class="line"><a name="l00969"></a><span class="lineno">  969</span>&#160;                                 });</div><div class="line"><a name="l00970"></a><span class="lineno">  970</span>&#160;</div><div class="line"><a name="l00971"></a><span class="lineno">  971</span>&#160;    std::vector&lt;float&gt; projectionBiasVector(outputSize, 0.f);</div><div class="line"><a name="l00972"></a><span class="lineno">  972</span>&#160;    <span class="keyword">auto</span> projectionBias = MakeTensor&lt;float,1&gt;(tensorInfo16, projectionBiasVector);</div><div class="line"><a name="l00973"></a><span class="lineno">  973</span>&#160;</div><div class="line"><a name="l00974"></a><span class="lineno">  974</span>&#160;    <a class="code" 
href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToInputWeightsTensor(tensorInfo20x5);</div><div class="line"><a name="l00975"></a><span class="lineno">  975</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToForgetWeightsTensor(tensorInfo20x5);</div><div class="line"><a name="l00976"></a><span class="lineno">  976</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToCellWeightsTensor(tensorInfo20x5);</div><div class="line"><a name="l00977"></a><span class="lineno">  977</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToOutputWeightsTensor(tensorInfo20x5);</div><div class="line"><a name="l00978"></a><span class="lineno">  978</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToForgetWeightsTensor(tensorInfo20x16);</div><div class="line"><a name="l00979"></a><span class="lineno">  979</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToInputWeightsTensor(tensorInfo20x16);</div><div class="line"><a name="l00980"></a><span class="lineno">  980</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToCellWeightsTensor(tensorInfo20x16);</div><div class="line"><a name="l00981"></a><span class="lineno">  981</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToOutputWeightsTensor(tensorInfo20x16);</div><div class="line"><a name="l00982"></a><span class="lineno">  982</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellToInputWeightsTensor(tensorInfo20);</div><div class="line"><a name="l00983"></a><span class="lineno">  983</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputGateBiasTensor(tensorInfo20);</div><div class="line"><a name="l00984"></a><span class="lineno">  984</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetGateBiasTensor(tensorInfo20);</div><div class="line"><a name="l00985"></a><span class="lineno">  985</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellBiasTensor(tensorInfo20);</div><div class="line"><a name="l00986"></a><span class="lineno">  986</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputGateBiasTensor(tensorInfo20);</div><div class="line"><a name="l00987"></a><span class="lineno">  987</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellToForgetWeightsTensor(tensorInfo20);</div><div class="line"><a name="l00988"></a><span class="lineno">  988</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellToOutputWeightsTensor(tensorInfo20);</div><div class="line"><a name="l00989"></a><span class="lineno">  989</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> 
projectionWeightsTensor(tensorInfo16x20);</div><div class="line"><a name="l00990"></a><span class="lineno">  990</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> projectionBiasTensor(tensorInfo16);</div><div class="line"><a name="l00991"></a><span class="lineno">  991</span>&#160;</div><div class="line"><a name="l00992"></a><span class="lineno">  992</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToInputWeightsTensor, &amp;inputToInputWeights[0][0]);</div><div class="line"><a name="l00993"></a><span class="lineno">  993</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToForgetWeightsTensor, &amp;inputToForgetWeights[0][0]);</div><div class="line"><a name="l00994"></a><span class="lineno">  994</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToCellWeightsTensor, &amp;inputToCellWeights[0][0]);</div><div class="line"><a name="l00995"></a><span class="lineno">  995</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToOutputWeightsTensor, &amp;inputToOutputWeights[0][0]);</div><div class="line"><a name="l00996"></a><span class="lineno">  996</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToInputWeightsTensor, &amp;recurrentToInputWeights[0][0]);</div><div class="line"><a name="l00997"></a><span class="lineno">  997</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToForgetWeightsTensor, &amp;recurrentToForgetWeights[0][0]);</div><div class="line"><a name="l00998"></a><span class="lineno">  998</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToCellWeightsTensor, &amp;recurrentToCellWeights[0][0]);</div><div class="line"><a name="l00999"></a><span class="lineno">  999</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToOutputWeightsTensor, &amp;recurrentToOutputWeights[0][0]);</div><div class="line"><a name="l01000"></a><span class="lineno"> 1000</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellToInputWeightsTensor, &amp;cellToInputWeights[0]);</div><div class="line"><a name="l01001"></a><span class="lineno"> 1001</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputGateBiasTensor, &amp;inputGateBias[0]);</div><div class="line"><a name="l01002"></a><span class="lineno"> 1002</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetGateBiasTensor, &amp;forgetGateBias[0]);</div><div class="line"><a name="l01003"></a><span class="lineno"> 1003</span>&#160;    <a class="code" 
href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellBiasTensor, &amp;cellBias[0]);</div><div class="line"><a name="l01004"></a><span class="lineno"> 1004</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputGateBiasTensor, &amp;outputGateBias[0]);</div><div class="line"><a name="l01005"></a><span class="lineno"> 1005</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellToForgetWeightsTensor, &amp;cellToForgetWeights[0]);</div><div class="line"><a name="l01006"></a><span class="lineno"> 1006</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellToOutputWeightsTensor, &amp;cellToOutputWeights[0]);</div><div class="line"><a name="l01007"></a><span class="lineno"> 1007</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;projectionWeightsTensor, &amp;projectionWeights[0][0]);</div><div class="line"><a name="l01008"></a><span class="lineno"> 1008</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;projectionBiasTensor, &amp;projectionBias[0]);</div><div class="line"><a name="l01009"></a><span class="lineno"> 1009</span>&#160;</div><div class="line"><a name="l01010"></a><span class="lineno"> 1010</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a08a1932be591c315a512a877d38b22df">m_InputToInputWeights</a> = &amp;inputToInputWeightsTensor;</div><div class="line"><a name="l01011"></a><span class="lineno"> 1011</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">m_InputToForgetWeights</a> = &amp;inputToForgetWeightsTensor;</div><div class="line"><a name="l01012"></a><span class="lineno"> 1012</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a28ad98d17603fd8b12e046f8ece58970">m_InputToCellWeights</a> = &amp;inputToCellWeightsTensor;</div><div class="line"><a name="l01013"></a><span class="lineno"> 1013</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a83dc9086b2e4a4e4cadb66bd874df798">m_InputToOutputWeights</a> = &amp;inputToOutputWeightsTensor;</div><div class="line"><a name="l01014"></a><span class="lineno"> 1014</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a98d377149071d8842d610cc0734d1cfe">m_RecurrentToInputWeights</a> = &amp;recurrentToInputWeightsTensor;</div><div class="line"><a name="l01015"></a><span class="lineno"> 1015</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a45d73e66cbb2b65049e4016c20657ccf">m_RecurrentToForgetWeights</a> = &amp;recurrentToForgetWeightsTensor;</div><div class="line"><a name="l01016"></a><span class="lineno"> 1016</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">m_RecurrentToCellWeights</a> = &amp;recurrentToCellWeightsTensor;</div><div class="line"><a name="l01017"></a><span class="lineno"> 1017</span>&#160;    data.<a class="code" 
href="structarmnn_1_1_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">m_RecurrentToOutputWeights</a> = &amp;recurrentToOutputWeightsTensor;</div><div class="line"><a name="l01018"></a><span class="lineno"> 1018</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a5c1c0a7ead7273788976c9e97cffaab7">m_CellToInputWeights</a> = &amp;cellToInputWeightsTensor;</div><div class="line"><a name="l01019"></a><span class="lineno"> 1019</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#acb3aade8fae984f7293e222dcbe66030">m_InputGateBias</a> = &amp;inputGateBiasTensor;</div><div class="line"><a name="l01020"></a><span class="lineno"> 1020</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">m_ForgetGateBias</a> = &amp;forgetGateBiasTensor;</div><div class="line"><a name="l01021"></a><span class="lineno"> 1021</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">m_CellBias</a> = &amp;cellBiasTensor;</div><div class="line"><a name="l01022"></a><span class="lineno"> 1022</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">m_OutputGateBias</a> = &amp;outputGateBiasTensor;</div><div class="line"><a name="l01023"></a><span class="lineno"> 1023</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#acefa49d7faf26933e27e473e7bdb4175">m_CellToForgetWeights</a> = &amp;cellToForgetWeightsTensor;</div><div class="line"><a name="l01024"></a><span class="lineno"> 1024</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a6f74071b0e07bbe2cb20a8f78826e084">m_CellToOutputWeights</a> = &amp;cellToOutputWeightsTensor;</div><div class="line"><a name="l01025"></a><span class="lineno"> 1025</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#af3c52626a6f05597d82ed095d0765962">m_ProjectionWeights</a> = &amp;projectionWeightsTensor;</div><div class="line"><a name="l01026"></a><span class="lineno"> 1026</span>&#160;    data.<a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a2ba352eb1fdf6dc5ecf7f2e6b6b48f94">m_ProjectionBias</a> = &amp;projectionBiasTensor;</div><div class="line"><a name="l01027"></a><span class="lineno"> 1027</span>&#160;</div><div class="line"><a name="l01028"></a><span class="lineno"> 1028</span>&#160;    <span class="comment">// Flags to set test configuration</span></div><div class="line"><a name="l01029"></a><span class="lineno"> 1029</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_lstm_descriptor.xhtml#ae1b07ed928036004bd257169e5aeeef4">m_ActivationFunc</a> = 4;</div><div class="line"><a name="l01030"></a><span class="lineno"> 1030</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_lstm_descriptor.xhtml#ad474e5c51a0b194ef32e812b86c0cbdb">m_CifgEnabled</a> = <span class="keyword">false</span>;</div><div class="line"><a name="l01031"></a><span class="lineno"> 1031</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" 
href="structarmnn_1_1_lstm_descriptor.xhtml#a2837b4396f20c956952d1a7286cab5f8">m_PeepholeEnabled</a> = <span class="keyword">true</span>;</div><div class="line"><a name="l01032"></a><span class="lineno"> 1032</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_lstm_descriptor.xhtml#a6c9de81fc65b3c4924cab11907075a17">m_ProjectionEnabled</a> = <span class="keyword">true</span>;</div><div class="line"><a name="l01033"></a><span class="lineno"> 1033</span>&#160;</div><div class="line"><a name="l01034"></a><span class="lineno"> 1034</span>&#160;</div><div class="line"><a name="l01035"></a><span class="lineno"> 1035</span>&#160;    std::unique_ptr&lt;armnn::IWorkload&gt; workload = workloadFactory.<a class="code" href="classarmnn_1_1_i_workload_factory.xhtml#ab6bd7aaf685d4e956d780f8655a6f174">CreateLstm</a>(data, info);</div><div class="line"><a name="l01036"></a><span class="lineno"> 1036</span>&#160;    inputHandle-&gt;Allocate();</div><div class="line"><a name="l01037"></a><span class="lineno"> 1037</span>&#160;    outputStateInHandle-&gt;Allocate();</div><div class="line"><a name="l01038"></a><span class="lineno"> 1038</span>&#160;    cellStateInHandle-&gt;Allocate();</div><div class="line"><a name="l01039"></a><span class="lineno"> 1039</span>&#160;</div><div class="line"><a name="l01040"></a><span class="lineno"> 1040</span>&#160;    scratchHandle-&gt;Allocate();</div><div class="line"><a name="l01041"></a><span class="lineno"> 1041</span>&#160;    outputStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l01042"></a><span class="lineno"> 1042</span>&#160;    cellStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l01043"></a><span class="lineno"> 1043</span>&#160;    outputHandle-&gt;Allocate();</div><div class="line"><a name="l01044"></a><span class="lineno"> 1044</span>&#160;</div><div class="line"><a name="l01045"></a><span class="lineno"> 1045</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(inputHandle.get(), &amp;inputTensor[0][0]);</div><div class="line"><a name="l01046"></a><span class="lineno"> 1046</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(outputStateInHandle.get(), &amp;outputStateInTensor[0][0]);</div><div class="line"><a name="l01047"></a><span class="lineno"> 1047</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(cellStateInHandle.get(), &amp;cellStateInTensor[0][0]);</div><div class="line"><a name="l01048"></a><span class="lineno"> 1048</span>&#160;</div><div class="line"><a name="l01049"></a><span class="lineno"> 1049</span>&#160;    workload-&gt;Execute();</div><div class="line"><a name="l01050"></a><span class="lineno"> 1050</span>&#160;</div><div class="line"><a name="l01051"></a><span class="lineno"> 1051</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#a99b626c58a926dc7d6df78d22ec186c8">CopyDataFromITensorHandle</a>(&amp;ret.output[0][0], outputHandle.get());</div><div class="line"><a name="l01052"></a><span class="lineno"> 1052</span>&#160;</div><div class="line"><a name="l01053"></a><span class="lineno"> 1053</span>&#160;    <span class="keywordflow">return</span> ret;</div><div class="line"><a name="l01054"></a><span class="lineno"> 
1054</span>&#160;</div><div class="line"><a name="l01055"></a><span class="lineno"> 1055</span>&#160;}</div><div class="line"><a name="l01056"></a><span class="lineno"> 1056</span>&#160;</div><div class="line"><a name="l01057"></a><span class="lineno"> 1057</span>&#160;<span class="keyword">template</span>&lt;armnn::DataType ArmnnType, <span class="keyword">typename</span> T = armnn::ResolveType&lt;ArmnnType&gt;&gt;</div><div class="line"><a name="l01058"></a><span class="lineno"> 1058</span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a> LstmLayerWithCifgWithPeepholeNoProjectionTestImpl(</div><div class="line"><a name="l01059"></a><span class="lineno"> 1059</span>&#160;        <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l01060"></a><span class="lineno"> 1060</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l01061"></a><span class="lineno"> 1061</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory,</div><div class="line"><a name="l01062"></a><span class="lineno"> 1062</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;T, 2&gt;&amp; input,</div><div class="line"><a name="l01063"></a><span class="lineno"> 1063</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;T, 2&gt;&amp; outputExpected,</div><div class="line"><a name="l01064"></a><span class="lineno"> 1064</span>&#160;        <span class="keywordtype">float</span> qScale = 0.0f,</div><div class="line"><a name="l01065"></a><span class="lineno"> 1065</span>&#160;        int32_t qOffset = 0,</div><div class="line"><a name="l01066"></a><span class="lineno"> 1066</span>&#160;        <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> constantDataType = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>)</div><div class="line"><a name="l01067"></a><span class="lineno"> 1067</span>&#160;{</div><div class="line"><a name="l01068"></a><span class="lineno"> 1068</span>&#160;    <a class="code" href="namespacearmnn.xhtml#a44affeeb090c3c6a3062830562672e84">IgnoreUnused</a>(memoryManager);</div><div class="line"><a name="l01069"></a><span class="lineno"> 1069</span>&#160;    <span class="keywordtype">bool</span> cifgEnabled = <span class="keyword">true</span>;</div><div class="line"><a name="l01070"></a><span class="lineno"> 1070</span>&#160;    <span class="keywordtype">bool</span> peepholeEnabled = <span class="keyword">true</span>;</div><div class="line"><a name="l01071"></a><span class="lineno"> 1071</span>&#160;    <span class="keywordtype">bool</span> projectionEnabled = <span class="keyword">false</span>;</div><div class="line"><a name="l01072"></a><span class="lineno"> 1072</span>&#160;    <span class="comment">// These are not the input and the output of Lstm yet</span></div><div class="line"><a name="l01073"></a><span class="lineno"> 1073</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> batchSize = <a class="code" 
href="namespacearmnn.xhtml#a375ca3cff9f1b005d1412dc5f3cf5b6e">armnn::numeric_cast</a>&lt;<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span>&gt;(input.shape()[0]);</div><div class="line"><a name="l01074"></a><span class="lineno"> 1074</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> inputSize = <a class="code" href="namespacearmnn.xhtml#a375ca3cff9f1b005d1412dc5f3cf5b6e">armnn::numeric_cast</a>&lt;<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span>&gt;(input.shape()[1]);</div><div class="line"><a name="l01075"></a><span class="lineno"> 1075</span>&#160;</div><div class="line"><a name="l01076"></a><span class="lineno"> 1076</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> outputSize = <a class="code" href="namespacearmnn.xhtml#a375ca3cff9f1b005d1412dc5f3cf5b6e">armnn::numeric_cast</a>&lt;<span class="keywordtype">unsigned</span> <span class="keywordtype">int</span>&gt;(outputExpected.shape()[1]);</div><div class="line"><a name="l01077"></a><span class="lineno"> 1077</span>&#160;</div><div class="line"><a name="l01078"></a><span class="lineno"> 1078</span>&#160;    <span class="keyword">const</span> <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> cellSize = outputSize;</div><div class="line"><a name="l01079"></a><span class="lineno"> 1079</span>&#160;</div><div class="line"><a name="l01080"></a><span class="lineno"> 1080</span>&#160;    <span class="comment">// Decide the shape of all input tensors</span></div><div class="line"><a name="l01081"></a><span class="lineno"> 1081</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputTensorInfo({batchSize , inputSize}, ArmnnType, qScale, qOffset); <span class="comment">// change to ArmnnType</span></div><div class="line"><a name="l01082"></a><span class="lineno"> 1082</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateInTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l01083"></a><span class="lineno"> 1083</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateInTensorInfo({batchSize, cellSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l01084"></a><span class="lineno"> 1084</span>&#160;</div><div class="line"><a name="l01085"></a><span class="lineno"> 1085</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> scratchBufferSize = cifgEnabled ? 
cellSize * 3 : cellSize * 4;</div><div class="line"><a name="l01086"></a><span class="lineno"> 1086</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> scratchBufferTensorInfo({batchSize, scratchBufferSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l01087"></a><span class="lineno"> 1087</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateOutTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l01088"></a><span class="lineno"> 1088</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateOutTensorInfo({batchSize, cellSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l01089"></a><span class="lineno"> 1089</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);</div><div class="line"><a name="l01090"></a><span class="lineno"> 1090</span>&#160;</div><div class="line"><a name="l01091"></a><span class="lineno"> 1091</span>&#160;    <span class="comment">// List of inputs</span></div><div class="line"><a name="l01092"></a><span class="lineno"> 1092</span>&#160;    std::vector&lt;float&gt; inputData;</div><div class="line"><a name="l01093"></a><span class="lineno"> 1093</span>&#160;    inputData.assign(input.data(), input.data() + batchSize*inputSize);</div><div class="line"><a name="l01094"></a><span class="lineno"> 1094</span>&#160;    <span class="keyword">auto</span> inputTensor = MakeTensor&lt;float,2&gt;(inputTensorInfo, inputData);</div><div class="line"><a name="l01095"></a><span class="lineno"> 1095</span>&#160;</div><div class="line"><a name="l01096"></a><span class="lineno"> 1096</span>&#160;    std::vector&lt;float&gt; outputStateInVector(batchSize * outputSize, 0.f);</div><div class="line"><a name="l01097"></a><span class="lineno"> 1097</span>&#160;    <span class="keyword">auto</span> outputStateInTensor = MakeTensor&lt;float, 2&gt;(outputStateInTensorInfo, outputStateInVector);</div><div class="line"><a name="l01098"></a><span class="lineno"> 1098</span>&#160;</div><div class="line"><a name="l01099"></a><span class="lineno"> 1099</span>&#160;    std::vector&lt;float&gt; cellStateInVector(batchSize * cellSize, 0.f);</div><div class="line"><a name="l01100"></a><span class="lineno"> 1100</span>&#160;    <span class="keyword">auto</span> cellStateInTensor = MakeTensor&lt;float, 2&gt;(cellStateInTensorInfo, cellStateInVector);</div><div class="line"><a name="l01101"></a><span class="lineno"> 1101</span>&#160;</div><div class="line"><a name="l01102"></a><span class="lineno"> 1102</span>&#160;</div><div class="line"><a name="l01103"></a><span class="lineno"> 1103</span>&#160;    <span class="comment">// Prepare all the weights in the descriptor for LSTM</span></div><div class="line"><a name="l01104"></a><span class="lineno"> 1104</span>&#160;    <a class="code" href="structarmnn_1_1_lstm_queue_descriptor.xhtml">armnn::LstmQueueDescriptor</a> data;</div><div class="line"><a name="l01105"></a><span class="lineno"> 1105</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfoInput({cellSize, inputSize}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l01106"></a><span class="lineno"> 1106</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> 
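// Explanatory note (not upstream code): the scratch buffer sizing just above,
//     unsigned int scratchBufferSize = cifgEnabled ? cellSize * 3 : cellSize * 4;
// matches the shape comment used later in this file ("Scratch buffer size without CIFG
// [batchSize, numUnits * 4]"). This is consistent with the input gate being coupled to
// the forget gate when CIFG is enabled, so the scratch buffer only needs room for three
// gate activations per cell instead of four.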
tensorInfoOutput({cellSize, outputSize}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l01107"></a><span class="lineno"> 1107</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> tensorInfoNumUnits({cellSize}, constantDataType, qScale, qOffset);</div><div class="line"><a name="l01108"></a><span class="lineno"> 1108</span>&#160;</div><div class="line"><a name="l01109"></a><span class="lineno"> 1109</span>&#160;    <span class="keyword">auto</span> inputToCellWeights = MakeTensor&lt;float, 2&gt;(tensorInfoInput,</div><div class="line"><a name="l01110"></a><span class="lineno"> 1110</span>&#160;                                                     {-0.49770179f, -0.27711356f, -0.09624726f, 0.05100781f,</div><div class="line"><a name="l01111"></a><span class="lineno"> 1111</span>&#160;                                                     0.04717243f, 0.48944736f, -0.38535351f,</div><div class="line"><a name="l01112"></a><span class="lineno"> 1112</span>&#160;                                                     -0.17212132f});</div><div class="line"><a name="l01113"></a><span class="lineno"> 1113</span>&#160;    <span class="keyword">auto</span> inputToForgetWeights = MakeTensor&lt;float, 2&gt;(tensorInfoInput,</div><div class="line"><a name="l01114"></a><span class="lineno"> 1114</span>&#160;                                                     {-0.55291498f, -0.42866567f, 0.13056988f,</div><div class="line"><a name="l01115"></a><span class="lineno"> 1115</span>&#160;                                                       -0.3633365f, -0.22755712f, 0.28253698f, 0.24407166f,</div><div class="line"><a name="l01116"></a><span class="lineno"> 1116</span>&#160;                                                       0.33826375f});</div><div class="line"><a name="l01117"></a><span class="lineno"> 1117</span>&#160;    <span class="keyword">auto</span> inputToOutputWeights = MakeTensor&lt;float, 2&gt;(tensorInfoInput,</div><div class="line"><a name="l01118"></a><span class="lineno"> 1118</span>&#160;                                                     {0.10725588f, -0.02335852f, -0.55932593f,</div><div class="line"><a name="l01119"></a><span class="lineno"> 1119</span>&#160;                                                       -0.09426838f, -0.44257352f, 0.54939759f,</div><div class="line"><a name="l01120"></a><span class="lineno"> 1120</span>&#160;                                                       0.01533556f, 0.42751634f});</div><div class="line"><a name="l01121"></a><span class="lineno"> 1121</span>&#160;    <span class="keyword">auto</span> cellBias = MakeTensor&lt;float, 1&gt;(tensorInfoNumUnits, {0.f, 0.f, 0.f, 0.f});</div><div class="line"><a name="l01122"></a><span class="lineno"> 1122</span>&#160;    <span class="keyword">auto</span> forgetGateBias = MakeTensor&lt;float, 1&gt;(tensorInfoNumUnits, {1.f, 1.f, 1.f, 1.f});</div><div class="line"><a name="l01123"></a><span class="lineno"> 1123</span>&#160;    <span class="keyword">auto</span> outputGateBias = MakeTensor&lt;float, 1&gt;(tensorInfoNumUnits, {0.f, 0.f, 0.f, 0.f});</div><div class="line"><a name="l01124"></a><span class="lineno"> 1124</span>&#160;</div><div class="line"><a name="l01125"></a><span class="lineno"> 1125</span>&#160;    <span class="keyword">auto</span> recurrentToCellWeights = MakeTensor&lt;float, 2&gt;(tensorInfoOutput,</div><div class="line"><a name="l01126"></a><span class="lineno"> 1126</span>&#160;                {0.54066205f, -0.32668582f, -0.43562764f, 
-0.56094903f, 0.42957711f,</div><div class="line"><a name="l01127"></a><span class="lineno"> 1127</span>&#160;                 0.01841056f, -0.32764608f, -0.33027974f, -0.10826075f, 0.20675004f,</div><div class="line"><a name="l01128"></a><span class="lineno"> 1128</span>&#160;                 0.19069612f, -0.03026325f, -0.54532051f, 0.33003211f, 0.44901288f,</div><div class="line"><a name="l01129"></a><span class="lineno"> 1129</span>&#160;                 0.21193194f});</div><div class="line"><a name="l01130"></a><span class="lineno"> 1130</span>&#160;    <span class="keyword">auto</span> recurrentToForgetWeights = MakeTensor&lt;float, 2&gt;(tensorInfoOutput,</div><div class="line"><a name="l01131"></a><span class="lineno"> 1131</span>&#160;                 {-0.13832897f, -0.0515101f, -0.2359007f, -0.16661474f, -0.14340827f,</div><div class="line"><a name="l01132"></a><span class="lineno"> 1132</span>&#160;                  0.36986142f, 0.23414481f, 0.55899f, 0.10798943f, -0.41174671f, 0.17751795f,</div><div class="line"><a name="l01133"></a><span class="lineno"> 1133</span>&#160;                  -0.34484994f, -0.35874045f, -0.11352962f, 0.27268326f, 0.54058349f});</div><div class="line"><a name="l01134"></a><span class="lineno"> 1134</span>&#160;</div><div class="line"><a name="l01135"></a><span class="lineno"> 1135</span>&#160;    <span class="keyword">auto</span> recurrentToOutputWeights = MakeTensor&lt;float, 2&gt;(tensorInfoOutput,</div><div class="line"><a name="l01136"></a><span class="lineno"> 1136</span>&#160;                {0.41613156f, 0.42610586f, -0.16495961f, -0.5663873f, 0.30579174f, -0.05115908f,</div><div class="line"><a name="l01137"></a><span class="lineno"> 1137</span>&#160;                 -0.33941799f, 0.23364776f, 0.11178309f, 0.09481031f, -0.26424935f, 0.46261835f,</div><div class="line"><a name="l01138"></a><span class="lineno"> 1138</span>&#160;                 0.50248802f, 0.26114327f, -0.43736315f, 0.33149987f});</div><div class="line"><a name="l01139"></a><span class="lineno"> 1139</span>&#160;</div><div class="line"><a name="l01140"></a><span class="lineno"> 1140</span>&#160;    <span class="keyword">auto</span> cellToForgetWeights = MakeTensor&lt;float, 1&gt;(tensorInfoNumUnits,</div><div class="line"><a name="l01141"></a><span class="lineno"> 1141</span>&#160;                {0.47485286f, -0.51955009f, -0.24458408f, 0.31544167f});</div><div class="line"><a name="l01142"></a><span class="lineno"> 1142</span>&#160;    <span class="keyword">auto</span> cellToOutputWeights = MakeTensor&lt;float, 1&gt;(tensorInfoNumUnits,</div><div class="line"><a name="l01143"></a><span class="lineno"> 1143</span>&#160;                {-0.17135078f, 0.82760304f, 0.85573703f, -0.77109635f});</div><div class="line"><a name="l01144"></a><span class="lineno"> 1144</span>&#160;</div><div class="line"><a name="l01145"></a><span class="lineno"> 1145</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToCellWeightsTensor(tensorInfoInput);</div><div class="line"><a name="l01146"></a><span class="lineno"> 1146</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToForgetWeightsTensor(tensorInfoInput);</div><div class="line"><a name="l01147"></a><span class="lineno"> 1147</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToOutputWeightsTensor(tensorInfoInput);</div><div 
class="line"><a name="l01148"></a><span class="lineno"> 1148</span>&#160;</div><div class="line"><a name="l01149"></a><span class="lineno"> 1149</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellBiasTensor(tensorInfoNumUnits);</div><div class="line"><a name="l01150"></a><span class="lineno"> 1150</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetGateBiasTensor(tensorInfoNumUnits);</div><div class="line"><a name="l01151"></a><span class="lineno"> 1151</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputGateBiasTensor(tensorInfoNumUnits);</div><div class="line"><a name="l01152"></a><span class="lineno"> 1152</span>&#160;</div><div class="line"><a name="l01153"></a><span class="lineno"> 1153</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToCellWeightsTensor(tensorInfoOutput);</div><div class="line"><a name="l01154"></a><span class="lineno"> 1154</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToForgetWeightsTensor(tensorInfoOutput);</div><div class="line"><a name="l01155"></a><span class="lineno"> 1155</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToOutputWeightsTensor(tensorInfoOutput);</div><div class="line"><a name="l01156"></a><span class="lineno"> 1156</span>&#160;</div><div class="line"><a name="l01157"></a><span class="lineno"> 1157</span>&#160;</div><div class="line"><a name="l01158"></a><span class="lineno"> 1158</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellToForgetWeightsTensor(tensorInfoNumUnits);</div><div class="line"><a name="l01159"></a><span class="lineno"> 1159</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellToOutputWeightsTensor(tensorInfoNumUnits);</div><div class="line"><a name="l01160"></a><span class="lineno"> 1160</span>&#160;</div><div class="line"><a name="l01161"></a><span class="lineno"> 1161</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToCellWeightsTensor, &amp;inputToCellWeights[0][0]);</div><div class="line"><a name="l01162"></a><span class="lineno"> 1162</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToForgetWeightsTensor, &amp;inputToForgetWeights[0][0]);</div><div class="line"><a name="l01163"></a><span class="lineno"> 1163</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToOutputWeightsTensor, &amp;inputToOutputWeights[0][0]);</div><div class="line"><a name="l01164"></a><span class="lineno"> 1164</span>&#160;</div><div class="line"><a name="l01165"></a><span class="lineno"> 1165</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellBiasTensor, &amp;cellBias[0]);</div><div class="line"><a name="l01166"></a><span class="lineno"> 1166</span>&#160;    <a 
class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetGateBiasTensor, &amp;forgetGateBias[0]);</div><div class="line"><a name="l01167"></a><span class="lineno"> 1167</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputGateBiasTensor, &amp;outputGateBias[0]);</div><div class="line"><a name="l01168"></a><span class="lineno"> 1168</span>&#160;</div><div class="line"><a name="l01169"></a><span class="lineno"> 1169</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToCellWeightsTensor, &amp;recurrentToCellWeights[0][0]);</div><div class="line"><a name="l01170"></a><span class="lineno"> 1170</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToForgetWeightsTensor, &amp;recurrentToForgetWeights[0][0]);</div><div class="line"><a name="l01171"></a><span class="lineno"> 1171</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToOutputWeightsTensor, &amp;recurrentToOutputWeights[0][0]);</div><div class="line"><a name="l01172"></a><span class="lineno"> 1172</span>&#160;</div><div class="line"><a name="l01173"></a><span class="lineno"> 1173</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellToForgetWeightsTensor, &amp;cellToForgetWeights[0]);</div><div class="line"><a name="l01174"></a><span class="lineno"> 1174</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellToOutputWeightsTensor, &amp;cellToOutputWeights[0]);</div><div class="line"><a name="l01175"></a><span class="lineno"> 1175</span>&#160;</div><div class="line"><a name="l01176"></a><span class="lineno"> 1176</span>&#160;</div><div class="line"><a name="l01177"></a><span class="lineno"> 1177</span>&#160;    data.m_InputToCellWeights = &amp;inputToCellWeightsTensor;</div><div class="line"><a name="l01178"></a><span class="lineno"> 1178</span>&#160;    data.m_InputToForgetWeights = &amp;inputToForgetWeightsTensor;</div><div class="line"><a name="l01179"></a><span class="lineno"> 1179</span>&#160;    data.m_InputToOutputWeights = &amp;inputToOutputWeightsTensor;</div><div class="line"><a name="l01180"></a><span class="lineno"> 1180</span>&#160;</div><div class="line"><a name="l01181"></a><span class="lineno"> 1181</span>&#160;    data.m_CellBias = &amp;cellBiasTensor;</div><div class="line"><a name="l01182"></a><span class="lineno"> 1182</span>&#160;    data.m_ForgetGateBias = &amp;forgetGateBiasTensor;</div><div class="line"><a name="l01183"></a><span class="lineno"> 1183</span>&#160;    data.m_OutputGateBias = &amp;outputGateBiasTensor;</div><div class="line"><a name="l01184"></a><span class="lineno"> 1184</span>&#160;</div><div class="line"><a name="l01185"></a><span class="lineno"> 1185</span>&#160;    data.m_RecurrentToCellWeights = &amp;recurrentToCellWeightsTensor;</div><div class="line"><a name="l01186"></a><span class="lineno"> 1186</span>&#160;    data.m_RecurrentToForgetWeights = &amp;recurrentToForgetWeightsTensor;</div><div class="line"><a 
name="l01187"></a><span class="lineno"> 1187</span>&#160;    data.m_RecurrentToOutputWeights = &amp;recurrentToOutputWeightsTensor;</div><div class="line"><a name="l01188"></a><span class="lineno"> 1188</span>&#160;</div><div class="line"><a name="l01189"></a><span class="lineno"> 1189</span>&#160;    data.m_CellToForgetWeights = &amp;cellToForgetWeightsTensor;</div><div class="line"><a name="l01190"></a><span class="lineno"> 1190</span>&#160;    data.m_CellToOutputWeights = &amp;cellToOutputWeightsTensor;</div><div class="line"><a name="l01191"></a><span class="lineno"> 1191</span>&#160;</div><div class="line"><a name="l01192"></a><span class="lineno"> 1192</span>&#160;    <span class="comment">// other parameters for the descriptor</span></div><div class="line"><a name="l01193"></a><span class="lineno"> 1193</span>&#160;    data.m_Parameters.m_CifgEnabled = cifgEnabled;</div><div class="line"><a name="l01194"></a><span class="lineno"> 1194</span>&#160;    data.m_Parameters.m_ProjectionEnabled = projectionEnabled;</div><div class="line"><a name="l01195"></a><span class="lineno"> 1195</span>&#160;    data.m_Parameters.m_PeepholeEnabled = peepholeEnabled;</div><div class="line"><a name="l01196"></a><span class="lineno"> 1196</span>&#160;</div><div class="line"><a name="l01197"></a><span class="lineno"> 1197</span>&#160;    data.m_Parameters.m_ActivationFunc = 4;</div><div class="line"><a name="l01198"></a><span class="lineno"> 1198</span>&#160;    data.m_Parameters.m_ClippingThresProj = 0.0;</div><div class="line"><a name="l01199"></a><span class="lineno"> 1199</span>&#160;    data.m_Parameters.m_ClippingThresCell = 0.0;</div><div class="line"><a name="l01200"></a><span class="lineno"> 1200</span>&#160;</div><div class="line"><a name="l01201"></a><span class="lineno"> 1201</span>&#160;</div><div class="line"><a name="l01202"></a><span class="lineno"> 1202</span>&#160;    <span class="comment">// List of outputs</span></div><div class="line"><a name="l01203"></a><span class="lineno"> 1203</span>&#160;    std::vector&lt;T&gt; scratchBufferVector(batchSize * scratchBufferSize, T());</div><div class="line"><a name="l01204"></a><span class="lineno"> 1204</span>&#160;    <span class="keyword">auto</span> scratchBufferTensor = MakeTensor&lt;T,2&gt;(scratchBufferTensorInfo, scratchBufferVector);</div><div class="line"><a name="l01205"></a><span class="lineno"> 1205</span>&#160;    <a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a> ret0(scratchBufferTensorInfo);</div><div class="line"><a name="l01206"></a><span class="lineno"> 1206</span>&#160;</div><div class="line"><a name="l01207"></a><span class="lineno"> 1207</span>&#160;    <span class="comment">// Output state for a certain time step</span></div><div class="line"><a name="l01208"></a><span class="lineno"> 1208</span>&#160;    std::vector&lt;T&gt; outputStateOutVector(batchSize * outputSize, T());</div><div class="line"><a name="l01209"></a><span class="lineno"> 1209</span>&#160;    <span class="keyword">auto</span> outputStateOutTensor = MakeTensor&lt;T,2&gt;(outputStateOutTensorInfo, outputStateOutVector);</div><div class="line"><a name="l01210"></a><span class="lineno"> 1210</span>&#160;    <a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a> ret1(outputStateOutTensorInfo);</div><div class="line"><a name="l01211"></a><span class="lineno"> 1211</span>&#160;</div><div class="line"><a name="l01212"></a><span class="lineno"> 1212</span>&#160;    <span class="comment">// Cell 
state for a certain time step</span></div><div class="line"><a name="l01213"></a><span class="lineno"> 1213</span>&#160;    std::vector&lt;T&gt; cellStateOutVector(batchSize * cellSize, T());</div><div class="line"><a name="l01214"></a><span class="lineno"> 1214</span>&#160;    <span class="keyword">auto</span> cellStateOutTensor = MakeTensor&lt;T,2&gt;(cellStateOutTensorInfo, cellStateOutVector);</div><div class="line"><a name="l01215"></a><span class="lineno"> 1215</span>&#160;    <a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a> ret2(cellStateOutTensorInfo);</div><div class="line"><a name="l01216"></a><span class="lineno"> 1216</span>&#160;</div><div class="line"><a name="l01217"></a><span class="lineno"> 1217</span>&#160;    <span class="comment">// Output for a certain time step</span></div><div class="line"><a name="l01218"></a><span class="lineno"> 1218</span>&#160;    std::vector&lt;T&gt; outputVector(batchSize * outputSize, T());</div><div class="line"><a name="l01219"></a><span class="lineno"> 1219</span>&#160;    <span class="keyword">auto</span> outputTensor = MakeTensor&lt;T, 2&gt;(outputTensorInfo, outputVector);</div><div class="line"><a name="l01220"></a><span class="lineno"> 1220</span>&#160;    std::vector&lt;T&gt; outputData;</div><div class="line"><a name="l01221"></a><span class="lineno"> 1221</span>&#160;    outputData.assign(outputExpected.data(), outputExpected.data() + batchSize*outputSize);</div><div class="line"><a name="l01222"></a><span class="lineno"> 1222</span>&#160;    <a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;T, 2&gt;</a> ret3(outputTensorInfo);</div><div class="line"><a name="l01223"></a><span class="lineno"> 1223</span>&#160;    ret3.outputExpected = MakeTensor&lt;T, 2&gt;(outputTensorInfo, outputData);</div><div class="line"><a name="l01224"></a><span class="lineno"> 1224</span>&#160;</div><div class="line"><a name="l01225"></a><span class="lineno"> 1225</span>&#160;    <span class="comment">// Prepare the inputs and outputs for the workload</span></div><div class="line"><a name="l01226"></a><span class="lineno"> 1226</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; inputHandle =</div><div class="line"><a name="l01227"></a><span class="lineno"> 1227</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(inputTensorInfo);</div><div class="line"><a name="l01228"></a><span class="lineno"> 1228</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateInHandle =</div><div class="line"><a name="l01229"></a><span class="lineno"> 1229</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInTensorInfo);</div><div class="line"><a name="l01230"></a><span class="lineno"> 1230</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateInHandle =</div><div class="line"><a name="l01231"></a><span class="lineno"> 1231</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateInTensorInfo);</div><div class="line"><a name="l01232"></a><span class="lineno"> 1232</span>&#160;</div><div class="line"><a name="l01233"></a><span class="lineno"> 1233</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; 
scratchBufferHandle =</div><div class="line"><a name="l01234"></a><span class="lineno"> 1234</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(scratchBufferTensorInfo);</div><div class="line"><a name="l01235"></a><span class="lineno"> 1235</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateOutHandle =</div><div class="line"><a name="l01236"></a><span class="lineno"> 1236</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateOutTensorInfo);</div><div class="line"><a name="l01237"></a><span class="lineno"> 1237</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateOutHandle =</div><div class="line"><a name="l01238"></a><span class="lineno"> 1238</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateOutTensorInfo);</div><div class="line"><a name="l01239"></a><span class="lineno"> 1239</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputHandle =</div><div class="line"><a name="l01240"></a><span class="lineno"> 1240</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputTensorInfo);</div><div class="line"><a name="l01241"></a><span class="lineno"> 1241</span>&#160;</div><div class="line"><a name="l01242"></a><span class="lineno"> 1242</span>&#160;    <a class="code" href="structarmnn_1_1_workload_info.xhtml">armnn::WorkloadInfo</a> info;</div><div class="line"><a name="l01243"></a><span class="lineno"> 1243</span>&#160;    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());</div><div class="line"><a name="l01244"></a><span class="lineno"> 1244</span>&#160;    AddInputToWorkload(data, info, outputStateInTensorInfo, outputStateInHandle.get());</div><div class="line"><a name="l01245"></a><span class="lineno"> 1245</span>&#160;    AddInputToWorkload(data, info, cellStateInTensorInfo, cellStateInHandle.get());</div><div class="line"><a name="l01246"></a><span class="lineno"> 1246</span>&#160;</div><div class="line"><a name="l01247"></a><span class="lineno"> 1247</span>&#160;    AddOutputToWorkload(data, info, scratchBufferTensorInfo, scratchBufferHandle.get());</div><div class="line"><a name="l01248"></a><span class="lineno"> 1248</span>&#160;    AddOutputToWorkload(data, info, outputStateOutTensorInfo, outputStateOutHandle.get());</div><div class="line"><a name="l01249"></a><span class="lineno"> 1249</span>&#160;    AddOutputToWorkload(data, info, cellStateOutTensorInfo, cellStateOutHandle.get());</div><div class="line"><a name="l01250"></a><span class="lineno"> 1250</span>&#160;    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());</div><div class="line"><a name="l01251"></a><span class="lineno"> 1251</span>&#160;</div><div class="line"><a name="l01252"></a><span class="lineno"> 1252</span>&#160;    std::unique_ptr&lt;armnn::IWorkload&gt; workload = workloadFactory.<a class="code" href="classarmnn_1_1_i_workload_factory.xhtml#ab6bd7aaf685d4e956d780f8655a6f174">CreateLstm</a>(data, info);</div><div class="line"><a name="l01253"></a><span class="lineno"> 1253</span>&#160;</div><div class="line"><a name="l01254"></a><span class="lineno"> 
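// Note on the steps that follow (a summary of the code below, not new upstream logic):
// after CreateLstm() this CIFG test allocates every handle, copies the real inputs in
// with CopyDataToITensorHandle() and, unlike the no-CIFG/projection test above, also
// copies the zero-initialised scratch, output-state and cell-state tensors into their
// output handles before Execute(). It then reads ret0..ret3 back with
// CopyDataFromITensorHandle() and returns ret3 (the layer output) for comparison
// against outputExpected.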
1254</span>&#160;</div><div class="line"><a name="l01255"></a><span class="lineno"> 1255</span>&#160;    inputHandle-&gt;Allocate();</div><div class="line"><a name="l01256"></a><span class="lineno"> 1256</span>&#160;    outputStateInHandle-&gt;Allocate();</div><div class="line"><a name="l01257"></a><span class="lineno"> 1257</span>&#160;    cellStateInHandle-&gt;Allocate();</div><div class="line"><a name="l01258"></a><span class="lineno"> 1258</span>&#160;</div><div class="line"><a name="l01259"></a><span class="lineno"> 1259</span>&#160;    scratchBufferHandle-&gt;Allocate();</div><div class="line"><a name="l01260"></a><span class="lineno"> 1260</span>&#160;    outputStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l01261"></a><span class="lineno"> 1261</span>&#160;    cellStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l01262"></a><span class="lineno"> 1262</span>&#160;    outputHandle-&gt;Allocate();</div><div class="line"><a name="l01263"></a><span class="lineno"> 1263</span>&#160;</div><div class="line"><a name="l01264"></a><span class="lineno"> 1264</span>&#160;</div><div class="line"><a name="l01265"></a><span class="lineno"> 1265</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(inputHandle.get(), &amp;inputTensor[0][0]);</div><div class="line"><a name="l01266"></a><span class="lineno"> 1266</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(outputStateInHandle.get(), &amp;outputStateInTensor[0][0]);</div><div class="line"><a name="l01267"></a><span class="lineno"> 1267</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(cellStateInHandle.get(), &amp;cellStateInTensor[0][0]);</div><div class="line"><a name="l01268"></a><span class="lineno"> 1268</span>&#160;</div><div class="line"><a name="l01269"></a><span class="lineno"> 1269</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(scratchBufferHandle.get(), &amp;scratchBufferTensor[0][0]);</div><div class="line"><a name="l01270"></a><span class="lineno"> 1270</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(outputStateOutHandle.get(), &amp;outputStateOutTensor[0][0]);</div><div class="line"><a name="l01271"></a><span class="lineno"> 1271</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(cellStateOutHandle.get(), &amp;cellStateOutTensor[0][0]);</div><div class="line"><a name="l01272"></a><span class="lineno"> 1272</span>&#160;</div><div class="line"><a name="l01273"></a><span class="lineno"> 1273</span>&#160;    workload-&gt;Execute();</div><div class="line"><a name="l01274"></a><span class="lineno"> 1274</span>&#160;</div><div class="line"><a name="l01275"></a><span class="lineno"> 1275</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#a99b626c58a926dc7d6df78d22ec186c8">CopyDataFromITensorHandle</a>(&amp;ret0.output[0][0], scratchBufferHandle.get());</div><div class="line"><a name="l01276"></a><span class="lineno"> 1276</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#a99b626c58a926dc7d6df78d22ec186c8">CopyDataFromITensorHandle</a>(&amp;ret1.output[0][0], outputStateOutHandle.get());</div><div 
    CopyDataFromITensorHandle(&ret0.output[0][0], scratchBufferHandle.get());
    CopyDataFromITensorHandle(&ret1.output[0][0], outputStateOutHandle.get());
    CopyDataFromITensorHandle(&ret2.output[0][0], cellStateOutHandle.get());
    CopyDataFromITensorHandle(&ret3.output[0][0], outputHandle.get());

    return ret3;
}

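// The next test drives a float LSTM workload with CIFG disabled and peephole, projection and
// layer normalisation enabled. Shapes used below: input [batchSize, inputSize] = [2, 5],
// cell state [batchSize, numUnits] = [2, 4], output state and output [batchSize, outputSize] = [2, 3]
// (outputSize differs from numUnits because the projection layer is enabled), and a scratch
// buffer of [batchSize, 4 * numUnits] since CIFG is disabled.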
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
LayerTestResult<T, 2>
LstmLayerNoCifgWithPeepholeWithProjectionWithLayerNormTestImpl(armnn::IWorkloadFactory& workloadFactory,
                                                  const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
                                                  const armnn::ITensorHandleFactory& tensorHandleFactory,
                                                  const boost::multi_array<T, 2>& input,
                                                  const boost::multi_array<T, 2>& outputExpected,
                                                  float qScale = 0.0f,
                                                  int32_t qOffset = 0,
                                                  armnn::DataType constantDataType = armnn::DataType::Float32)
{
    IgnoreUnused(memoryManager);
    unsigned int batchSize = 2;
    unsigned int outputSize = 3;
    unsigned int inputSize = 5;
    unsigned numUnits = 4;

    armnn::TensorInfo inputTensorInfo({batchSize , inputSize}, ArmnnType, qScale, qOffset);
    armnn::TensorInfo cellStateInTensorInfo({batchSize , numUnits}, ArmnnType, qScale, qOffset);
    armnn::TensorInfo outputStateInTensorInfo({batchSize , outputSize}, ArmnnType, qScale, qOffset);

    // Scratch buffer size without CIFG [batchSize, numUnits * 4]
    armnn::TensorInfo scratchBufferTensorInfo({batchSize, numUnits * 4}, ArmnnType, qScale, qOffset);
    armnn::TensorInfo cellStateOutTensorInfo({batchSize, numUnits}, ArmnnType, qScale, qOffset);
    armnn::TensorInfo outputStateOutTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);
    armnn::TensorInfo outputTensorInfo({batchSize, outputSize}, ArmnnType, qScale, qOffset);

    LayerTestResult<T, 2> ret(outputTensorInfo);

    std::vector<float> inputVector;
    inputVector.assign(input.data(), input.data() + (batchSize * inputSize));
    auto inputTensor = MakeTensor<float,2>(inputTensorInfo, inputVector);

    std::vector<float> cellStateInVector(batchSize * numUnits, 0.f);
    auto cellStateInTensor = MakeTensor<float,2>(cellStateInTensorInfo, cellStateInVector);

    std::vector<float> outputStateInVector(batchSize * outputSize, 0.f);
    auto outputStateInTensor = MakeTensor<float,2>(outputStateInTensorInfo, outputStateInVector);

    std::vector<float> scratchBufferVector(batchSize * numUnits * 4, 0.f);
    auto scratchBufferTensor = MakeTensor<float,2>(scratchBufferTensorInfo, scratchBufferVector);

    std::vector<float> outputStateOutVector(batchSize * outputSize, 0.f);
    auto outputStateOutTensor = MakeTensor<float,2>(outputStateOutTensorInfo, outputStateOutVector);

    std::vector<float> cellStateOutVector(batchSize * numUnits, 0.f);
    auto cellStateOutTensor = MakeTensor<float,2>(cellStateOutTensorInfo, cellStateOutVector);

    std::vector<float> outputVector;
    outputVector.assign(outputExpected.data(), outputExpected.data() + (batchSize * outputSize));
    ret.outputExpected = MakeTensor<float, 2>(outputTensorInfo, outputVector);

    std::unique_ptr<armnn::ITensorHandle> inputHandle = tensorHandleFactory.CreateTensorHandle(inputTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> cellStateInHandle =
            tensorHandleFactory.CreateTensorHandle(cellStateInTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputStateInHandle =
            tensorHandleFactory.CreateTensorHandle(outputStateInTensorInfo);

    std::unique_ptr<armnn::ITensorHandle> scratchHandle =
            tensorHandleFactory.CreateTensorHandle(scratchBufferTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputStateOutHandle =
            tensorHandleFactory.CreateTensorHandle(outputStateOutTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> cellStateOutHandle =
            tensorHandleFactory.CreateTensorHandle(cellStateOutTensorInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = tensorHandleFactory.CreateTensorHandle(outputTensorInfo);

    armnn::LstmQueueDescriptor data;
    armnn::WorkloadInfo info;

    AddInputToWorkload(data, info, inputTensorInfo, inputHandle.get());
    AddInputToWorkload(data, info, outputStateInTensorInfo, outputStateInHandle.get());
    AddInputToWorkload(data, info, cellStateInTensorInfo, cellStateInHandle.get());

    AddOutputToWorkload(data, info, scratchBufferTensorInfo, scratchHandle.get());
    AddOutputToWorkload(data, info, outputStateOutTensorInfo, outputStateOutHandle.get());
    AddOutputToWorkload(data, info, cellStateOutTensorInfo, cellStateOutHandle.get());
    AddOutputToWorkload(data, info, outputTensorInfo, outputHandle.get());

    armnn::TensorInfo tensorInfo3({outputSize}, constantDataType, qScale, qOffset);
    armnn::TensorInfo tensorInfo4({numUnits}, constantDataType, qScale, qOffset);
    armnn::TensorInfo tensorInfo4x5({numUnits, inputSize}, constantDataType, qScale, qOffset);
    armnn::TensorInfo tensorInfo4x3({numUnits, outputSize}, constantDataType, qScale, qOffset);
    armnn::TensorInfo tensorInfo3x4({outputSize, numUnits}, constantDataType, qScale, qOffset);

    auto inputToInputWeights =
            MakeTensor<float, 2>(tensorInfo4x5, { 0.5f,  0.6f,  0.7f, -0.8f, -0.9f,
                                                  0.1f,  0.2f,  0.3f, -0.4f,  0.5f,
                                                 -0.8f,  0.7f, -0.6f,  0.5f, -0.4f,
                                                 -0.5f, -0.4f, -0.3f, -0.2f, -0.1f});  //{numUnits, inputSize}

    auto inputToForgetWeights =
            MakeTensor<float, 2>(tensorInfo4x5, {-0.6f, -0.1f,  0.3f,  0.2f,  0.9f,
                                                 -0.5f, -0.2f, -0.4f,  0.3f, -0.8f,
                                                 -0.4f,  0.3f, -0.5f, -0.4f, -0.6f,
                                                  0.3f, -0.4f, -0.6f, -0.5f, -0.5f});  //{numUnits, inputSize}

    auto inputToCellWeights =
            MakeTensor<float, 2>(tensorInfo4x5, {-0.4f, -0.3f, -0.2f, -0.1f, -0.5f,
                                                  0.5f, -0.2f, -0.3f, -0.2f, -0.6f,
                                                  0.6f, -0.1f, -0.4f, -0.3f, -0.7f,
                                                  0.7f, -0.9f, -0.5f,  0.8f,  0.6f});  //{numUnits, inputSize}

    auto inputToOutputWeights =
            MakeTensor<float, 2>(tensorInfo4x5, {-0.8f, -0.4f, -0.2f, -0.9f, -0.1f,
                                                 -0.7f,  0.3f, -0.3f, -0.8f, -0.2f,
                                                  0.6f, -0.2f,  0.4f, -0.7f, -0.3f,
                                                 -0.5f,  0.1f,  0.5f, -0.6f, -0.4f}); //{numUnits, inputSize}

    auto inputGateBias =
            MakeTensor<float, 1>(tensorInfo4, {0.03f, 0.15f, 0.22f, 0.38f});  //{numUnits}

    auto forgetGateBias =
            MakeTensor<float, 1>(tensorInfo4, {0.1f, -0.3f, -0.2f, 0.1f});    //{numUnits}

    auto cellBias =
            MakeTensor<float, 1>(tensorInfo4, {-0.05f, 0.72f, 0.25f, 0.08f}); //{numUnits}

    auto outputGateBias =
            MakeTensor<float, 1>(tensorInfo4, {0.05f, -0.01f, 0.2f, 0.1f});   //{numUnits}

    auto recurrentToInputWeights =
            MakeTensor<float, 2>(tensorInfo4x3, {-0.2f, -0.3f,  0.4f,
                                                  0.1f, -0.5f,  0.9f,
                                                 -0.2f, -0.3f, -0.7f,
                                                 0.05f, -0.2f, -0.6f});  //{numUnits, outputSize}

    auto recurrentToCellWeights =
            MakeTensor<float, 2>(tensorInfo4x3, {-0.3f,  0.2f,   0.1f,
                                                 -0.3f,  0.8f, -0.08f,
                                                 -0.2f,  0.3f,   0.8f,
                                                 -0.6f, -0.1f,   0.2f}); //{numUnits, outputSize}

    auto recurrentToForgetWeights =
            MakeTensor<float, 2>(tensorInfo4x3, {-0.5f, -0.3f, -0.5f,
                                                 -0.2f,  0.6f,  0.4f,
                                                  0.9f,  0.3f, -0.1f,
                                                  0.2f,  0.5f,  0.2f});  //{numUnits, outputSize}

    auto recurrentToOutputWeights =
            MakeTensor<float, 2>(tensorInfo4x3, { 0.3f, -0.1f,  0.1f,
                                                 -0.2f, -0.5f, -0.7f,
                                                 -0.2f, -0.6f, -0.1f,
                                                 -0.4f, -0.7f, -0.2f});  //{numUnits, outputSize}

    auto cellToInputWeights =
            MakeTensor<float, 1>(tensorInfo4, {0.05f, 0.1f, 0.25f, 0.15f});      //{numUnits}

    auto cellToForgetWeights =
            MakeTensor<float, 1>(tensorInfo4, {-0.02f, -0.15f, -0.25f, -0.03f}); //{numUnits}

    auto cellToOutputWeights =
            MakeTensor<float, 1>(tensorInfo4, {0.1f, -0.1f, -0.5f, 0.05f});      //{numUnits}

    auto projectionWeights =
            MakeTensor<float, 2>(tensorInfo3x4,
                                 {-0.1f, 0.2f, 0.01f, -0.2f,
                                   0.1f, 0.5f,  0.3f, 0.08f,
                                  0.07f, 0.2f, -0.4f,  0.2f}); //{outputSize, numUnits}

    std::vector<float> projectionBiasVector(outputSize, 0.f);
    auto projectionBias = MakeTensor<float,1>(tensorInfo3, projectionBiasVector); //{outputSize}

    auto inputLayerNormWeights =
            MakeTensor<float, 1>(tensorInfo4, {0.1f, 0.2f, 0.3f, 0.5f}); //{numUnits}

    auto forgetLayerNormWeights =
            MakeTensor<float, 1>(tensorInfo4, {0.2f, 0.2f, 0.4f, 0.3f}); //{numUnits}

    auto cellLayerNormWeights =
            MakeTensor<float, 1>(tensorInfo4, {0.7f, 0.2f, 0.3f, 0.8f}); //{numUnits}

    auto outputLayerNormWeights =
            MakeTensor<float, 1>(tensorInfo4, {0.6f, 0.2f, 0.2f, 0.5f}); //{numUnits}


    armnn::ScopedCpuTensorHandle inputToInputWeightsTensor(tensorInfo4x5);
    armnn::ScopedCpuTensorHandle inputToForgetWeightsTensor(tensorInfo4x5);
    armnn::ScopedCpuTensorHandle inputToCellWeightsTensor(tensorInfo4x5);
    armnn::ScopedCpuTensorHandle inputToOutputWeightsTensor(tensorInfo4x5);
    armnn::ScopedCpuTensorHandle recurrentToForgetWeightsTensor(tensorInfo4x3);
    armnn::ScopedCpuTensorHandle recurrentToInputWeightsTensor(tensorInfo4x3);
    armnn::ScopedCpuTensorHandle recurrentToCellWeightsTensor(tensorInfo4x3);
    armnn::ScopedCpuTensorHandle recurrentToOutputWeightsTensor(tensorInfo4x3);
    armnn::ScopedCpuTensorHandle cellToInputWeightsTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle inputGateBiasTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle forgetGateBiasTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle cellBiasTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle outputGateBiasTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle cellToForgetWeightsTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle cellToOutputWeightsTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle projectionWeightsTensor(tensorInfo3x4);
    armnn::ScopedCpuTensorHandle projectionBiasTensor(tensorInfo3);

    armnn::ScopedCpuTensorHandle inputLayerNormWeightsTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle forgetLayerNormWeightsTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle cellLayerNormWeightsTensor(tensorInfo4);
    armnn::ScopedCpuTensorHandle outputLayerNormWeightsTensor(tensorInfo4);

    AllocateAndCopyDataToITensorHandle(&inputToInputWeightsTensor, &inputToInputWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&inputToForgetWeightsTensor, &inputToForgetWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&inputToCellWeightsTensor, &inputToCellWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&inputToOutputWeightsTensor, &inputToOutputWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&recurrentToInputWeightsTensor, &recurrentToInputWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&recurrentToForgetWeightsTensor, &recurrentToForgetWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&recurrentToCellWeightsTensor, &recurrentToCellWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&recurrentToOutputWeightsTensor, &recurrentToOutputWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&cellToInputWeightsTensor, &cellToInputWeights[0]);
    AllocateAndCopyDataToITensorHandle(&inputGateBiasTensor, &inputGateBias[0]);
    AllocateAndCopyDataToITensorHandle(&forgetGateBiasTensor, &forgetGateBias[0]);
    AllocateAndCopyDataToITensorHandle(&cellBiasTensor, &cellBias[0]);
    AllocateAndCopyDataToITensorHandle(&outputGateBiasTensor, &outputGateBias[0]);
    AllocateAndCopyDataToITensorHandle(&cellToForgetWeightsTensor, &cellToForgetWeights[0]);
    AllocateAndCopyDataToITensorHandle(&cellToOutputWeightsTensor, &cellToOutputWeights[0]);
    AllocateAndCopyDataToITensorHandle(&projectionWeightsTensor, &projectionWeights[0][0]);
    AllocateAndCopyDataToITensorHandle(&projectionBiasTensor, &projectionBias[0]);

    AllocateAndCopyDataToITensorHandle(&inputLayerNormWeightsTensor, &inputLayerNormWeights[0]);
    AllocateAndCopyDataToITensorHandle(&forgetLayerNormWeightsTensor, &forgetLayerNormWeights[0]);
    AllocateAndCopyDataToITensorHandle(&cellLayerNormWeightsTensor, &cellLayerNormWeights[0]);
    AllocateAndCopyDataToITensorHandle(&outputLayerNormWeightsTensor, &outputLayerNormWeights[0]);

    data.m_InputToInputWeights = &inputToInputWeightsTensor;
    data.m_InputToForgetWeights = &inputToForgetWeightsTensor;
    data.m_InputToCellWeights = &inputToCellWeightsTensor;
    data.m_InputToOutputWeights = &inputToOutputWeightsTensor;
    data.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
    data.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
    data.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
    data.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
    data.m_CellToInputWeights = &cellToInputWeightsTensor;
    data.m_InputGateBias = &inputGateBiasTensor;
    data.m_ForgetGateBias = &forgetGateBiasTensor;
    data.m_CellBias = &cellBiasTensor;
    data.m_OutputGateBias = &outputGateBiasTensor;
    data.m_CellToForgetWeights = &cellToForgetWeightsTensor;
    data.m_CellToOutputWeights = &cellToOutputWeightsTensor;
    data.m_ProjectionWeights = &projectionWeightsTensor;
    data.m_ProjectionBias = &projectionBiasTensor;

    data.m_InputLayerNormWeights = &inputLayerNormWeightsTensor;
    data.m_ForgetLayerNormWeights = &forgetLayerNormWeightsTensor;
    data.m_CellLayerNormWeights = &cellLayerNormWeightsTensor;
    data.m_OutputLayerNormWeights = &outputLayerNormWeightsTensor;

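    // Note on the descriptor settings below: m_ActivationFunc is an integer code; in ArmNN's
    // LSTM descriptor convention the value 4 used here selects TanH. With CIFG disabled and
    // peephole, projection and layer normalisation enabled, the input-gate, peephole,
    // projection and layer-norm tensors populated above are all required.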
    // Flags to set test configuration
    data.m_Parameters.m_ActivationFunc = 4;
    data.m_Parameters.m_CifgEnabled = false;
    data.m_Parameters.m_PeepholeEnabled = true;
    data.m_Parameters.m_ProjectionEnabled = true;
    data.m_Parameters.m_LayerNormEnabled = true;


    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateLstm(data, info);
    inputHandle->Allocate();
    outputStateInHandle->Allocate();
    cellStateInHandle->Allocate();

    scratchHandle->Allocate();
    outputStateOutHandle->Allocate();
    cellStateOutHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), &inputTensor[0][0]);
    CopyDataToITensorHandle(outputStateInHandle.get(), &outputStateInTensor[0][0]);
    CopyDataToITensorHandle(cellStateInHandle.get(), &cellStateInTensor[0][0]);

    workload->Execute();

    CopyDataFromITensorHandle(&ret.output[0][0], outputHandle.get());

    return ret;
}

LayerTestResult<uint8_t, 2> QuantizedLstmTestImpl(
    armnn::IWorkloadFactory& workloadFactory,
    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
    const armnn::ITensorHandleFactory& tensorHandleFactory,
    const boost::multi_array<uint8_t, 2>& input,
    const boost::multi_array<uint8_t, 2>& outputExpected)
{
    IgnoreUnused(memoryManager);
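    // This test targets the QuantizedLstm workload: input and output tensors are 8-bit
    // asymmetric quantized (QAsymmU8) while the cell state is 16-bit symmetric quantized
    // (QSymmS16), as set up in the tensor infos below.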
    auto numBatches = armnn::numeric_cast<unsigned int>(input.shape()[0]);
    auto inputSize = armnn::numeric_cast<unsigned int>(input.shape()[1]);
    auto outputSize = armnn::numeric_cast<unsigned int>(outputExpected.shape()[1]);

    // Scale/Offset for input/output, cellState In/Out, weights, bias
    float inputOutputScale = 0.0078125f;
    int32_t inputOutputOffset = 128;

    float cellStateScale = 0.00048828125f;
    int32_t cellStateOffset = 0;

    float weightsScale = 0.00408021f;
    int32_t weightsOffset = 100;

    float biasScale = 3.1876640625e-05f;
    int32_t biasOffset = 0;
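    // For reference, quantized values map back to real values as real = scale * (quantized - offset).
    // inputOutputScale = 1/128 with offset 128 maps uint8 [0, 255] onto roughly [-1.0, 0.992],
    // and cellStateScale = 2^-11 maps the symmetric int16 range onto roughly [-16.0, 16.0).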

    // Input/Output tensor info
    armnn::TensorInfo inputInfo({numBatches , inputSize},
                                 armnn::DataType::QAsymmU8,
                                 inputOutputScale,
                                 inputOutputOffset);

    armnn::TensorInfo cellStateInfo({numBatches , outputSize},
                                     armnn::DataType::QSymmS16,
                                     cellStateScale,
                                     cellStateOffset);

    armnn::TensorInfo outputStateInfo({numBatches , outputSize},
                                       armnn::DataType::QAsymmU8,
                                       inputOutputScale,
                                       inputOutputOffset);

    LayerTestResult<uint8_t, 2> ret(outputStateInfo);

    // Input0
    std::vector<uint8_t> inputVector;
    inputVector.assign(input.data(), input.data() + (numBatches * inputSize));
    auto inputTensor = MakeTensor<uint8_t, 2>(inputInfo, inputVector);

    // Input1
    std::vector<int16_t> cellStateInVector   = {876, 1034, 955, -909, 761, 1029, 796, -1036}; // 13
    auto cellStateInTensor   = MakeTensor<int16_t, 2>(cellStateInfo, cellStateInVector);

    // Input2
    std::vector<uint8_t> outputStateInVector = {136, 150, 140, 115, 135, 152, 138, 112}; // 14
    auto outputStateInTensor = MakeTensor<uint8_t, 2>(outputStateInfo, outputStateInVector);

    // Output0
    std::vector<int16_t> cellStateOutVector  = {1485, 1177, 1373, -1023, 1019, 1355, 1097, -1235}; // 0
    auto cellStateOutTensor  = MakeTensor<int16_t, 2>(cellStateInfo, cellStateOutVector);

    // Output1
    std::vector<uint8_t> outputVector; // 1
    outputVector.assign(outputExpected.data(), outputExpected.data() + (numBatches * outputSize));
    ret.outputExpected = MakeTensor<uint8_t, 2>(outputStateInfo, outputVector);

    // Create tensor handles
    std::unique_ptr<armnn::ITensorHandle> inputHandle = tensorHandleFactory.CreateTensorHandle(inputInfo);
    std::unique_ptr<armnn::ITensorHandle> cellStateInHandle =
            tensorHandleFactory.CreateTensorHandle(cellStateInfo);
    std::unique_ptr<armnn::ITensorHandle> outputStateInHandle =
            tensorHandleFactory.CreateTensorHandle(outputStateInfo);

    std::unique_ptr<armnn::ITensorHandle> cellStateOutHandle =
            tensorHandleFactory.CreateTensorHandle(cellStateInfo);
    std::unique_ptr<armnn::ITensorHandle> outputHandle = tensorHandleFactory.CreateTensorHandle(outputStateInfo);

tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInfo);</div><div class="line"><a name="l01631"></a><span class="lineno"> 1631</span>&#160;</div><div class="line"><a name="l01632"></a><span class="lineno"> 1632</span>&#160;    <a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml">armnn::QuantizedLstmQueueDescriptor</a> data;</div><div class="line"><a name="l01633"></a><span class="lineno"> 1633</span>&#160;    <a class="code" href="structarmnn_1_1_workload_info.xhtml">armnn::WorkloadInfo</a> info;</div><div class="line"><a name="l01634"></a><span class="lineno"> 1634</span>&#160;</div><div class="line"><a name="l01635"></a><span class="lineno"> 1635</span>&#160;    <span class="comment">// Add inputs and outputs to workload</span></div><div class="line"><a name="l01636"></a><span class="lineno"> 1636</span>&#160;    AddInputToWorkload(data, info, inputInfo, inputHandle.get());</div><div class="line"><a name="l01637"></a><span class="lineno"> 1637</span>&#160;    AddInputToWorkload(data, info, cellStateInfo, cellStateInHandle.get());</div><div class="line"><a name="l01638"></a><span class="lineno"> 1638</span>&#160;    AddInputToWorkload(data, info, outputStateInfo, outputStateInHandle.get());</div><div class="line"><a name="l01639"></a><span class="lineno"> 1639</span>&#160;</div><div class="line"><a name="l01640"></a><span class="lineno"> 1640</span>&#160;    AddOutputToWorkload(data, info, cellStateInfo, cellStateOutHandle.get());</div><div class="line"><a name="l01641"></a><span class="lineno"> 1641</span>&#160;    AddOutputToWorkload(data, info, outputStateInfo, outputHandle.get());</div><div class="line"><a name="l01642"></a><span class="lineno"> 1642</span>&#160;</div><div class="line"><a name="l01643"></a><span class="lineno"> 1643</span>&#160;    <span class="comment">// Weights and bias tensor and quantization info</span></div><div class="line"><a name="l01644"></a><span class="lineno"> 1644</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputWeightsInfo({outputSize, inputSize},</div><div class="line"><a name="l01645"></a><span class="lineno"> 1645</span>&#160;                                        <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::DataType::QAsymmU8</a>,</div><div class="line"><a name="l01646"></a><span class="lineno"> 1646</span>&#160;                                        weightsScale,</div><div class="line"><a name="l01647"></a><span class="lineno"> 1647</span>&#160;                                        weightsOffset);</div><div class="line"><a name="l01648"></a><span class="lineno"> 1648</span>&#160;</div><div class="line"><a name="l01649"></a><span class="lineno"> 1649</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> recurrentWeightsInfo({outputSize, outputSize},</div><div class="line"><a name="l01650"></a><span class="lineno"> 1650</span>&#160;                                            <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::DataType::QAsymmU8</a>,</div><div class="line"><a name="l01651"></a><span class="lineno"> 1651</span>&#160;                                            weightsScale,</div><div class="line"><a name="l01652"></a><span class="lineno"> 1652</span>&#160;                         
                   weightsOffset);</div><div class="line"><a name="l01653"></a><span class="lineno"> 1653</span>&#160;</div><div class="line"><a name="l01654"></a><span class="lineno"> 1654</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> biasInfo({outputSize}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6accedffbc6e5308e33d3843e8bdc0dad7">armnn::DataType::Signed32</a>, biasScale, biasOffset);</div><div class="line"><a name="l01655"></a><span class="lineno"> 1655</span>&#160;</div><div class="line"><a name="l01656"></a><span class="lineno"> 1656</span>&#160;    <span class="comment">// Weights and bias tensor data</span></div><div class="line"><a name="l01657"></a><span class="lineno"> 1657</span>&#160;    <span class="keyword">auto</span> inputToInputWeights  = MakeTensor&lt;uint8_t, 2&gt;(inputWeightsInfo, {146, 250, 235, 171, 10, 218, 171, 108});</div><div class="line"><a name="l01658"></a><span class="lineno"> 1658</span>&#160;    <span class="keyword">auto</span> inputToForgetWeights = MakeTensor&lt;uint8_t, 2&gt;(inputWeightsInfo, {24, 50, 132, 179, 158, 110, 3, 169});</div><div class="line"><a name="l01659"></a><span class="lineno"> 1659</span>&#160;    <span class="keyword">auto</span> inputToCellWeights   = MakeTensor&lt;uint8_t, 2&gt;(inputWeightsInfo, {133, 34, 29, 49, 206, 109, 54, 183});</div><div class="line"><a name="l01660"></a><span class="lineno"> 1660</span>&#160;    <span class="keyword">auto</span> inputToOutputWeights = MakeTensor&lt;uint8_t, 2&gt;(inputWeightsInfo, {195, 187, 11, 99, 109, 10, 218, 48});</div><div class="line"><a name="l01661"></a><span class="lineno"> 1661</span>&#160;</div><div class="line"><a name="l01662"></a><span class="lineno"> 1662</span>&#160;    <span class="keyword">auto</span> recurrentToInputWeights  = MakeTensor&lt;uint8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l01663"></a><span class="lineno"> 1663</span>&#160;            {254, 206, 77, 168, 71, 20, 215, 6, 223, 7, 118, 225, 59, 130, 174, 26});</div><div class="line"><a name="l01664"></a><span class="lineno"> 1664</span>&#160;    <span class="keyword">auto</span> recurrentToForgetWeights = MakeTensor&lt;uint8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l01665"></a><span class="lineno"> 1665</span>&#160;            {137, 240, 103, 52, 68, 51, 237, 112, 0, 220, 89, 23, 69, 4, 207, 253});</div><div class="line"><a name="l01666"></a><span class="lineno"> 1666</span>&#160;    <span class="keyword">auto</span> recurrentToCellWeights   = MakeTensor&lt;uint8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l01667"></a><span class="lineno"> 1667</span>&#160;            {172, 60, 205, 65, 14, 0, 140, 168, 240, 223, 133, 56, 142, 64, 246, 216});</div><div class="line"><a name="l01668"></a><span class="lineno"> 1668</span>&#160;    <span class="keyword">auto</span> recurrentToOutputWeights = MakeTensor&lt;uint8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l01669"></a><span class="lineno"> 1669</span>&#160;            {106, 214, 67, 23, 59, 158, 45, 3, 119, 132, 49, 205, 129, 218, 11, 98});</div><div class="line"><a name="l01670"></a><span class="lineno"> 1670</span>&#160;</div><div class="line"><a name="l01671"></a><span class="lineno"> 1671</span>&#160;    <span class="keyword">auto</span> inputGateBias  = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {-7876, 13488, -726, 32839});</div><div class="line"><a name="l01672"></a><span class="lineno"> 
1672</span>&#160;    <span class="keyword">auto</span> forgetGateBias = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {9206, -46884, -11693, -38724});</div><div class="line"><a name="l01673"></a><span class="lineno"> 1673</span>&#160;    <span class="keyword">auto</span> cellBias       = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {39481, 48624, 48976, -21419});</div><div class="line"><a name="l01674"></a><span class="lineno"> 1674</span>&#160;    <span class="keyword">auto</span> outputGateBias = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {-58999, -17050, -41852, -40538});</div><div class="line"><a name="l01675"></a><span class="lineno"> 1675</span>&#160;</div><div class="line"><a name="l01676"></a><span class="lineno"> 1676</span>&#160;    <span class="comment">// ScopedCpuTensorHandles</span></div><div class="line"><a name="l01677"></a><span class="lineno"> 1677</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToInputWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l01678"></a><span class="lineno"> 1678</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToForgetWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l01679"></a><span class="lineno"> 1679</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToCellWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l01680"></a><span class="lineno"> 1680</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToOutputWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l01681"></a><span class="lineno"> 1681</span>&#160;</div><div class="line"><a name="l01682"></a><span class="lineno"> 1682</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToInputWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l01683"></a><span class="lineno"> 1683</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToForgetWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l01684"></a><span class="lineno"> 1684</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToCellWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l01685"></a><span class="lineno"> 1685</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToOutputWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l01686"></a><span class="lineno"> 1686</span>&#160;</div><div class="line"><a name="l01687"></a><span class="lineno"> 1687</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputGateBiasTensor(biasInfo);</div><div class="line"><a name="l01688"></a><span class="lineno"> 1688</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetGateBiasTensor(biasInfo);</div><div class="line"><a name="l01689"></a><span class="lineno"> 1689</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellBiasTensor(biasInfo);</div><div class="line"><a 
name="l01690"></a><span class="lineno"> 1690</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputGateBiasTensor(biasInfo);</div><div class="line"><a name="l01691"></a><span class="lineno"> 1691</span>&#160;</div><div class="line"><a name="l01692"></a><span class="lineno"> 1692</span>&#160;    <span class="comment">// Allocate and copy data</span></div><div class="line"><a name="l01693"></a><span class="lineno"> 1693</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToInputWeightsTensor, &amp;inputToInputWeights[0][0]);</div><div class="line"><a name="l01694"></a><span class="lineno"> 1694</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToForgetWeightsTensor, &amp;inputToForgetWeights[0][0]);</div><div class="line"><a name="l01695"></a><span class="lineno"> 1695</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToCellWeightsTensor, &amp;inputToCellWeights[0][0]);</div><div class="line"><a name="l01696"></a><span class="lineno"> 1696</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToOutputWeightsTensor, &amp;inputToOutputWeights[0][0]);</div><div class="line"><a name="l01697"></a><span class="lineno"> 1697</span>&#160;</div><div class="line"><a name="l01698"></a><span class="lineno"> 1698</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToInputWeightsTensor, &amp;recurrentToInputWeights[0][0]);</div><div class="line"><a name="l01699"></a><span class="lineno"> 1699</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToForgetWeightsTensor, &amp;recurrentToForgetWeights[0][0]);</div><div class="line"><a name="l01700"></a><span class="lineno"> 1700</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToCellWeightsTensor, &amp;recurrentToCellWeights[0][0]);</div><div class="line"><a name="l01701"></a><span class="lineno"> 1701</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToOutputWeightsTensor, &amp;recurrentToOutputWeights[0][0]);</div><div class="line"><a name="l01702"></a><span class="lineno"> 1702</span>&#160;</div><div class="line"><a name="l01703"></a><span class="lineno"> 1703</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputGateBiasTensor, &amp;inputGateBias[0]);</div><div class="line"><a name="l01704"></a><span class="lineno"> 1704</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetGateBiasTensor, &amp;forgetGateBias[0]);</div><div class="line"><a name="l01705"></a><span class="lineno"> 1705</span>&#160;    <a class="code" 
href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellBiasTensor, &amp;cellBias[0]);</div><div class="line"><a name="l01706"></a><span class="lineno"> 1706</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputGateBiasTensor, &amp;outputGateBias[0]);</div><div class="line"><a name="l01707"></a><span class="lineno"> 1707</span>&#160;</div><div class="line"><a name="l01708"></a><span class="lineno"> 1708</span>&#160;    <span class="comment">// Setup queue descriptor</span></div><div class="line"><a name="l01709"></a><span class="lineno"> 1709</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a08a1932be591c315a512a877d38b22df">m_InputToInputWeights</a> = &amp;inputToInputWeightsTensor;</div><div class="line"><a name="l01710"></a><span class="lineno"> 1710</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">m_InputToForgetWeights</a> = &amp;inputToForgetWeightsTensor;</div><div class="line"><a name="l01711"></a><span class="lineno"> 1711</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a28ad98d17603fd8b12e046f8ece58970">m_InputToCellWeights</a> = &amp;inputToCellWeightsTensor;</div><div class="line"><a name="l01712"></a><span class="lineno"> 1712</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a83dc9086b2e4a4e4cadb66bd874df798">m_InputToOutputWeights</a> = &amp;inputToOutputWeightsTensor;</div><div class="line"><a name="l01713"></a><span class="lineno"> 1713</span>&#160;</div><div class="line"><a name="l01714"></a><span class="lineno"> 1714</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a98d377149071d8842d610cc0734d1cfe">m_RecurrentToInputWeights</a> = &amp;recurrentToInputWeightsTensor;</div><div class="line"><a name="l01715"></a><span class="lineno"> 1715</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a45d73e66cbb2b65049e4016c20657ccf">m_RecurrentToForgetWeights</a> = &amp;recurrentToForgetWeightsTensor;</div><div class="line"><a name="l01716"></a><span class="lineno"> 1716</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">m_RecurrentToCellWeights</a> = &amp;recurrentToCellWeightsTensor;</div><div class="line"><a name="l01717"></a><span class="lineno"> 1717</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">m_RecurrentToOutputWeights</a> = &amp;recurrentToOutputWeightsTensor;</div><div class="line"><a name="l01718"></a><span class="lineno"> 1718</span>&#160;</div><div class="line"><a name="l01719"></a><span class="lineno"> 1719</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#acb3aade8fae984f7293e222dcbe66030">m_InputGateBias</a> = &amp;inputGateBiasTensor;</div><div class="line"><a name="l01720"></a><span class="lineno"> 1720</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">m_ForgetGateBias</a> = &amp;forgetGateBiasTensor;</div><div class="line"><a name="l01721"></a><span class="lineno"> 1721</span>&#160;    data.<a 
class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">m_CellBias</a> = &amp;cellBiasTensor;</div><div class="line"><a name="l01722"></a><span class="lineno"> 1722</span>&#160;    data.<a class="code" href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">m_OutputGateBias</a> = &amp;outputGateBiasTensor;</div><div class="line"><a name="l01723"></a><span class="lineno"> 1723</span>&#160;</div><div class="line"><a name="l01724"></a><span class="lineno"> 1724</span>&#160;    <span class="comment">// Create workload and allocate tensor handles</span></div><div class="line"><a name="l01725"></a><span class="lineno"> 1725</span>&#160;    std::unique_ptr&lt;armnn::IWorkload&gt; workload = workloadFactory.<a class="code" href="classarmnn_1_1_i_workload_factory.xhtml#ab5ceda49651dcd53fb7eb05658b5a0cb">CreateQuantizedLstm</a>(data, info);</div><div class="line"><a name="l01726"></a><span class="lineno"> 1726</span>&#160;    inputHandle-&gt;Allocate();</div><div class="line"><a name="l01727"></a><span class="lineno"> 1727</span>&#160;    outputStateInHandle-&gt;Allocate();</div><div class="line"><a name="l01728"></a><span class="lineno"> 1728</span>&#160;    cellStateInHandle-&gt;Allocate();</div><div class="line"><a name="l01729"></a><span class="lineno"> 1729</span>&#160;</div><div class="line"><a name="l01730"></a><span class="lineno"> 1730</span>&#160;    cellStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l01731"></a><span class="lineno"> 1731</span>&#160;    outputHandle-&gt;Allocate();</div><div class="line"><a name="l01732"></a><span class="lineno"> 1732</span>&#160;</div><div class="line"><a name="l01733"></a><span class="lineno"> 1733</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(inputHandle.get(), &amp;inputTensor[0][0]);</div><div class="line"><a name="l01734"></a><span class="lineno"> 1734</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(outputStateInHandle.get(), &amp;outputStateInTensor[0][0]);</div><div class="line"><a name="l01735"></a><span class="lineno"> 1735</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(cellStateInHandle.get(), &amp;cellStateInTensor[0][0]);</div><div class="line"><a name="l01736"></a><span class="lineno"> 1736</span>&#160;</div><div class="line"><a name="l01737"></a><span class="lineno"> 1737</span>&#160;    workload-&gt;Execute();</div><div class="line"><a name="l01738"></a><span class="lineno"> 1738</span>&#160;</div><div class="line"><a name="l01739"></a><span class="lineno"> 1739</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#a99b626c58a926dc7d6df78d22ec186c8">CopyDataFromITensorHandle</a>(&amp;ret.output[0][0], outputHandle.get());</div><div class="line"><a name="l01740"></a><span class="lineno"> 1740</span>&#160;</div><div class="line"><a name="l01741"></a><span class="lineno"> 1741</span>&#160;    <span class="keywordflow">return</span> ret;</div><div class="line"><a name="l01742"></a><span class="lineno"> 1742</span>&#160;}</div><div class="line"><a name="l01743"></a><span class="lineno"> 1743</span>&#160;</div><div class="line"><a name="l01744"></a><span class="lineno"> 1744</span>&#160;<span class="comment">// QLSTM: CIFG, LayerNorm</span></div><div class="line"><a 
name="l01745"></a><span class="lineno"> 1745</span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int8_t, 2&gt;</a> QLstmTestImpl(</div><div class="line"><a name="l01746"></a><span class="lineno"> 1746</span>&#160;        <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l01747"></a><span class="lineno"> 1747</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l01748"></a><span class="lineno"> 1748</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory,</div><div class="line"><a name="l01749"></a><span class="lineno"> 1749</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;int8_t, 2&gt;&amp; input,</div><div class="line"><a name="l01750"></a><span class="lineno"> 1750</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;int8_t, 2&gt;&amp; outputExpected)</div><div class="line"><a name="l01751"></a><span class="lineno"> 1751</span>&#160;{</div><div class="line"><a name="l01752"></a><span class="lineno"> 1752</span>&#160;    <a class="code" href="namespacearmnn.xhtml#a44affeeb090c3c6a3062830562672e84">IgnoreUnused</a>(memoryManager);</div><div class="line"><a name="l01753"></a><span class="lineno"> 1753</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> numBatches = 2;</div><div class="line"><a name="l01754"></a><span class="lineno"> 1754</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> inputSize  = 5;</div><div class="line"><a name="l01755"></a><span class="lineno"> 1755</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> outputSize = 4;</div><div class="line"><a name="l01756"></a><span class="lineno"> 1756</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> numUnits   = 4;</div><div class="line"><a name="l01757"></a><span class="lineno"> 1757</span>&#160;</div><div class="line"><a name="l01758"></a><span class="lineno"> 1758</span>&#160;    <span class="keywordtype">bool</span> cifgEnabled       = <span class="keyword">true</span>;</div><div class="line"><a name="l01759"></a><span class="lineno"> 1759</span>&#160;    <span class="keywordtype">bool</span> peepholeEnabled   = <span class="keyword">false</span>;</div><div class="line"><a name="l01760"></a><span class="lineno"> 1760</span>&#160;    <span class="keywordtype">bool</span> projectionEnabled = <span class="keyword">false</span>;</div><div class="line"><a name="l01761"></a><span class="lineno"> 1761</span>&#160;    <span class="keywordtype">bool</span> layerNormEnabled  = <span class="keyword">true</span>;</div><div class="line"><a name="l01762"></a><span class="lineno"> 1762</span>&#160;</div><div class="line"><a name="l01763"></a><span class="lineno"> 1763</span>&#160;    <span class="comment">// Scale/Offset quantization info</span></div><div class="line"><a name="l01764"></a><span class="lineno"> 1764</span>&#160;    <span class="keywordtype">float</span> inputScale    = 0.0078125f;</div><div class="line"><a name="l01765"></a><span class="lineno"> 
1765</span>&#160;    int32_t inputOffset = 0;</div><div class="line"><a name="l01766"></a><span class="lineno"> 1766</span>&#160;</div><div class="line"><a name="l01767"></a><span class="lineno"> 1767</span>&#160;    int32_t hiddenStateZeroPoint = 0;</div><div class="line"><a name="l01768"></a><span class="lineno"> 1768</span>&#160;    <span class="keywordtype">float</span> hiddenStateScale       = 0.007f;</div><div class="line"><a name="l01769"></a><span class="lineno"> 1769</span>&#160;</div><div class="line"><a name="l01770"></a><span class="lineno"> 1770</span>&#160;    <span class="comment">// if (!projectionEnabled) outputScale == hiddenStateScale</span></div><div class="line"><a name="l01771"></a><span class="lineno"> 1771</span>&#160;    <span class="keywordtype">float</span> outputScale    = hiddenStateScale;</div><div class="line"><a name="l01772"></a><span class="lineno"> 1772</span>&#160;    int32_t outputOffset = hiddenStateZeroPoint;</div><div class="line"><a name="l01773"></a><span class="lineno"> 1773</span>&#160;</div><div class="line"><a name="l01774"></a><span class="lineno"> 1774</span>&#160;    <span class="keywordtype">float</span> cellStateScale    = 3.05176e-05f;</div><div class="line"><a name="l01775"></a><span class="lineno"> 1775</span>&#160;    int32_t cellStateOffset = 0;</div><div class="line"><a name="l01776"></a><span class="lineno"> 1776</span>&#160;</div><div class="line"><a name="l01777"></a><span class="lineno"> 1777</span>&#160;    <span class="keywordtype">float</span> weightsScale    = 0.00784314f;</div><div class="line"><a name="l01778"></a><span class="lineno"> 1778</span>&#160;    int32_t weightsOffset = 0;</div><div class="line"><a name="l01779"></a><span class="lineno"> 1779</span>&#160;</div><div class="line"><a name="l01780"></a><span class="lineno"> 1780</span>&#160;    <span class="keywordtype">float</span> layerNormScale    = 3.05182e-05f;</div><div class="line"><a name="l01781"></a><span class="lineno"> 1781</span>&#160;    int32_t layerNormOffset = 0;</div><div class="line"><a name="l01782"></a><span class="lineno"> 1782</span>&#160;</div><div class="line"><a name="l01783"></a><span class="lineno"> 1783</span>&#160;    <span class="keywordtype">float</span> biasScale    = layerNormScale / 1024;</div><div class="line"><a name="l01784"></a><span class="lineno"> 1784</span>&#160;    int32_t biasOffset = 0;</div><div class="line"><a name="l01785"></a><span class="lineno"> 1785</span>&#160;</div><div class="line"><a name="l01786"></a><span class="lineno"> 1786</span>&#160;    <span class="keywordtype">float</span> inputIntermediateScale  = 0.007059f;</div><div class="line"><a name="l01787"></a><span class="lineno"> 1787</span>&#160;    <span class="keywordtype">float</span> forgetIntermediateScale = 0.007812f;</div><div class="line"><a name="l01788"></a><span class="lineno"> 1788</span>&#160;    <span class="keywordtype">float</span> cellIntermediateScale   = inputIntermediateScale;</div><div class="line"><a name="l01789"></a><span class="lineno"> 1789</span>&#160;    <span class="keywordtype">float</span> outputIntermediateScale = forgetIntermediateScale;</div><div class="line"><a name="l01790"></a><span class="lineno"> 1790</span>&#160;</div><div class="line"><a name="l01791"></a><span class="lineno"> 1791</span>&#160;    <span class="keywordtype">float</span> cellClip       = 0.0f;</div><div class="line"><a name="l01792"></a><span class="lineno"> 1792</span>&#160;    <span class="keywordtype">float</span> projectionClip = 0.0f;</div><div 
class="line"><a name="l01793"></a><span class="lineno"> 1793</span>&#160;</div><div class="line"><a name="l01794"></a><span class="lineno"> 1794</span>&#160;    <span class="comment">// Input/Output tensor info</span></div><div class="line"><a name="l01795"></a><span class="lineno"> 1795</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputInfo({numBatches , inputSize},</div><div class="line"><a name="l01796"></a><span class="lineno"> 1796</span>&#160;                                <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>,</div><div class="line"><a name="l01797"></a><span class="lineno"> 1797</span>&#160;                                inputScale,</div><div class="line"><a name="l01798"></a><span class="lineno"> 1798</span>&#160;                                inputOffset);</div><div class="line"><a name="l01799"></a><span class="lineno"> 1799</span>&#160;</div><div class="line"><a name="l01800"></a><span class="lineno"> 1800</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateInfo({numBatches , numUnits},</div><div class="line"><a name="l01801"></a><span class="lineno"> 1801</span>&#160;                                    <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>,</div><div class="line"><a name="l01802"></a><span class="lineno"> 1802</span>&#160;                                    cellStateScale,</div><div class="line"><a name="l01803"></a><span class="lineno"> 1803</span>&#160;                                    cellStateOffset);</div><div class="line"><a name="l01804"></a><span class="lineno"> 1804</span>&#160;</div><div class="line"><a name="l01805"></a><span class="lineno"> 1805</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateInfo({numBatches , outputSize},</div><div class="line"><a name="l01806"></a><span class="lineno"> 1806</span>&#160;                                      <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>,</div><div class="line"><a name="l01807"></a><span class="lineno"> 1807</span>&#160;                                      outputScale,</div><div class="line"><a name="l01808"></a><span class="lineno"> 1808</span>&#160;                                      outputOffset);</div><div class="line"><a name="l01809"></a><span class="lineno"> 1809</span>&#160;</div><div class="line"><a name="l01810"></a><span class="lineno"> 1810</span>&#160;    <a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int8_t, 2&gt;</a> ret(outputStateInfo);</div><div class="line"><a name="l01811"></a><span class="lineno"> 1811</span>&#160;</div><div class="line"><a name="l01812"></a><span class="lineno"> 1812</span>&#160;    <span class="comment">// Input tensors</span></div><div class="line"><a name="l01813"></a><span class="lineno"> 1813</span>&#160;    std::vector&lt;int8_t&gt; inputVector;</div><div class="line"><a name="l01814"></a><span class="lineno"> 1814</span>&#160;    inputVector.assign(input.data(), input.data() + (numBatches * inputSize));</div><div class="line"><a name="l01815"></a><span class="lineno"> 1815</span>&#160;    <span class="keyword">auto</span> inputTensor = MakeTensor&lt;int8_t, 2&gt;(inputInfo, 
inputVector);</div><div class="line"><a name="l01816"></a><span class="lineno"> 1816</span>&#160;</div><div class="line"><a name="l01817"></a><span class="lineno"> 1817</span>&#160;    std::vector&lt;int16_t&gt; cellStateInVector = {0, 0, 0, 0, 0, 0, 0, 0};</div><div class="line"><a name="l01818"></a><span class="lineno"> 1818</span>&#160;    <span class="keyword">auto</span> cellStateInTensor = MakeTensor&lt;int16_t, 2&gt;(cellStateInfo, cellStateInVector);</div><div class="line"><a name="l01819"></a><span class="lineno"> 1819</span>&#160;</div><div class="line"><a name="l01820"></a><span class="lineno"> 1820</span>&#160;    std::vector&lt;int8_t&gt; outputStateInVector = {0, 0, 0, 0, 0, 0, 0, 0};</div><div class="line"><a name="l01821"></a><span class="lineno"> 1821</span>&#160;    <span class="keyword">auto</span> outputStateInTensor = MakeTensor&lt;int8_t, 2&gt;(outputStateInfo, outputStateInVector);</div><div class="line"><a name="l01822"></a><span class="lineno"> 1822</span>&#160;</div><div class="line"><a name="l01823"></a><span class="lineno"> 1823</span>&#160;    <span class="comment">// Output tensors</span></div><div class="line"><a name="l01824"></a><span class="lineno"> 1824</span>&#160;    std::vector&lt;int16_t&gt; cellStateOutVector  = {-11692, 9960, 5491, 8861, -9422, 7726, 2056, 13149};</div><div class="line"><a name="l01825"></a><span class="lineno"> 1825</span>&#160;    <span class="keyword">auto</span> cellStateOutTensor  = MakeTensor&lt;int16_t, 2&gt;(cellStateInfo, cellStateOutVector);</div><div class="line"><a name="l01826"></a><span class="lineno"> 1826</span>&#160;</div><div class="line"><a name="l01827"></a><span class="lineno"> 1827</span>&#160;    std::vector&lt;int8_t&gt; outputVector;</div><div class="line"><a name="l01828"></a><span class="lineno"> 1828</span>&#160;    outputVector.assign(outputExpected.data(), outputExpected.data() + (numBatches * outputSize));</div><div class="line"><a name="l01829"></a><span class="lineno"> 1829</span>&#160;    ret.outputExpected = MakeTensor&lt;int8_t, 2&gt;(outputStateInfo, outputVector);</div><div class="line"><a name="l01830"></a><span class="lineno"> 1830</span>&#160;</div><div class="line"><a name="l01831"></a><span class="lineno"> 1831</span>&#160;    <span class="comment">// Create tensor handles</span></div><div class="line"><a name="l01832"></a><span class="lineno"> 1832</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; inputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(inputInfo);</div><div class="line"><a name="l01833"></a><span class="lineno"> 1833</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateInHandle =</div><div class="line"><a name="l01834"></a><span class="lineno"> 1834</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateInfo);</div><div class="line"><a name="l01835"></a><span class="lineno"> 1835</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateInHandle =</div><div class="line"><a name="l01836"></a><span class="lineno"> 1836</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInfo);</div><div class="line"><a name="l01837"></a><span class="lineno"> 1837</span>&#160;</div><div class="line"><a 
name="l01838"></a><span class="lineno"> 1838</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateOutHandle =</div><div class="line"><a name="l01839"></a><span class="lineno"> 1839</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInfo);</div><div class="line"><a name="l01840"></a><span class="lineno"> 1840</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateOutHandle =</div><div class="line"><a name="l01841"></a><span class="lineno"> 1841</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateInfo);</div><div class="line"><a name="l01842"></a><span class="lineno"> 1842</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInfo);</div><div class="line"><a name="l01843"></a><span class="lineno"> 1843</span>&#160;</div><div class="line"><a name="l01844"></a><span class="lineno"> 1844</span>&#160;    <a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml">armnn::QLstmQueueDescriptor</a> data;</div><div class="line"><a name="l01845"></a><span class="lineno"> 1845</span>&#160;    <a class="code" href="structarmnn_1_1_workload_info.xhtml">armnn::WorkloadInfo</a> info;</div><div class="line"><a name="l01846"></a><span class="lineno"> 1846</span>&#160;</div><div class="line"><a name="l01847"></a><span class="lineno"> 1847</span>&#160;    <span class="comment">// Add inputs and outputs to workload</span></div><div class="line"><a name="l01848"></a><span class="lineno"> 1848</span>&#160;    AddInputToWorkload(data, info, inputInfo, inputHandle.get());</div><div class="line"><a name="l01849"></a><span class="lineno"> 1849</span>&#160;    AddInputToWorkload(data, info, outputStateInfo, outputStateInHandle.get());</div><div class="line"><a name="l01850"></a><span class="lineno"> 1850</span>&#160;    AddInputToWorkload(data, info, cellStateInfo, cellStateInHandle.get());</div><div class="line"><a name="l01851"></a><span class="lineno"> 1851</span>&#160;</div><div class="line"><a name="l01852"></a><span class="lineno"> 1852</span>&#160;    AddOutputToWorkload(data, info, outputStateInfo, outputStateOutHandle.get());</div><div class="line"><a name="l01853"></a><span class="lineno"> 1853</span>&#160;    AddOutputToWorkload(data, info, cellStateInfo, cellStateOutHandle.get());</div><div class="line"><a name="l01854"></a><span class="lineno"> 1854</span>&#160;    AddOutputToWorkload(data, info, outputStateInfo, outputHandle.get());</div><div class="line"><a name="l01855"></a><span class="lineno"> 1855</span>&#160;</div><div class="line"><a name="l01856"></a><span class="lineno"> 1856</span>&#160;    <span class="comment">// Weights and bias tensor and quantization info</span></div><div class="line"><a name="l01857"></a><span class="lineno"> 1857</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputWeightsInfo({outputSize, inputSize},</div><div class="line"><a name="l01858"></a><span class="lineno"> 1858</span>&#160;                                       <a class="code" 
href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9945327825b115e93a3b89f4302e76db">armnn::DataType::QSymmS8</a>,</div><div class="line"><a name="l01859"></a><span class="lineno"> 1859</span>&#160;                                       weightsScale,</div><div class="line"><a name="l01860"></a><span class="lineno"> 1860</span>&#160;                                       weightsOffset);</div><div class="line"><a name="l01861"></a><span class="lineno"> 1861</span>&#160;</div><div class="line"><a name="l01862"></a><span class="lineno"> 1862</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> recurrentWeightsInfo({outputSize, outputSize},</div><div class="line"><a name="l01863"></a><span class="lineno"> 1863</span>&#160;                                           <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9945327825b115e93a3b89f4302e76db">armnn::DataType::QSymmS8</a>,</div><div class="line"><a name="l01864"></a><span class="lineno"> 1864</span>&#160;                                           weightsScale,</div><div class="line"><a name="l01865"></a><span class="lineno"> 1865</span>&#160;                                           weightsOffset);</div><div class="line"><a name="l01866"></a><span class="lineno"> 1866</span>&#160;</div><div class="line"><a name="l01867"></a><span class="lineno"> 1867</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> biasInfo({outputSize}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6accedffbc6e5308e33d3843e8bdc0dad7">armnn::DataType::Signed32</a>, biasScale, biasOffset);</div><div class="line"><a name="l01868"></a><span class="lineno"> 1868</span>&#160;</div><div class="line"><a name="l01869"></a><span class="lineno"> 1869</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> layerNormWeightsInfo({numUnits}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>, layerNormScale, layerNormOffset);</div><div class="line"><a name="l01870"></a><span class="lineno"> 1870</span>&#160;</div><div class="line"><a name="l01871"></a><span class="lineno"> 1871</span>&#160;    <span class="comment">// Weights and bias tensor data</span></div><div class="line"><a name="l01872"></a><span class="lineno"> 1872</span>&#160;    <span class="keyword">auto</span> inputToForgetWeights = MakeTensor&lt;int8_t, 2&gt;(inputWeightsInfo,</div><div class="line"><a name="l01873"></a><span class="lineno"> 1873</span>&#160;            {-77, -13, 38, 25, 115, -64, -25, -51, 38, -102, -51, 38, -64, -51, -77, 38, -51, -77, -64, -64});</div><div class="line"><a name="l01874"></a><span class="lineno"> 1874</span>&#160;    <span class="keyword">auto</span> inputToCellWeights   = MakeTensor&lt;int8_t, 2&gt;(inputWeightsInfo,</div><div class="line"><a name="l01875"></a><span class="lineno"> 1875</span>&#160;            {-51, -38, -25, -13, -64, 64, -25, -38, -25, -77, 77, -13, -51, -38, -89, 89, -115, -64, 102, 77});</div><div class="line"><a name="l01876"></a><span class="lineno"> 1876</span>&#160;    <span class="keyword">auto</span> inputToOutputWeights = MakeTensor&lt;int8_t, 2&gt;(inputWeightsInfo,</div><div class="line"><a name="l01877"></a><span class="lineno"> 1877</span>&#160;            {-102, -51, -25, -115, -13, -89, 38, -38, -102, -25, 77, -25, 51, -89, -38, -64, 13, 64, -77, -51});</div><div class="line"><a 
name="l01878"></a><span class="lineno"> 1878</span>&#160;</div><div class="line"><a name="l01879"></a><span class="lineno"> 1879</span>&#160;    <span class="keyword">auto</span> recurrentToForgetWeights = MakeTensor&lt;int8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l01880"></a><span class="lineno"> 1880</span>&#160;            {-64, -38, -64, -25, 77, 51, 115, 38, -13, 25, 64, 25, 25, 38, -13, 51});</div><div class="line"><a name="l01881"></a><span class="lineno"> 1881</span>&#160;    <span class="keyword">auto</span> recurrentToCellWeights   = MakeTensor&lt;int8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l01882"></a><span class="lineno"> 1882</span>&#160;            {-38, 25, 13, -38, 102, -10, -25, 38, 102, -77, -13, 25, 38, -13, 25, 64});</div><div class="line"><a name="l01883"></a><span class="lineno"> 1883</span>&#160;    <span class="keyword">auto</span> recurrentToOutputWeights = MakeTensor&lt;int8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l01884"></a><span class="lineno"> 1884</span>&#160;            {38, -13, 13, -25, -64, -89, -25, -77, -13, -51, -89, -25, 13, 64, 25, -38});</div><div class="line"><a name="l01885"></a><span class="lineno"> 1885</span>&#160;</div><div class="line"><a name="l01886"></a><span class="lineno"> 1886</span>&#160;    <span class="keyword">auto</span> forgetGateBias = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {2147484, -6442451, -4294968, 2147484});</div><div class="line"><a name="l01887"></a><span class="lineno"> 1887</span>&#160;    <span class="keyword">auto</span> cellBias       = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {-1073742, 15461883, 5368709, 1717987});</div><div class="line"><a name="l01888"></a><span class="lineno"> 1888</span>&#160;    <span class="keyword">auto</span> outputGateBias = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {1073742, -214748, 4294968, 2147484});</div><div class="line"><a name="l01889"></a><span class="lineno"> 1889</span>&#160;</div><div class="line"><a name="l01890"></a><span class="lineno"> 1890</span>&#160;    <span class="keyword">auto</span> forgetLayerNormWeights = MakeTensor&lt;int16_t, 1&gt;(layerNormWeightsInfo, {6553, 6553, 13107, 9830});</div><div class="line"><a name="l01891"></a><span class="lineno"> 1891</span>&#160;    <span class="keyword">auto</span> cellLayerNormWeights   = MakeTensor&lt;int16_t, 1&gt;(layerNormWeightsInfo, {22937, 6553, 9830, 26214});</div><div class="line"><a name="l01892"></a><span class="lineno"> 1892</span>&#160;    <span class="keyword">auto</span> outputLayerNormWeights = MakeTensor&lt;int16_t, 1&gt;(layerNormWeightsInfo, {19660, 6553, 6553, 16384});</div><div class="line"><a name="l01893"></a><span class="lineno"> 1893</span>&#160;</div><div class="line"><a name="l01894"></a><span class="lineno"> 1894</span>&#160;    <span class="comment">// ScopedCpuTensorHandles</span></div><div class="line"><a name="l01895"></a><span class="lineno"> 1895</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToForgetWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l01896"></a><span class="lineno"> 1896</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToCellWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l01897"></a><span class="lineno"> 1897</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> 
inputToOutputWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l01898"></a><span class="lineno"> 1898</span>&#160;</div><div class="line"><a name="l01899"></a><span class="lineno"> 1899</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToForgetWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l01900"></a><span class="lineno"> 1900</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToCellWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l01901"></a><span class="lineno"> 1901</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToOutputWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l01902"></a><span class="lineno"> 1902</span>&#160;</div><div class="line"><a name="l01903"></a><span class="lineno"> 1903</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetGateBiasTensor(biasInfo);</div><div class="line"><a name="l01904"></a><span class="lineno"> 1904</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellBiasTensor(biasInfo);</div><div class="line"><a name="l01905"></a><span class="lineno"> 1905</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputGateBiasTensor(biasInfo);</div><div class="line"><a name="l01906"></a><span class="lineno"> 1906</span>&#160;</div><div class="line"><a name="l01907"></a><span class="lineno"> 1907</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l01908"></a><span class="lineno"> 1908</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l01909"></a><span class="lineno"> 1909</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l01910"></a><span class="lineno"> 1910</span>&#160;</div><div class="line"><a name="l01911"></a><span class="lineno"> 1911</span>&#160;    <span class="comment">// Allocate and copy data</span></div><div class="line"><a name="l01912"></a><span class="lineno"> 1912</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToForgetWeightsTensor, &amp;inputToForgetWeights[0][0]);</div><div class="line"><a name="l01913"></a><span class="lineno"> 1913</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToCellWeightsTensor, &amp;inputToCellWeights[0][0]);</div><div class="line"><a name="l01914"></a><span class="lineno"> 1914</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToOutputWeightsTensor, &amp;inputToOutputWeights[0][0]);</div><div class="line"><a name="l01915"></a><span class="lineno"> 
1915</span>&#160;</div><div class="line"><a name="l01916"></a><span class="lineno"> 1916</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToForgetWeightsTensor, &amp;recurrentToForgetWeights[0][0]);</div><div class="line"><a name="l01917"></a><span class="lineno"> 1917</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToCellWeightsTensor, &amp;recurrentToCellWeights[0][0]);</div><div class="line"><a name="l01918"></a><span class="lineno"> 1918</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToOutputWeightsTensor, &amp;recurrentToOutputWeights[0][0]);</div><div class="line"><a name="l01919"></a><span class="lineno"> 1919</span>&#160;</div><div class="line"><a name="l01920"></a><span class="lineno"> 1920</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetGateBiasTensor, &amp;forgetGateBias[0]);</div><div class="line"><a name="l01921"></a><span class="lineno"> 1921</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellBiasTensor, &amp;cellBias[0]);</div><div class="line"><a name="l01922"></a><span class="lineno"> 1922</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputGateBiasTensor, &amp;outputGateBias[0]);</div><div class="line"><a name="l01923"></a><span class="lineno"> 1923</span>&#160;</div><div class="line"><a name="l01924"></a><span class="lineno"> 1924</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetLayerNormWeightsTensor, &amp;forgetLayerNormWeights[0]);</div><div class="line"><a name="l01925"></a><span class="lineno"> 1925</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellLayerNormWeightsTensor, &amp;cellLayerNormWeights[0]);</div><div class="line"><a name="l01926"></a><span class="lineno"> 1926</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputLayerNormWeightsTensor, &amp;outputLayerNormWeights[0]);</div><div class="line"><a name="l01927"></a><span class="lineno"> 1927</span>&#160;</div><div class="line"><a name="l01928"></a><span class="lineno"> 1928</span>&#160;    <span class="comment">// Setup queue descriptor</span></div><div class="line"><a name="l01929"></a><span class="lineno"> 1929</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">m_InputToForgetWeights</a> = &amp;inputToForgetWeightsTensor;</div><div class="line"><a name="l01930"></a><span class="lineno"> 1930</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a28ad98d17603fd8b12e046f8ece58970">m_InputToCellWeights</a> = &amp;inputToCellWeightsTensor;</div><div class="line"><a name="l01931"></a><span class="lineno"> 1931</span>&#160;    data.<a class="code" 
href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a83dc9086b2e4a4e4cadb66bd874df798">m_InputToOutputWeights</a> = &amp;inputToOutputWeightsTensor;</div><div class="line"><a name="l01932"></a><span class="lineno"> 1932</span>&#160;</div><div class="line"><a name="l01933"></a><span class="lineno"> 1933</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a45d73e66cbb2b65049e4016c20657ccf">m_RecurrentToForgetWeights</a> = &amp;recurrentToForgetWeightsTensor;</div><div class="line"><a name="l01934"></a><span class="lineno"> 1934</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">m_RecurrentToCellWeights</a> = &amp;recurrentToCellWeightsTensor;</div><div class="line"><a name="l01935"></a><span class="lineno"> 1935</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">m_RecurrentToOutputWeights</a> = &amp;recurrentToOutputWeightsTensor;</div><div class="line"><a name="l01936"></a><span class="lineno"> 1936</span>&#160;</div><div class="line"><a name="l01937"></a><span class="lineno"> 1937</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">m_ForgetGateBias</a> = &amp;forgetGateBiasTensor;</div><div class="line"><a name="l01938"></a><span class="lineno"> 1938</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">m_CellBias</a> = &amp;cellBiasTensor;</div><div class="line"><a name="l01939"></a><span class="lineno"> 1939</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">m_OutputGateBias</a> = &amp;outputGateBiasTensor;</div><div class="line"><a name="l01940"></a><span class="lineno"> 1940</span>&#160;</div><div class="line"><a name="l01941"></a><span class="lineno"> 1941</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a453a4af385d0c060c9aac990fceaa1ef">m_ForgetLayerNormWeights</a> = &amp;forgetLayerNormWeightsTensor;</div><div class="line"><a name="l01942"></a><span class="lineno"> 1942</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a518f0195d0278a892b49649b8860d17f">m_CellLayerNormWeights</a> = &amp;cellLayerNormWeightsTensor;</div><div class="line"><a name="l01943"></a><span class="lineno"> 1943</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aa3f07e27230d6d99adc2c82ba681df2b">m_OutputLayerNormWeights</a> = &amp;outputLayerNormWeightsTensor;</div><div class="line"><a name="l01944"></a><span class="lineno"> 1944</span>&#160;</div><div class="line"><a name="l01945"></a><span class="lineno"> 1945</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#ad474e5c51a0b194ef32e812b86c0cbdb">m_CifgEnabled</a> = cifgEnabled;</div><div class="line"><a name="l01946"></a><span class="lineno"> 1946</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a2837b4396f20c956952d1a7286cab5f8">m_PeepholeEnabled</a> = peepholeEnabled;</div><div class="line"><a name="l01947"></a><span class="lineno"> 
1947</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a6c9de81fc65b3c4924cab11907075a17">m_ProjectionEnabled</a> = projectionEnabled;</div><div class="line"><a name="l01948"></a><span class="lineno"> 1948</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a4a8ec49f130084445d44297549254780">m_LayerNormEnabled</a> = layerNormEnabled;</div><div class="line"><a name="l01949"></a><span class="lineno"> 1949</span>&#160;</div><div class="line"><a name="l01950"></a><span class="lineno"> 1950</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a09e1f097944f61cc901240f9300364cf">m_InputIntermediateScale</a> = inputIntermediateScale;</div><div class="line"><a name="l01951"></a><span class="lineno"> 1951</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#afec7f36158448f723b426a9527acb189">m_ForgetIntermediateScale</a> = forgetIntermediateScale;</div><div class="line"><a name="l01952"></a><span class="lineno"> 1952</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a0477ee1b44ace6090119178eea78cb0b">m_CellIntermediateScale</a> = cellIntermediateScale;</div><div class="line"><a name="l01953"></a><span class="lineno"> 1953</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#aa43409f9b457352c95c89f20ce5d844d">m_OutputIntermediateScale</a> = outputIntermediateScale;</div><div class="line"><a name="l01954"></a><span class="lineno"> 1954</span>&#160;</div><div class="line"><a name="l01955"></a><span class="lineno"> 1955</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a4556cbd764d4848d8ad0637a9eed580d">m_HiddenStateZeroPoint</a> = hiddenStateZeroPoint;</div><div class="line"><a name="l01956"></a><span class="lineno"> 1956</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#af8f724af7210b52529216feefa993c98">m_HiddenStateScale</a> = hiddenStateScale;</div><div class="line"><a name="l01957"></a><span class="lineno"> 1957</span>&#160;</div><div class="line"><a name="l01958"></a><span class="lineno"> 1958</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#ac81fb0e66dc623dc37c77f219f53a6d3">m_CellClip</a> = cellClip;</div><div class="line"><a name="l01959"></a><span class="lineno"> 
1959</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#aa6a518b65088f34803b3214334bdff61">m_ProjectionClip</a> = projectionClip;</div><div class="line"><a name="l01960"></a><span class="lineno"> 1960</span>&#160;</div><div class="line"><a name="l01961"></a><span class="lineno"> 1961</span>&#160;    <span class="comment">// Create workload and allocate tensor handles</span></div><div class="line"><a name="l01962"></a><span class="lineno"> 1962</span>&#160;    std::unique_ptr&lt;armnn::IWorkload&gt; workload = workloadFactory.<a class="code" href="classarmnn_1_1_i_workload_factory.xhtml#ab17bf35d906f8daad42be0a0799c72ab">CreateQLstm</a>(data, info);</div><div class="line"><a name="l01963"></a><span class="lineno"> 1963</span>&#160;    inputHandle-&gt;Allocate();</div><div class="line"><a name="l01964"></a><span class="lineno"> 1964</span>&#160;    outputStateInHandle-&gt;Allocate();</div><div class="line"><a name="l01965"></a><span class="lineno"> 1965</span>&#160;    cellStateInHandle-&gt;Allocate();</div><div class="line"><a name="l01966"></a><span class="lineno"> 1966</span>&#160;</div><div class="line"><a name="l01967"></a><span class="lineno"> 1967</span>&#160;    outputStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l01968"></a><span class="lineno"> 1968</span>&#160;    cellStateOutHandle-&gt;Allocate();</div><div class="line"><a name="l01969"></a><span class="lineno"> 1969</span>&#160;    outputHandle-&gt;Allocate();</div><div class="line"><a name="l01970"></a><span class="lineno"> 1970</span>&#160;</div><div class="line"><a name="l01971"></a><span class="lineno"> 1971</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(inputHandle.get(), &amp;inputTensor[0][0]);</div><div class="line"><a name="l01972"></a><span class="lineno"> 1972</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(outputStateInHandle.get(), &amp;outputStateInTensor[0][0]);</div><div class="line"><a name="l01973"></a><span class="lineno"> 1973</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(cellStateInHandle.get(), &amp;cellStateInTensor[0][0]);</div><div class="line"><a name="l01974"></a><span class="lineno"> 1974</span>&#160;</div><div class="line"><a name="l01975"></a><span class="lineno"> 1975</span>&#160;    workload-&gt;Execute();</div><div class="line"><a name="l01976"></a><span class="lineno"> 1976</span>&#160;</div><div class="line"><a name="l01977"></a><span class="lineno"> 1977</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#a99b626c58a926dc7d6df78d22ec186c8">CopyDataFromITensorHandle</a>(&amp;ret.output[0][0], outputHandle.get());</div><div class="line"><a name="l01978"></a><span class="lineno"> 1978</span>&#160;</div><div class="line"><a name="l01979"></a><span class="lineno"> 1979</span>&#160;    <span class="keywordflow">return</span> ret;</div><div class="line"><a name="l01980"></a><span class="lineno"> 1980</span>&#160;}</div><div class="line"><a name="l01981"></a><span class="lineno"> 1981</span>&#160;</div><div class="line"><a name="l01982"></a><span class="lineno"> 1982</span>&#160;<span class="comment">// QLSTM: Projection, LayerNorm</span></div><div class="line"><a 
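
// The QLSTM variants below differ mainly in which optional features the descriptor enables:
// CIFG couples the input and forget gates (the separate input-gate weights, bias and
// layer-norm weights are then omitted), peephole adds cell-to-gate connections, projection
// multiplies the hidden state by an extra [outputSize, numUnits] weight matrix so outputSize
// can differ from numUnits, and layer normalization applies per-gate QSymmS16 weights to the
// gate pre-activations. The comment above each implementation lists the enabled features.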
name="l01983"></a><span class="lineno"> 1983</span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int8_t, 2&gt;</a> QLstmTestImpl1(</div><div class="line"><a name="l01984"></a><span class="lineno"> 1984</span>&#160;        <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l01985"></a><span class="lineno"> 1985</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l01986"></a><span class="lineno"> 1986</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory,</div><div class="line"><a name="l01987"></a><span class="lineno"> 1987</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;int8_t, 2&gt;&amp; input,</div><div class="line"><a name="l01988"></a><span class="lineno"> 1988</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;int8_t, 2&gt;&amp; outputExpected)</div><div class="line"><a name="l01989"></a><span class="lineno"> 1989</span>&#160;{</div><div class="line"><a name="l01990"></a><span class="lineno"> 1990</span>&#160;    <a class="code" href="namespacearmnn.xhtml#a44affeeb090c3c6a3062830562672e84">IgnoreUnused</a>(memoryManager);</div><div class="line"><a name="l01991"></a><span class="lineno"> 1991</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> numBatches = 2;</div><div class="line"><a name="l01992"></a><span class="lineno"> 1992</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> inputSize  = 5;</div><div class="line"><a name="l01993"></a><span class="lineno"> 1993</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> outputSize = 3;</div><div class="line"><a name="l01994"></a><span class="lineno"> 1994</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> numUnits   = 4;</div><div class="line"><a name="l01995"></a><span class="lineno"> 1995</span>&#160;</div><div class="line"><a name="l01996"></a><span class="lineno"> 1996</span>&#160;    <span class="keywordtype">bool</span> cifgEnabled       = <span class="keyword">false</span>;</div><div class="line"><a name="l01997"></a><span class="lineno"> 1997</span>&#160;    <span class="keywordtype">bool</span> peepholeEnabled   = <span class="keyword">false</span>;</div><div class="line"><a name="l01998"></a><span class="lineno"> 1998</span>&#160;    <span class="keywordtype">bool</span> projectionEnabled = <span class="keyword">true</span>;</div><div class="line"><a name="l01999"></a><span class="lineno"> 1999</span>&#160;    <span class="keywordtype">bool</span> layerNormEnabled  = <span class="keyword">true</span>;</div><div class="line"><a name="l02000"></a><span class="lineno"> 2000</span>&#160;</div><div class="line"><a name="l02001"></a><span class="lineno"> 2001</span>&#160;    <span class="comment">// Scale/Offset quantization info</span></div><div class="line"><a name="l02002"></a><span class="lineno"> 2002</span>&#160;    <span class="keywordtype">float</span> inputScale    = 0.0078125f;</div><div class="line"><a name="l02003"></a><span class="lineno"> 
2003</span>&#160;    int32_t inputOffset = 0;</div><div class="line"><a name="l02004"></a><span class="lineno"> 2004</span>&#160;</div><div class="line"><a name="l02005"></a><span class="lineno"> 2005</span>&#160;    int32_t hiddenStateZeroPoint = 0;</div><div class="line"><a name="l02006"></a><span class="lineno"> 2006</span>&#160;    <span class="keywordtype">float</span> hiddenStateScale       = 0.007f;</div><div class="line"><a name="l02007"></a><span class="lineno"> 2007</span>&#160;</div><div class="line"><a name="l02008"></a><span class="lineno"> 2008</span>&#160;    <span class="comment">// if (!projectionEnabled) outputScale == hiddenStateScale</span></div><div class="line"><a name="l02009"></a><span class="lineno"> 2009</span>&#160;    <span class="keywordtype">float</span> outputScale    = 3.05176e-05f;</div><div class="line"><a name="l02010"></a><span class="lineno"> 2010</span>&#160;    int32_t outputOffset = 0;</div><div class="line"><a name="l02011"></a><span class="lineno"> 2011</span>&#160;</div><div class="line"><a name="l02012"></a><span class="lineno"> 2012</span>&#160;    <span class="keywordtype">float</span> cellStateScale    = 3.05176e-05f;</div><div class="line"><a name="l02013"></a><span class="lineno"> 2013</span>&#160;    int32_t cellStateOffset = 0;</div><div class="line"><a name="l02014"></a><span class="lineno"> 2014</span>&#160;</div><div class="line"><a name="l02015"></a><span class="lineno"> 2015</span>&#160;    <span class="keywordtype">float</span> weightsScale    = 0.00784314f;</div><div class="line"><a name="l02016"></a><span class="lineno"> 2016</span>&#160;    int32_t weightsOffset = 0;</div><div class="line"><a name="l02017"></a><span class="lineno"> 2017</span>&#160;</div><div class="line"><a name="l02018"></a><span class="lineno"> 2018</span>&#160;    <span class="keywordtype">float</span> layerNormScale    = 3.05182e-05f;</div><div class="line"><a name="l02019"></a><span class="lineno"> 2019</span>&#160;    int32_t layerNormOffset = 0;</div><div class="line"><a name="l02020"></a><span class="lineno"> 2020</span>&#160;</div><div class="line"><a name="l02021"></a><span class="lineno"> 2021</span>&#160;    <span class="keywordtype">float</span> biasScale    = layerNormScale / 1024;</div><div class="line"><a name="l02022"></a><span class="lineno"> 2022</span>&#160;    int32_t biasOffset = 0;</div><div class="line"><a name="l02023"></a><span class="lineno"> 2023</span>&#160;</div><div class="line"><a name="l02024"></a><span class="lineno"> 2024</span>&#160;    <span class="keywordtype">float</span> projectionWeightsScale = 0.00392157f;</div><div class="line"><a name="l02025"></a><span class="lineno"> 2025</span>&#160;</div><div class="line"><a name="l02026"></a><span class="lineno"> 2026</span>&#160;    <span class="keywordtype">float</span> inputIntermediateScale  = 0.007059f;</div><div class="line"><a name="l02027"></a><span class="lineno"> 2027</span>&#160;    <span class="keywordtype">float</span> forgetIntermediateScale = 0.007812f;</div><div class="line"><a name="l02028"></a><span class="lineno"> 2028</span>&#160;    <span class="keywordtype">float</span> cellIntermediateScale   = inputIntermediateScale;</div><div class="line"><a name="l02029"></a><span class="lineno"> 2029</span>&#160;    <span class="keywordtype">float</span> outputIntermediateScale = forgetIntermediateScale;</div><div class="line"><a name="l02030"></a><span class="lineno"> 2030</span>&#160;</div><div class="line"><a name="l02031"></a><span class="lineno"> 2031</span>&#160;    
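
    // Worked example of the quantization parameters above (illustrative note): values follow
    // the affine scheme real = scale * (quantized - offset). With inputScale = 0.0078125f
    // (= 1/128) and inputOffset = 0, the QAsymmS8 value 64 represents 0.5f, and the full
    // [-128, 127] range covers approximately [-1.0f, 0.992f].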
<span class="keywordtype">float</span> cellClip       = 0.0f;</div><div class="line"><a name="l02032"></a><span class="lineno"> 2032</span>&#160;    <span class="keywordtype">float</span> projectionClip = 0.0f;</div><div class="line"><a name="l02033"></a><span class="lineno"> 2033</span>&#160;</div><div class="line"><a name="l02034"></a><span class="lineno"> 2034</span>&#160;    <span class="comment">// Input/Output tensor info</span></div><div class="line"><a name="l02035"></a><span class="lineno"> 2035</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputInfo({numBatches , inputSize},</div><div class="line"><a name="l02036"></a><span class="lineno"> 2036</span>&#160;                                <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>,</div><div class="line"><a name="l02037"></a><span class="lineno"> 2037</span>&#160;                                inputScale,</div><div class="line"><a name="l02038"></a><span class="lineno"> 2038</span>&#160;                                inputOffset);</div><div class="line"><a name="l02039"></a><span class="lineno"> 2039</span>&#160;</div><div class="line"><a name="l02040"></a><span class="lineno"> 2040</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateInfo({numBatches , numUnits},</div><div class="line"><a name="l02041"></a><span class="lineno"> 2041</span>&#160;                                    <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>,</div><div class="line"><a name="l02042"></a><span class="lineno"> 2042</span>&#160;                                    cellStateScale,</div><div class="line"><a name="l02043"></a><span class="lineno"> 2043</span>&#160;                                    cellStateOffset);</div><div class="line"><a name="l02044"></a><span class="lineno"> 2044</span>&#160;</div><div class="line"><a name="l02045"></a><span class="lineno"> 2045</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateInfo({numBatches , outputSize},</div><div class="line"><a name="l02046"></a><span class="lineno"> 2046</span>&#160;                                      <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>,</div><div class="line"><a name="l02047"></a><span class="lineno"> 2047</span>&#160;                                      outputScale,</div><div class="line"><a name="l02048"></a><span class="lineno"> 2048</span>&#160;                                      outputOffset);</div><div class="line"><a name="l02049"></a><span class="lineno"> 2049</span>&#160;</div><div class="line"><a name="l02050"></a><span class="lineno"> 2050</span>&#160;    <a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int8_t, 2&gt;</a> ret(outputStateInfo);</div><div class="line"><a name="l02051"></a><span class="lineno"> 2051</span>&#160;</div><div class="line"><a name="l02052"></a><span class="lineno"> 2052</span>&#160;    <span class="comment">// Input tensors</span></div><div class="line"><a name="l02053"></a><span class="lineno"> 2053</span>&#160;    std::vector&lt;int8_t&gt; inputVector;</div><div class="line"><a name="l02054"></a><span class="lineno"> 2054</span>&#160;    inputVector.assign(input.data(), 
input.data() + (numBatches * inputSize));</div><div class="line"><a name="l02055"></a><span class="lineno"> 2055</span>&#160;    <span class="keyword">auto</span> inputTensor = MakeTensor&lt;int8_t, 2&gt;(inputInfo, inputVector);</div><div class="line"><a name="l02056"></a><span class="lineno"> 2056</span>&#160;</div><div class="line"><a name="l02057"></a><span class="lineno"> 2057</span>&#160;    std::vector&lt;int16_t&gt; cellStateInVector = {0, 0, 0, 0, 0, 0, 0, 0};</div><div class="line"><a name="l02058"></a><span class="lineno"> 2058</span>&#160;    <span class="keyword">auto</span> cellStateInTensor = MakeTensor&lt;int16_t, 2&gt;(cellStateInfo, cellStateInVector);</div><div class="line"><a name="l02059"></a><span class="lineno"> 2059</span>&#160;</div><div class="line"><a name="l02060"></a><span class="lineno"> 2060</span>&#160;    std::vector&lt;int8_t&gt; outputStateInVector = {0, 0, 0, 0, 0, 0};</div><div class="line"><a name="l02061"></a><span class="lineno"> 2061</span>&#160;    <span class="keyword">auto</span> outputStateInTensor = MakeTensor&lt;int8_t, 2&gt;(outputStateInfo, outputStateInVector);</div><div class="line"><a name="l02062"></a><span class="lineno"> 2062</span>&#160;</div><div class="line"><a name="l02063"></a><span class="lineno"> 2063</span>&#160;    <span class="comment">// Output tensors</span></div><div class="line"><a name="l02064"></a><span class="lineno"> 2064</span>&#160;    std::vector&lt;int16_t&gt; cellStateOutVector  = {-14650, 8939, 5771, 6715, -11843, 7847, 1508, 12939};</div><div class="line"><a name="l02065"></a><span class="lineno"> 2065</span>&#160;    <span class="keyword">auto</span> cellStateOutTensor  = MakeTensor&lt;int16_t, 2&gt;(cellStateInfo, cellStateOutVector);</div><div class="line"><a name="l02066"></a><span class="lineno"> 2066</span>&#160;</div><div class="line"><a name="l02067"></a><span class="lineno"> 2067</span>&#160;    std::vector&lt;int8_t&gt; outputVector;</div><div class="line"><a name="l02068"></a><span class="lineno"> 2068</span>&#160;    outputVector.assign(outputExpected.data(), outputExpected.data() + (numBatches * outputSize));</div><div class="line"><a name="l02069"></a><span class="lineno"> 2069</span>&#160;    ret.outputExpected = MakeTensor&lt;int8_t, 2&gt;(outputStateInfo, outputVector);</div><div class="line"><a name="l02070"></a><span class="lineno"> 2070</span>&#160;</div><div class="line"><a name="l02071"></a><span class="lineno"> 2071</span>&#160;    <span class="comment">// Create tensor handles</span></div><div class="line"><a name="l02072"></a><span class="lineno"> 2072</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; inputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(inputInfo);</div><div class="line"><a name="l02073"></a><span class="lineno"> 2073</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateInHandle =</div><div class="line"><a name="l02074"></a><span class="lineno"> 2074</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateInfo);</div><div class="line"><a name="l02075"></a><span class="lineno"> 2075</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateInHandle =</div><div class="line"><a name="l02076"></a><span class="lineno"> 2076</span>&#160;            tensorHandleFactory.<a class="code" 
href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInfo);</div><div class="line"><a name="l02077"></a><span class="lineno"> 2077</span>&#160;</div><div class="line"><a name="l02078"></a><span class="lineno"> 2078</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateOutHandle =</div><div class="line"><a name="l02079"></a><span class="lineno"> 2079</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInfo);</div><div class="line"><a name="l02080"></a><span class="lineno"> 2080</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateOutHandle =</div><div class="line"><a name="l02081"></a><span class="lineno"> 2081</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateInfo);</div><div class="line"><a name="l02082"></a><span class="lineno"> 2082</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInfo);</div><div class="line"><a name="l02083"></a><span class="lineno"> 2083</span>&#160;</div><div class="line"><a name="l02084"></a><span class="lineno"> 2084</span>&#160;    <a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml">armnn::QLstmQueueDescriptor</a> data;</div><div class="line"><a name="l02085"></a><span class="lineno"> 2085</span>&#160;    <a class="code" href="structarmnn_1_1_workload_info.xhtml">armnn::WorkloadInfo</a> info;</div><div class="line"><a name="l02086"></a><span class="lineno"> 2086</span>&#160;</div><div class="line"><a name="l02087"></a><span class="lineno"> 2087</span>&#160;    <span class="comment">// Add inputs and outputs to workload</span></div><div class="line"><a name="l02088"></a><span class="lineno"> 2088</span>&#160;    AddInputToWorkload(data, info, inputInfo, inputHandle.get());</div><div class="line"><a name="l02089"></a><span class="lineno"> 2089</span>&#160;    AddInputToWorkload(data, info, outputStateInfo, outputStateInHandle.get());</div><div class="line"><a name="l02090"></a><span class="lineno"> 2090</span>&#160;    AddInputToWorkload(data, info, cellStateInfo, cellStateInHandle.get());</div><div class="line"><a name="l02091"></a><span class="lineno"> 2091</span>&#160;</div><div class="line"><a name="l02092"></a><span class="lineno"> 2092</span>&#160;    AddOutputToWorkload(data, info, outputStateInfo, outputStateOutHandle.get());</div><div class="line"><a name="l02093"></a><span class="lineno"> 2093</span>&#160;    AddOutputToWorkload(data, info, cellStateInfo, cellStateOutHandle.get());</div><div class="line"><a name="l02094"></a><span class="lineno"> 2094</span>&#160;    AddOutputToWorkload(data, info, outputStateInfo, outputHandle.get());</div><div class="line"><a name="l02095"></a><span class="lineno"> 2095</span>&#160;</div><div class="line"><a name="l02096"></a><span class="lineno"> 2096</span>&#160;    <span class="comment">// Weights and bias tensor and quantization info</span></div><div class="line"><a name="l02097"></a><span class="lineno"> 2097</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputWeightsInfo({numUnits, inputSize},</div><div class="line"><a 
name="l02098"></a><span class="lineno"> 2098</span>&#160;                                       <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9945327825b115e93a3b89f4302e76db">armnn::DataType::QSymmS8</a>,</div><div class="line"><a name="l02099"></a><span class="lineno"> 2099</span>&#160;                                       weightsScale,</div><div class="line"><a name="l02100"></a><span class="lineno"> 2100</span>&#160;                                       weightsOffset);</div><div class="line"><a name="l02101"></a><span class="lineno"> 2101</span>&#160;</div><div class="line"><a name="l02102"></a><span class="lineno"> 2102</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> recurrentWeightsInfo({numUnits, outputSize},</div><div class="line"><a name="l02103"></a><span class="lineno"> 2103</span>&#160;                                           <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9945327825b115e93a3b89f4302e76db">armnn::DataType::QSymmS8</a>,</div><div class="line"><a name="l02104"></a><span class="lineno"> 2104</span>&#160;                                           weightsScale,</div><div class="line"><a name="l02105"></a><span class="lineno"> 2105</span>&#160;                                           weightsOffset);</div><div class="line"><a name="l02106"></a><span class="lineno"> 2106</span>&#160;</div><div class="line"><a name="l02107"></a><span class="lineno"> 2107</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> biasInfo({numUnits}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6accedffbc6e5308e33d3843e8bdc0dad7">armnn::DataType::Signed32</a>, biasScale, biasOffset);</div><div class="line"><a name="l02108"></a><span class="lineno"> 2108</span>&#160;</div><div class="line"><a name="l02109"></a><span class="lineno"> 2109</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> layerNormWeightsInfo({numUnits}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>, layerNormScale, layerNormOffset);</div><div class="line"><a name="l02110"></a><span class="lineno"> 2110</span>&#160;</div><div class="line"><a name="l02111"></a><span class="lineno"> 2111</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> projectionWeightsInfo({outputSize, numUnits},</div><div class="line"><a name="l02112"></a><span class="lineno"> 2112</span>&#160;                                            <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9945327825b115e93a3b89f4302e76db">armnn::DataType::QSymmS8</a>,</div><div class="line"><a name="l02113"></a><span class="lineno"> 2113</span>&#160;                                            projectionWeightsScale,</div><div class="line"><a name="l02114"></a><span class="lineno"> 2114</span>&#160;                                            0);</div><div class="line"><a name="l02115"></a><span class="lineno"> 2115</span>&#160;</div><div class="line"><a name="l02116"></a><span class="lineno"> 2116</span>&#160;    <span class="comment">// Weights and bias tensor data</span></div><div class="line"><a name="l02117"></a><span class="lineno"> 2117</span>&#160;    <span class="keyword">auto</span> inputToInputWeights = MakeTensor&lt;int8_t, 2&gt;(inputWeightsInfo,</div><div class="line"><a 
name="l02118"></a><span class="lineno"> 2118</span>&#160;            {64, 77, 89, -102, -115, 13, 25, 38, -51, 64, -102, 89, -77, 64, -51, -64, -51, -38, -25, -13});</div><div class="line"><a name="l02119"></a><span class="lineno"> 2119</span>&#160;    <span class="keyword">auto</span> inputToForgetWeights = MakeTensor&lt;int8_t, 2&gt;(inputWeightsInfo,</div><div class="line"><a name="l02120"></a><span class="lineno"> 2120</span>&#160;            {-77, -13, 38, 25, 115, -64, -25, -51, 38, -102, -51, 38, -64, -51, -77, 38, -51, -77, -64, -64});</div><div class="line"><a name="l02121"></a><span class="lineno"> 2121</span>&#160;    <span class="keyword">auto</span> inputToCellWeights   = MakeTensor&lt;int8_t, 2&gt;(inputWeightsInfo,</div><div class="line"><a name="l02122"></a><span class="lineno"> 2122</span>&#160;            {-51, -38, -25, -13, -64, 64, -25, -38, -25, -77, 77, -13, -51, -38, -89, 89, -115, -64, 102, 77});</div><div class="line"><a name="l02123"></a><span class="lineno"> 2123</span>&#160;    <span class="keyword">auto</span> inputToOutputWeights = MakeTensor&lt;int8_t, 2&gt;(inputWeightsInfo,</div><div class="line"><a name="l02124"></a><span class="lineno"> 2124</span>&#160;            {-102, -51, -25, -115, -13, -89, 38, -38, -102, -25, 77, -25, 51, -89, -38, -64, 13, 64, -77, -51});</div><div class="line"><a name="l02125"></a><span class="lineno"> 2125</span>&#160;</div><div class="line"><a name="l02126"></a><span class="lineno"> 2126</span>&#160;    <span class="keyword">auto</span> recurrentToInputWeights = MakeTensor&lt;int8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l02127"></a><span class="lineno"> 2127</span>&#160;            {-25, -38, 51, 13, -64, 115, -25, -38, -89, 6, -25, -77});</div><div class="line"><a name="l02128"></a><span class="lineno"> 2128</span>&#160;    <span class="keyword">auto</span> recurrentToForgetWeights = MakeTensor&lt;int8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l02129"></a><span class="lineno"> 2129</span>&#160;            {-64, -38, -64, -25, 77, 51, 115, 38, -13, 25, 64, 25});</div><div class="line"><a name="l02130"></a><span class="lineno"> 2130</span>&#160;    <span class="keyword">auto</span> recurrentToCellWeights   = MakeTensor&lt;int8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l02131"></a><span class="lineno"> 2131</span>&#160;            {-38, 25, 13, -38, 102, -10, -25, 38, 102, -77, -13, 25});</div><div class="line"><a name="l02132"></a><span class="lineno"> 2132</span>&#160;    <span class="keyword">auto</span> recurrentToOutputWeights = MakeTensor&lt;int8_t, 2&gt;(recurrentWeightsInfo,</div><div class="line"><a name="l02133"></a><span class="lineno"> 2133</span>&#160;            {38, -13, 13, -25, -64, -89, -25, -77, -13, -51, -89, -25});</div><div class="line"><a name="l02134"></a><span class="lineno"> 2134</span>&#160;</div><div class="line"><a name="l02135"></a><span class="lineno"> 2135</span>&#160;    <span class="keyword">auto</span> inputGateBias  = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {644245, 3221226, 4724464, 8160438});</div><div class="line"><a name="l02136"></a><span class="lineno"> 2136</span>&#160;    <span class="keyword">auto</span> forgetGateBias = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {2147484, -6442451, -4294968, 2147484});</div><div class="line"><a name="l02137"></a><span class="lineno"> 2137</span>&#160;    <span class="keyword">auto</span> cellBias       = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {-1073742, 15461883, 5368709, 
1717987});</div><div class="line"><a name="l02138"></a><span class="lineno"> 2138</span>&#160;    <span class="keyword">auto</span> outputGateBias = MakeTensor&lt;int32_t, 1&gt;(biasInfo, {1073742, -214748, 4294968, 2147484});</div><div class="line"><a name="l02139"></a><span class="lineno"> 2139</span>&#160;</div><div class="line"><a name="l02140"></a><span class="lineno"> 2140</span>&#160;    <span class="keyword">auto</span> inputLayerNormWeights = MakeTensor&lt;int16_t, 1&gt;(layerNormWeightsInfo, {3277, 6553, 9830, 16384});</div><div class="line"><a name="l02141"></a><span class="lineno"> 2141</span>&#160;    <span class="keyword">auto</span> forgetLayerNormWeights = MakeTensor&lt;int16_t, 1&gt;(layerNormWeightsInfo, {6553, 6553, 13107, 9830});</div><div class="line"><a name="l02142"></a><span class="lineno"> 2142</span>&#160;    <span class="keyword">auto</span> cellLayerNormWeights   = MakeTensor&lt;int16_t, 1&gt;(layerNormWeightsInfo, {22937, 6553, 9830, 26214});</div><div class="line"><a name="l02143"></a><span class="lineno"> 2143</span>&#160;    <span class="keyword">auto</span> outputLayerNormWeights = MakeTensor&lt;int16_t, 1&gt;(layerNormWeightsInfo, {19660, 6553, 6553, 16384});</div><div class="line"><a name="l02144"></a><span class="lineno"> 2144</span>&#160;</div><div class="line"><a name="l02145"></a><span class="lineno"> 2145</span>&#160;    <span class="keyword">auto</span> projectionWeights = MakeTensor&lt;int8_t, 2&gt;(projectionWeightsInfo,</div><div class="line"><a name="l02146"></a><span class="lineno"> 2146</span>&#160;            {-25, 51, 3, -51, 25, 127, 77, 20, 18, 51, -102, 51});</div><div class="line"><a name="l02147"></a><span class="lineno"> 2147</span>&#160;</div><div class="line"><a name="l02148"></a><span class="lineno"> 2148</span>&#160;    <span class="comment">// ScopedCpuTensorHandles</span></div><div class="line"><a name="l02149"></a><span class="lineno"> 2149</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToInputWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l02150"></a><span class="lineno"> 2150</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToForgetWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l02151"></a><span class="lineno"> 2151</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToCellWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l02152"></a><span class="lineno"> 2152</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToOutputWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l02153"></a><span class="lineno"> 2153</span>&#160;</div><div class="line"><a name="l02154"></a><span class="lineno"> 2154</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToInputWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l02155"></a><span class="lineno"> 2155</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToForgetWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l02156"></a><span class="lineno"> 2156</span>&#160;    <a class="code" 
href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToCellWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l02157"></a><span class="lineno"> 2157</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToOutputWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l02158"></a><span class="lineno"> 2158</span>&#160;</div><div class="line"><a name="l02159"></a><span class="lineno"> 2159</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputGateBiasTensor(biasInfo);</div><div class="line"><a name="l02160"></a><span class="lineno"> 2160</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetGateBiasTensor(biasInfo);</div><div class="line"><a name="l02161"></a><span class="lineno"> 2161</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellBiasTensor(biasInfo);</div><div class="line"><a name="l02162"></a><span class="lineno"> 2162</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputGateBiasTensor(biasInfo);</div><div class="line"><a name="l02163"></a><span class="lineno"> 2163</span>&#160;</div><div class="line"><a name="l02164"></a><span class="lineno"> 2164</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l02165"></a><span class="lineno"> 2165</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l02166"></a><span class="lineno"> 2166</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l02167"></a><span class="lineno"> 2167</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l02168"></a><span class="lineno"> 2168</span>&#160;</div><div class="line"><a name="l02169"></a><span class="lineno"> 2169</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> projectionWeightsTensor(projectionWeightsInfo);</div><div class="line"><a name="l02170"></a><span class="lineno"> 2170</span>&#160;</div><div class="line"><a name="l02171"></a><span class="lineno"> 2171</span>&#160;    <span class="comment">// Allocate and copy data</span></div><div class="line"><a name="l02172"></a><span class="lineno"> 2172</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToInputWeightsTensor, &amp;inputToInputWeights[0][0]);</div><div class="line"><a name="l02173"></a><span class="lineno"> 2173</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToForgetWeightsTensor, &amp;inputToForgetWeights[0][0]);</div><div class="line"><a name="l02174"></a><span 
class="lineno"> 2174</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToCellWeightsTensor, &amp;inputToCellWeights[0][0]);</div><div class="line"><a name="l02175"></a><span class="lineno"> 2175</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToOutputWeightsTensor, &amp;inputToOutputWeights[0][0]);</div><div class="line"><a name="l02176"></a><span class="lineno"> 2176</span>&#160;</div><div class="line"><a name="l02177"></a><span class="lineno"> 2177</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToInputWeightsTensor, &amp;recurrentToInputWeights[0][0]);</div><div class="line"><a name="l02178"></a><span class="lineno"> 2178</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToForgetWeightsTensor, &amp;recurrentToForgetWeights[0][0]);</div><div class="line"><a name="l02179"></a><span class="lineno"> 2179</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToCellWeightsTensor, &amp;recurrentToCellWeights[0][0]);</div><div class="line"><a name="l02180"></a><span class="lineno"> 2180</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToOutputWeightsTensor, &amp;recurrentToOutputWeights[0][0]);</div><div class="line"><a name="l02181"></a><span class="lineno"> 2181</span>&#160;</div><div class="line"><a name="l02182"></a><span class="lineno"> 2182</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputGateBiasTensor, &amp;inputGateBias[0]);</div><div class="line"><a name="l02183"></a><span class="lineno"> 2183</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetGateBiasTensor, &amp;forgetGateBias[0]);</div><div class="line"><a name="l02184"></a><span class="lineno"> 2184</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellBiasTensor, &amp;cellBias[0]);</div><div class="line"><a name="l02185"></a><span class="lineno"> 2185</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputGateBiasTensor, &amp;outputGateBias[0]);</div><div class="line"><a name="l02186"></a><span class="lineno"> 2186</span>&#160;</div><div class="line"><a name="l02187"></a><span class="lineno"> 2187</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputLayerNormWeightsTensor, &amp;inputLayerNormWeights[0]);</div><div class="line"><a name="l02188"></a><span class="lineno"> 2188</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetLayerNormWeightsTensor, &amp;forgetLayerNormWeights[0]);</div><div class="line"><a 
name="l02189"></a><span class="lineno"> 2189</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellLayerNormWeightsTensor, &amp;cellLayerNormWeights[0]);</div><div class="line"><a name="l02190"></a><span class="lineno"> 2190</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputLayerNormWeightsTensor, &amp;outputLayerNormWeights[0]);</div><div class="line"><a name="l02191"></a><span class="lineno"> 2191</span>&#160;</div><div class="line"><a name="l02192"></a><span class="lineno"> 2192</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;projectionWeightsTensor, &amp;projectionWeights[0][0]);</div><div class="line"><a name="l02193"></a><span class="lineno"> 2193</span>&#160;</div><div class="line"><a name="l02194"></a><span class="lineno"> 2194</span>&#160;    <span class="comment">// Setup queue descriptor</span></div><div class="line"><a name="l02195"></a><span class="lineno"> 2195</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a08a1932be591c315a512a877d38b22df">m_InputToInputWeights</a> = &amp;inputToInputWeightsTensor;</div><div class="line"><a name="l02196"></a><span class="lineno"> 2196</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">m_InputToForgetWeights</a> = &amp;inputToForgetWeightsTensor;</div><div class="line"><a name="l02197"></a><span class="lineno"> 2197</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a28ad98d17603fd8b12e046f8ece58970">m_InputToCellWeights</a> = &amp;inputToCellWeightsTensor;</div><div class="line"><a name="l02198"></a><span class="lineno"> 2198</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a83dc9086b2e4a4e4cadb66bd874df798">m_InputToOutputWeights</a> = &amp;inputToOutputWeightsTensor;</div><div class="line"><a name="l02199"></a><span class="lineno"> 2199</span>&#160;</div><div class="line"><a name="l02200"></a><span class="lineno"> 2200</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a98d377149071d8842d610cc0734d1cfe">m_RecurrentToInputWeights</a> = &amp;recurrentToInputWeightsTensor;</div><div class="line"><a name="l02201"></a><span class="lineno"> 2201</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a45d73e66cbb2b65049e4016c20657ccf">m_RecurrentToForgetWeights</a> = &amp;recurrentToForgetWeightsTensor;</div><div class="line"><a name="l02202"></a><span class="lineno"> 2202</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">m_RecurrentToCellWeights</a> = &amp;recurrentToCellWeightsTensor;</div><div class="line"><a name="l02203"></a><span class="lineno"> 2203</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">m_RecurrentToOutputWeights</a> = &amp;recurrentToOutputWeightsTensor;</div><div class="line"><a name="l02204"></a><span class="lineno"> 2204</span>&#160;</div><div class="line"><a name="l02205"></a><span class="lineno"> 2205</span>&#160;    data.<a class="code" 
href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#acb3aade8fae984f7293e222dcbe66030">m_InputGateBias</a> = &amp;inputGateBiasTensor;</div><div class="line"><a name="l02206"></a><span class="lineno"> 2206</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">m_ForgetGateBias</a> = &amp;forgetGateBiasTensor;</div><div class="line"><a name="l02207"></a><span class="lineno"> 2207</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">m_CellBias</a> = &amp;cellBiasTensor;</div><div class="line"><a name="l02208"></a><span class="lineno"> 2208</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">m_OutputGateBias</a> = &amp;outputGateBiasTensor;</div><div class="line"><a name="l02209"></a><span class="lineno"> 2209</span>&#160;</div><div class="line"><a name="l02210"></a><span class="lineno"> 2210</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a9cc28aa4fff6ba9a8abdb340c1abdd57">m_InputLayerNormWeights</a> = &amp;inputLayerNormWeightsTensor;</div><div class="line"><a name="l02211"></a><span class="lineno"> 2211</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a453a4af385d0c060c9aac990fceaa1ef">m_ForgetLayerNormWeights</a> = &amp;forgetLayerNormWeightsTensor;</div><div class="line"><a name="l02212"></a><span class="lineno"> 2212</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a518f0195d0278a892b49649b8860d17f">m_CellLayerNormWeights</a> = &amp;cellLayerNormWeightsTensor;</div><div class="line"><a name="l02213"></a><span class="lineno"> 2213</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aa3f07e27230d6d99adc2c82ba681df2b">m_OutputLayerNormWeights</a> = &amp;outputLayerNormWeightsTensor;</div><div class="line"><a name="l02214"></a><span class="lineno"> 2214</span>&#160;</div><div class="line"><a name="l02215"></a><span class="lineno"> 2215</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#af3c52626a6f05597d82ed095d0765962">m_ProjectionWeights</a> = &amp;projectionWeightsTensor;</div><div class="line"><a name="l02216"></a><span class="lineno"> 2216</span>&#160;</div><div class="line"><a name="l02217"></a><span class="lineno"> 2217</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#ad474e5c51a0b194ef32e812b86c0cbdb">m_CifgEnabled</a> = cifgEnabled;</div><div class="line"><a name="l02218"></a><span class="lineno"> 2218</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a2837b4396f20c956952d1a7286cab5f8">m_PeepholeEnabled</a> = peepholeEnabled;</div><div class="line"><a name="l02219"></a><span class="lineno"> 2219</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a6c9de81fc65b3c4924cab11907075a17">m_ProjectionEnabled</a> = projectionEnabled;</div><div class="line"><a name="l02220"></a><span class="lineno"> 
2220</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a4a8ec49f130084445d44297549254780">m_LayerNormEnabled</a> = layerNormEnabled;</div><div class="line"><a name="l02221"></a><span class="lineno"> 2221</span>&#160;</div><div class="line"><a name="l02222"></a><span class="lineno"> 2222</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a09e1f097944f61cc901240f9300364cf">m_InputIntermediateScale</a> = inputIntermediateScale;</div><div class="line"><a name="l02223"></a><span class="lineno"> 2223</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#afec7f36158448f723b426a9527acb189">m_ForgetIntermediateScale</a> = forgetIntermediateScale;</div><div class="line"><a name="l02224"></a><span class="lineno"> 2224</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a0477ee1b44ace6090119178eea78cb0b">m_CellIntermediateScale</a> = cellIntermediateScale;</div><div class="line"><a name="l02225"></a><span class="lineno"> 2225</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#aa43409f9b457352c95c89f20ce5d844d">m_OutputIntermediateScale</a> = outputIntermediateScale;</div><div class="line"><a name="l02226"></a><span class="lineno"> 2226</span>&#160;</div><div class="line"><a name="l02227"></a><span class="lineno"> 2227</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#a4556cbd764d4848d8ad0637a9eed580d">m_HiddenStateZeroPoint</a> = hiddenStateZeroPoint;</div><div class="line"><a name="l02228"></a><span class="lineno"> 2228</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#af8f724af7210b52529216feefa993c98">m_HiddenStateScale</a> = hiddenStateScale;</div><div class="line"><a name="l02229"></a><span class="lineno"> 2229</span>&#160;</div><div class="line"><a name="l02230"></a><span class="lineno"> 2230</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#ac81fb0e66dc623dc37c77f219f53a6d3">m_CellClip</a> = cellClip;</div><div class="line"><a name="l02231"></a><span class="lineno"> 2231</span>&#160;    data.<a class="code" href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">m_Parameters</a>.<a class="code" href="structarmnn_1_1_q_lstm_descriptor.xhtml#aa6a518b65088f34803b3214334bdff61">m_ProjectionClip</a> = projectionClip;</div><div class="line"><a name="l02232"></a><span class="lineno"> 

    // Create workload and allocate tensor handles
    std::unique_ptr<armnn::IWorkload> workload = workloadFactory.CreateQLstm(data, info);
    inputHandle->Allocate();
    outputStateInHandle->Allocate();
    cellStateInHandle->Allocate();

    outputStateOutHandle->Allocate();
    cellStateOutHandle->Allocate();
    outputHandle->Allocate();

    CopyDataToITensorHandle(inputHandle.get(), &inputTensor[0][0]);
    CopyDataToITensorHandle(outputStateInHandle.get(), &outputStateInTensor[0][0]);
    CopyDataToITensorHandle(cellStateInHandle.get(), &cellStateInTensor[0][0]);

    workload->Execute();

    CopyDataFromITensorHandle(&ret.output[0][0], outputHandle.get());

    return ret;
}
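
// Illustrative usage sketch: the helper below is hypothetical (not part of this file) and only
// shows the shapes a caller must supply to QLstmTestImpl1 - the input is [numBatches, inputSize]
// = [2, 5] and the expected output is [numBatches, outputSize] = [2, 3]. The zero-filled
// placeholders stand in for the reference vectors a real backend test would provide;
// std::fill_n (from <algorithm>) is assumed to be available.
LayerTestResult<int8_t, 2> QLstmProjectionLayerNormUsageSketch(
        armnn::IWorkloadFactory& workloadFactory,
        const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
        const armnn::ITensorHandleFactory& tensorHandleFactory)
{
    // Shape-only placeholders; boost::extents builds the [2][5] and [2][3] index ranges.
    boost::multi_array<int8_t, 2> input(boost::extents[2][5]);
    boost::multi_array<int8_t, 2> expectedOutput(boost::extents[2][3]);

    // Fill both arrays with zeros so the contents are well defined.
    std::fill_n(input.data(), input.num_elements(), static_cast<int8_t>(0));
    std::fill_n(expectedOutput.data(), expectedOutput.num_elements(), static_cast<int8_t>(0));

    return QLstmTestImpl1(workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);
}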
workloadFactory,</div><div class="line"><a name="l02257"></a><span class="lineno"> 2257</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02258"></a><span class="lineno"> 2258</span>&#160;        <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory,</div><div class="line"><a name="l02259"></a><span class="lineno"> 2259</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;int8_t, 2&gt;&amp; input,</div><div class="line"><a name="l02260"></a><span class="lineno"> 2260</span>&#160;        <span class="keyword">const</span> boost::multi_array&lt;int8_t, 2&gt;&amp; outputExpected)</div><div class="line"><a name="l02261"></a><span class="lineno"> 2261</span>&#160;{</div><div class="line"><a name="l02262"></a><span class="lineno"> 2262</span>&#160;    <a class="code" href="namespacearmnn.xhtml#a44affeeb090c3c6a3062830562672e84">IgnoreUnused</a>(memoryManager);</div><div class="line"><a name="l02263"></a><span class="lineno"> 2263</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> numBatches = 2;</div><div class="line"><a name="l02264"></a><span class="lineno"> 2264</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> inputSize  = 5;</div><div class="line"><a name="l02265"></a><span class="lineno"> 2265</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> outputSize = 3;</div><div class="line"><a name="l02266"></a><span class="lineno"> 2266</span>&#160;    <span class="keywordtype">unsigned</span> <span class="keywordtype">int</span> numUnits   = 4;</div><div class="line"><a name="l02267"></a><span class="lineno"> 2267</span>&#160;</div><div class="line"><a name="l02268"></a><span class="lineno"> 2268</span>&#160;    <span class="keywordtype">bool</span> cifgEnabled       = <span class="keyword">true</span>;</div><div class="line"><a name="l02269"></a><span class="lineno"> 2269</span>&#160;    <span class="keywordtype">bool</span> peepholeEnabled   = <span class="keyword">false</span>;</div><div class="line"><a name="l02270"></a><span class="lineno"> 2270</span>&#160;    <span class="keywordtype">bool</span> projectionEnabled = <span class="keyword">true</span>;</div><div class="line"><a name="l02271"></a><span class="lineno"> 2271</span>&#160;    <span class="keywordtype">bool</span> layerNormEnabled  = <span class="keyword">true</span>;</div><div class="line"><a name="l02272"></a><span class="lineno"> 2272</span>&#160;</div><div class="line"><a name="l02273"></a><span class="lineno"> 2273</span>&#160;    <span class="comment">// Scale/Offset quantization info</span></div><div class="line"><a name="l02274"></a><span class="lineno"> 2274</span>&#160;    <span class="keywordtype">float</span> inputScale    = 0.0078125f;</div><div class="line"><a name="l02275"></a><span class="lineno"> 2275</span>&#160;    int32_t inputOffset = 0;</div><div class="line"><a name="l02276"></a><span class="lineno"> 2276</span>&#160;</div><div class="line"><a name="l02277"></a><span class="lineno"> 2277</span>&#160;    int32_t hiddenStateZeroPoint = 0;</div><div class="line"><a name="l02278"></a><span class="lineno"> 2278</span>&#160;    <span 
class="keywordtype">float</span> hiddenStateScale       = 0.007f;</div><div class="line"><a name="l02279"></a><span class="lineno"> 2279</span>&#160;</div><div class="line"><a name="l02280"></a><span class="lineno"> 2280</span>&#160;    <span class="comment">// if (!projectionEnabled) outputScale == hiddenStateScale</span></div><div class="line"><a name="l02281"></a><span class="lineno"> 2281</span>&#160;    <span class="keywordtype">float</span> outputScale    = 3.05176e-05f;</div><div class="line"><a name="l02282"></a><span class="lineno"> 2282</span>&#160;    int32_t outputOffset = 0;</div><div class="line"><a name="l02283"></a><span class="lineno"> 2283</span>&#160;</div><div class="line"><a name="l02284"></a><span class="lineno"> 2284</span>&#160;    <span class="keywordtype">float</span> cellStateScale    = 3.05176e-05f;</div><div class="line"><a name="l02285"></a><span class="lineno"> 2285</span>&#160;    int32_t cellStateOffset = 0;</div><div class="line"><a name="l02286"></a><span class="lineno"> 2286</span>&#160;</div><div class="line"><a name="l02287"></a><span class="lineno"> 2287</span>&#160;    <span class="keywordtype">float</span> weightsScale    = 0.00784314f;</div><div class="line"><a name="l02288"></a><span class="lineno"> 2288</span>&#160;    int32_t weightsOffset = 0;</div><div class="line"><a name="l02289"></a><span class="lineno"> 2289</span>&#160;</div><div class="line"><a name="l02290"></a><span class="lineno"> 2290</span>&#160;    <span class="keywordtype">float</span> layerNormScale    = 3.05182e-05f;</div><div class="line"><a name="l02291"></a><span class="lineno"> 2291</span>&#160;    int32_t layerNormOffset = 0;</div><div class="line"><a name="l02292"></a><span class="lineno"> 2292</span>&#160;</div><div class="line"><a name="l02293"></a><span class="lineno"> 2293</span>&#160;    <span class="keywordtype">float</span> biasScale    = layerNormScale / 1024;</div><div class="line"><a name="l02294"></a><span class="lineno"> 2294</span>&#160;    int32_t biasOffset = 0;</div><div class="line"><a name="l02295"></a><span class="lineno"> 2295</span>&#160;</div><div class="line"><a name="l02296"></a><span class="lineno"> 2296</span>&#160;    <span class="keywordtype">float</span> projectionWeightsScale = 0.00392157f;</div><div class="line"><a name="l02297"></a><span class="lineno"> 2297</span>&#160;</div><div class="line"><a name="l02298"></a><span class="lineno"> 2298</span>&#160;    <span class="keywordtype">float</span> inputIntermediateScale  = 0.007059f;</div><div class="line"><a name="l02299"></a><span class="lineno"> 2299</span>&#160;    <span class="keywordtype">float</span> forgetIntermediateScale = 0.007812f;</div><div class="line"><a name="l02300"></a><span class="lineno"> 2300</span>&#160;    <span class="keywordtype">float</span> cellIntermediateScale   = inputIntermediateScale;</div><div class="line"><a name="l02301"></a><span class="lineno"> 2301</span>&#160;    <span class="keywordtype">float</span> outputIntermediateScale = forgetIntermediateScale;</div><div class="line"><a name="l02302"></a><span class="lineno"> 2302</span>&#160;</div><div class="line"><a name="l02303"></a><span class="lineno"> 2303</span>&#160;    <span class="keywordtype">float</span> cellClip       = 0.0f;</div><div class="line"><a name="l02304"></a><span class="lineno"> 2304</span>&#160;    <span class="keywordtype">float</span> projectionClip = 0.0f;</div><div class="line"><a name="l02305"></a><span class="lineno"> 2305</span>&#160;</div><div class="line"><a 
name="l02306"></a><span class="lineno"> 2306</span>&#160;    <span class="comment">// Input/Output tensor info</span></div><div class="line"><a name="l02307"></a><span class="lineno"> 2307</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputInfo({numBatches , inputSize},</div><div class="line"><a name="l02308"></a><span class="lineno"> 2308</span>&#160;                                <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>,</div><div class="line"><a name="l02309"></a><span class="lineno"> 2309</span>&#160;                                inputScale,</div><div class="line"><a name="l02310"></a><span class="lineno"> 2310</span>&#160;                                inputOffset);</div><div class="line"><a name="l02311"></a><span class="lineno"> 2311</span>&#160;</div><div class="line"><a name="l02312"></a><span class="lineno"> 2312</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> cellStateInfo({numBatches , numUnits},</div><div class="line"><a name="l02313"></a><span class="lineno"> 2313</span>&#160;                                    <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>,</div><div class="line"><a name="l02314"></a><span class="lineno"> 2314</span>&#160;                                    cellStateScale,</div><div class="line"><a name="l02315"></a><span class="lineno"> 2315</span>&#160;                                    cellStateOffset);</div><div class="line"><a name="l02316"></a><span class="lineno"> 2316</span>&#160;</div><div class="line"><a name="l02317"></a><span class="lineno"> 2317</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputStateInfo({numBatches , outputSize},</div><div class="line"><a name="l02318"></a><span class="lineno"> 2318</span>&#160;                                      <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>,</div><div class="line"><a name="l02319"></a><span class="lineno"> 2319</span>&#160;                                      outputScale,</div><div class="line"><a name="l02320"></a><span class="lineno"> 2320</span>&#160;                                      outputOffset);</div><div class="line"><a name="l02321"></a><span class="lineno"> 2321</span>&#160;</div><div class="line"><a name="l02322"></a><span class="lineno"> 2322</span>&#160;    <a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int8_t, 2&gt;</a> ret(outputStateInfo);</div><div class="line"><a name="l02323"></a><span class="lineno"> 2323</span>&#160;</div><div class="line"><a name="l02324"></a><span class="lineno"> 2324</span>&#160;    <span class="comment">// Input tensors</span></div><div class="line"><a name="l02325"></a><span class="lineno"> 2325</span>&#160;    std::vector&lt;int8_t&gt; inputVector;</div><div class="line"><a name="l02326"></a><span class="lineno"> 2326</span>&#160;    inputVector.assign(input.data(), input.data() + (numBatches * inputSize));</div><div class="line"><a name="l02327"></a><span class="lineno"> 2327</span>&#160;    <span class="keyword">auto</span> inputTensor = MakeTensor&lt;int8_t, 2&gt;(inputInfo, inputVector);</div><div class="line"><a name="l02328"></a><span class="lineno"> 2328</span>&#160;</div><div 
class="line"><a name="l02329"></a><span class="lineno"> 2329</span>&#160;    std::vector&lt;int16_t&gt; cellStateInVector = {0, 0, 0, 0, 0, 0, 0, 0};</div><div class="line"><a name="l02330"></a><span class="lineno"> 2330</span>&#160;    <span class="keyword">auto</span> cellStateInTensor = MakeTensor&lt;int16_t, 2&gt;(cellStateInfo, cellStateInVector);</div><div class="line"><a name="l02331"></a><span class="lineno"> 2331</span>&#160;</div><div class="line"><a name="l02332"></a><span class="lineno"> 2332</span>&#160;    std::vector&lt;int8_t&gt; outputStateInVector = {0, 0, 0, 0, 0, 0};</div><div class="line"><a name="l02333"></a><span class="lineno"> 2333</span>&#160;    <span class="keyword">auto</span> outputStateInTensor = MakeTensor&lt;int8_t, 2&gt;(outputStateInfo, outputStateInVector);</div><div class="line"><a name="l02334"></a><span class="lineno"> 2334</span>&#160;</div><div class="line"><a name="l02335"></a><span class="lineno"> 2335</span>&#160;    <span class="comment">// Output tensors</span></div><div class="line"><a name="l02336"></a><span class="lineno"> 2336</span>&#160;    std::vector&lt;int16_t&gt; cellStateOutVector  = {-14650, 8939, 5771, 6715, -11843, 7847, 1508, 12939};</div><div class="line"><a name="l02337"></a><span class="lineno"> 2337</span>&#160;    <span class="keyword">auto</span> cellStateOutTensor  = MakeTensor&lt;int16_t, 2&gt;(cellStateInfo, cellStateOutVector);</div><div class="line"><a name="l02338"></a><span class="lineno"> 2338</span>&#160;</div><div class="line"><a name="l02339"></a><span class="lineno"> 2339</span>&#160;    std::vector&lt;int8_t&gt; outputVector;</div><div class="line"><a name="l02340"></a><span class="lineno"> 2340</span>&#160;    outputVector.assign(outputExpected.data(), outputExpected.data() + (numBatches * outputSize));</div><div class="line"><a name="l02341"></a><span class="lineno"> 2341</span>&#160;    ret.outputExpected = MakeTensor&lt;int8_t, 2&gt;(outputStateInfo, outputVector);</div><div class="line"><a name="l02342"></a><span class="lineno"> 2342</span>&#160;</div><div class="line"><a name="l02343"></a><span class="lineno"> 2343</span>&#160;    <span class="comment">// Create tensor handles</span></div><div class="line"><a name="l02344"></a><span class="lineno"> 2344</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; inputHandle = tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(inputInfo);</div><div class="line"><a name="l02345"></a><span class="lineno"> 2345</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; cellStateInHandle =</div><div class="line"><a name="l02346"></a><span class="lineno"> 2346</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(cellStateInfo);</div><div class="line"><a name="l02347"></a><span class="lineno"> 2347</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; outputStateInHandle =</div><div class="line"><a name="l02348"></a><span class="lineno"> 2348</span>&#160;            tensorHandleFactory.<a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">CreateTensorHandle</a>(outputStateInfo);</div><div class="line"><a name="l02349"></a><span class="lineno"> 2349</span>&#160;</div><div class="line"><a name="l02350"></a><span class="lineno"> 2350</span>&#160;    std::unique_ptr&lt;armnn::ITensorHandle&gt; 
    armnn::QLstmQueueDescriptor data;
    armnn::WorkloadInfo info;

    // Add inputs and outputs to workload
    AddInputToWorkload(data, info, inputInfo, inputHandle.get());
    AddInputToWorkload(data, info, outputStateInfo, outputStateInHandle.get());
    AddInputToWorkload(data, info, cellStateInfo, cellStateInHandle.get());

    AddOutputToWorkload(data, info, outputStateInfo, outputStateOutHandle.get());
    AddOutputToWorkload(data, info, cellStateInfo, cellStateOutHandle.get());
    AddOutputToWorkload(data, info, outputStateInfo, outputHandle.get());
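    // With m_CifgEnabled set, the input gate is coupled to the forget gate, so only the
    // forget/cell/output gate weight, bias and layer-norm tensors are supplied below; no
    // input-gate tensors are needed for this configuration.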
    // Weights and bias tensor and quantization info
    armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
                                       armnn::DataType::QSymmS8,
                                       weightsScale,
                                       weightsOffset);

    armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
                                           armnn::DataType::QSymmS8,
                                           weightsScale,
                                           weightsOffset);

    armnn::TensorInfo biasInfo({numUnits}, armnn::DataType::Signed32, biasScale, biasOffset);

    armnn::TensorInfo layerNormWeightsInfo({numUnits}, armnn::DataType::QSymmS16, layerNormScale, layerNormOffset);

    armnn::TensorInfo projectionWeightsInfo({outputSize, numUnits},
                                            armnn::DataType::QSymmS8,
                                            projectionWeightsScale,
                                            0);
    // Weights and bias tensor data
    auto inputToForgetWeights = MakeTensor<int8_t, 2>(inputWeightsInfo,
            {-77, -13, 38, 25, 115, -64, -25, -51, 38, -102, -51, 38, -64, -51, -77, 38, -51, -77, -64, -64});
    auto inputToCellWeights   = MakeTensor<int8_t, 2>(inputWeightsInfo,
            {-51, -38, -25, -13, -64, 64, -25, -38, -25, -77, 77, -13, -51, -38, -89, 89, -115, -64, 102, 77});
    auto inputToOutputWeights = MakeTensor<int8_t, 2>(inputWeightsInfo,
            {-102, -51, -25, -115, -13, -89, 38, -38, -102, -25, 77, -25, 51, -89, -38, -64, 13, 64, -77, -51});

    auto recurrentToForgetWeights = MakeTensor<int8_t, 2>(recurrentWeightsInfo,
            {-64, -38, -64, -25, 77, 51, 115, 38, -13, 25, 64, 25});
    auto recurrentToCellWeights   = MakeTensor<int8_t, 2>(recurrentWeightsInfo,
            {-38, 25, 13, -38, 102, -10, -25, 38, 102, -77, -13, 25});
    auto recurrentToOutputWeights = MakeTensor<int8_t, 2>(recurrentWeightsInfo,
            {38, -13, 13, -25, -64, -89, -25, -77, -13, -51, -89, -25});

    auto forgetGateBias = MakeTensor<int32_t, 1>(biasInfo, {2147484, -6442451, -4294968, 2147484});
    auto cellBias       = MakeTensor<int32_t, 1>(biasInfo, {-1073742, 15461883, 5368709, 1717987});
    auto outputGateBias = MakeTensor<int32_t, 1>(biasInfo, {1073742, -214748, 4294968, 2147484});

    auto forgetLayerNormWeights = MakeTensor<int16_t, 1>(layerNormWeightsInfo, {6553, 6553, 13107, 9830});
    auto cellLayerNormWeights   = MakeTensor<int16_t, 1>(layerNormWeightsInfo, {22937, 6553, 9830, 26214});
    auto outputLayerNormWeights = MakeTensor<int16_t, 1>(layerNormWeightsInfo, {19660, 6553, 6553, 16384});
name="l02412"></a><span class="lineno"> 2412</span>&#160;            {-25, 51, 3, -51, 25, 127, 77, 20, 18, 51, -102, 51});</div><div class="line"><a name="l02413"></a><span class="lineno"> 2413</span>&#160;</div><div class="line"><a name="l02414"></a><span class="lineno"> 2414</span>&#160;    <span class="comment">// ScopedCpuTensorHandles</span></div><div class="line"><a name="l02415"></a><span class="lineno"> 2415</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToForgetWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l02416"></a><span class="lineno"> 2416</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToCellWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l02417"></a><span class="lineno"> 2417</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> inputToOutputWeightsTensor(inputWeightsInfo);</div><div class="line"><a name="l02418"></a><span class="lineno"> 2418</span>&#160;</div><div class="line"><a name="l02419"></a><span class="lineno"> 2419</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToForgetWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l02420"></a><span class="lineno"> 2420</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToCellWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l02421"></a><span class="lineno"> 2421</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> recurrentToOutputWeightsTensor(recurrentWeightsInfo);</div><div class="line"><a name="l02422"></a><span class="lineno"> 2422</span>&#160;</div><div class="line"><a name="l02423"></a><span class="lineno"> 2423</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetGateBiasTensor(biasInfo);</div><div class="line"><a name="l02424"></a><span class="lineno"> 2424</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellBiasTensor(biasInfo);</div><div class="line"><a name="l02425"></a><span class="lineno"> 2425</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputGateBiasTensor(biasInfo);</div><div class="line"><a name="l02426"></a><span class="lineno"> 2426</span>&#160;</div><div class="line"><a name="l02427"></a><span class="lineno"> 2427</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> forgetLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l02428"></a><span class="lineno"> 2428</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> cellLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l02429"></a><span class="lineno"> 2429</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> outputLayerNormWeightsTensor(layerNormWeightsInfo);</div><div class="line"><a name="l02430"></a><span class="lineno"> 2430</span>&#160;</div><div class="line"><a name="l02431"></a><span 
class="lineno"> 2431</span>&#160;    <a class="code" href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a> projectionWeightsTensor(projectionWeightsInfo);</div><div class="line"><a name="l02432"></a><span class="lineno"> 2432</span>&#160;</div><div class="line"><a name="l02433"></a><span class="lineno"> 2433</span>&#160;    <span class="comment">// Allocate and copy data</span></div><div class="line"><a name="l02434"></a><span class="lineno"> 2434</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToForgetWeightsTensor, &amp;inputToForgetWeights[0][0]);</div><div class="line"><a name="l02435"></a><span class="lineno"> 2435</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToCellWeightsTensor, &amp;inputToCellWeights[0][0]);</div><div class="line"><a name="l02436"></a><span class="lineno"> 2436</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;inputToOutputWeightsTensor, &amp;inputToOutputWeights[0][0]);</div><div class="line"><a name="l02437"></a><span class="lineno"> 2437</span>&#160;</div><div class="line"><a name="l02438"></a><span class="lineno"> 2438</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToForgetWeightsTensor, &amp;recurrentToForgetWeights[0][0]);</div><div class="line"><a name="l02439"></a><span class="lineno"> 2439</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToCellWeightsTensor, &amp;recurrentToCellWeights[0][0]);</div><div class="line"><a name="l02440"></a><span class="lineno"> 2440</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;recurrentToOutputWeightsTensor, &amp;recurrentToOutputWeights[0][0]);</div><div class="line"><a name="l02441"></a><span class="lineno"> 2441</span>&#160;</div><div class="line"><a name="l02442"></a><span class="lineno"> 2442</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetGateBiasTensor, &amp;forgetGateBias[0]);</div><div class="line"><a name="l02443"></a><span class="lineno"> 2443</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellBiasTensor, &amp;cellBias[0]);</div><div class="line"><a name="l02444"></a><span class="lineno"> 2444</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputGateBiasTensor, &amp;outputGateBias[0]);</div><div class="line"><a name="l02445"></a><span class="lineno"> 2445</span>&#160;</div><div class="line"><a name="l02446"></a><span class="lineno"> 2446</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;forgetLayerNormWeightsTensor, &amp;forgetLayerNormWeights[0]);</div><div class="line"><a name="l02447"></a><span class="lineno"> 2447</span>&#160;    <a class="code" 
href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;cellLayerNormWeightsTensor, &amp;cellLayerNormWeights[0]);</div><div class="line"><a name="l02448"></a><span class="lineno"> 2448</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;outputLayerNormWeightsTensor, &amp;outputLayerNormWeights[0]);</div><div class="line"><a name="l02449"></a><span class="lineno"> 2449</span>&#160;</div><div class="line"><a name="l02450"></a><span class="lineno"> 2450</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a>(&amp;projectionWeightsTensor, &amp;projectionWeights[0][0]);</div><div class="line"><a name="l02451"></a><span class="lineno"> 2451</span>&#160;</div><div class="line"><a name="l02452"></a><span class="lineno"> 2452</span>&#160;    <span class="comment">// Setup queue descriptor</span></div><div class="line"><a name="l02453"></a><span class="lineno"> 2453</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">m_InputToForgetWeights</a> = &amp;inputToForgetWeightsTensor;</div><div class="line"><a name="l02454"></a><span class="lineno"> 2454</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a28ad98d17603fd8b12e046f8ece58970">m_InputToCellWeights</a> = &amp;inputToCellWeightsTensor;</div><div class="line"><a name="l02455"></a><span class="lineno"> 2455</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a83dc9086b2e4a4e4cadb66bd874df798">m_InputToOutputWeights</a> = &amp;inputToOutputWeightsTensor;</div><div class="line"><a name="l02456"></a><span class="lineno"> 2456</span>&#160;</div><div class="line"><a name="l02457"></a><span class="lineno"> 2457</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a45d73e66cbb2b65049e4016c20657ccf">m_RecurrentToForgetWeights</a> = &amp;recurrentToForgetWeightsTensor;</div><div class="line"><a name="l02458"></a><span class="lineno"> 2458</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">m_RecurrentToCellWeights</a> = &amp;recurrentToCellWeightsTensor;</div><div class="line"><a name="l02459"></a><span class="lineno"> 2459</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">m_RecurrentToOutputWeights</a> = &amp;recurrentToOutputWeightsTensor;</div><div class="line"><a name="l02460"></a><span class="lineno"> 2460</span>&#160;</div><div class="line"><a name="l02461"></a><span class="lineno"> 2461</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">m_ForgetGateBias</a> = &amp;forgetGateBiasTensor;</div><div class="line"><a name="l02462"></a><span class="lineno"> 2462</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">m_CellBias</a> = &amp;cellBiasTensor;</div><div class="line"><a name="l02463"></a><span class="lineno"> 2463</span>&#160;    data.<a class="code" href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">m_OutputGateBias</a> = &amp;outputGateBiasTensor;</div><div class="line"><a name="l02464"></a><span class="lineno"> 
    data.m_ForgetLayerNormWeights = &forgetLayerNormWeightsTensor;
    data.m_CellLayerNormWeights = &cellLayerNormWeightsTensor;
    data.m_OutputLayerNormWeights = &outputLayerNormWeightsTensor;

    data.m_ProjectionWeights = &projectionWeightsTensor;

    data.m_Parameters.m_CifgEnabled = cifgEnabled;
    data.m_Parameters.m_PeepholeEnabled = peepholeEnabled;
    data.m_Parameters.m_ProjectionEnabled = projectionEnabled;
    data.m_Parameters.m_LayerNormEnabled = layerNormEnabled;

    data.m_Parameters.m_InputIntermediateScale = inputIntermediateScale;
    data.m_Parameters.m_ForgetIntermediateScale = forgetIntermediateScale;
    data.m_Parameters.m_CellIntermediateScale = cellIntermediateScale;
    data.m_Parameters.m_OutputIntermediateScale = outputIntermediateScale;

    data.m_Parameters.m_HiddenStateZeroPoint = hiddenStateZeroPoint;
    data.m_Parameters.m_HiddenStateScale = hiddenStateScale;

    data.m_Parameters.m_CellClip = cellClip;
    data.m_Parameters.m_ProjectionClip = projectionClip;
class="lineno"> 2495</span>&#160;    outputHandle-&gt;Allocate();</div><div class="line"><a name="l02496"></a><span class="lineno"> 2496</span>&#160;</div><div class="line"><a name="l02497"></a><span class="lineno"> 2497</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(inputHandle.get(), &amp;inputTensor[0][0]);</div><div class="line"><a name="l02498"></a><span class="lineno"> 2498</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(outputStateInHandle.get(), &amp;outputStateInTensor[0][0]);</div><div class="line"><a name="l02499"></a><span class="lineno"> 2499</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a>(cellStateInHandle.get(), &amp;cellStateInTensor[0][0]);</div><div class="line"><a name="l02500"></a><span class="lineno"> 2500</span>&#160;</div><div class="line"><a name="l02501"></a><span class="lineno"> 2501</span>&#160;    workload-&gt;Execute();</div><div class="line"><a name="l02502"></a><span class="lineno"> 2502</span>&#160;</div><div class="line"><a name="l02503"></a><span class="lineno"> 2503</span>&#160;    <a class="code" href="_tensor_copy_utils_8cpp.xhtml#a99b626c58a926dc7d6df78d22ec186c8">CopyDataFromITensorHandle</a>(&amp;ret.output[0][0], outputHandle.get());</div><div class="line"><a name="l02504"></a><span class="lineno"> 2504</span>&#160;</div><div class="line"><a name="l02505"></a><span class="lineno"> 2505</span>&#160;    <span class="keywordflow">return</span> ret;</div><div class="line"><a name="l02506"></a><span class="lineno"> 2506</span>&#160;}</div><div class="line"><a name="l02507"></a><span class="lineno"> 2507</span>&#160;</div><div class="line"><a name="l02508"></a><span class="lineno"> 2508</span>&#160;</div><div class="line"><a name="l02509"></a><span class="lineno"> 2509</span>&#160;} <span class="comment">// anonymous namespace</span></div><div class="line"><a name="l02510"></a><span class="lineno"> 2510</span>&#160;</div><div class="line"><a name="l02511"></a><span class="lineno"> 2511</span>&#160;<span class="preprocessor">#if defined(ARMNNREF_ENABLED)</span></div><div class="line"><a name="l02512"></a><span class="lineno"> 2512</span>&#160;</div><div class="line"><a name="l02513"></a><span class="lineno"> 2513</span>&#160;<span class="comment">// The LSTM test units are run only for the reference backend at the moment</span></div><div class="line"><a name="l02514"></a><span class="lineno"> 2514</span>&#160;</div><div class="line"><a name="l02515"></a><span class="lineno"> 2515</span>&#160;<span class="keywordtype">void</span> LstmUtilsZeroVectorTest()</div><div class="line"><a name="l02516"></a><span class="lineno"> 2516</span>&#160;{</div><div class="line"><a name="l02517"></a><span class="lineno"> 2517</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({4}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02518"></a><span class="lineno"> 2518</span>&#160;    boost::multi_array&lt;float, 1&gt; input = MakeTensor&lt;float, 1&gt;(inputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02519"></a><span class="lineno"> 2519</span>&#160;            {2., 3., 3., 4.}));</div><div class="line"><a name="l02520"></a><span 
class="lineno"> 2520</span>&#160;</div><div class="line"><a name="l02521"></a><span class="lineno"> 2521</span>&#160;    boost::multi_array&lt;float, 1&gt; expectedOutput = MakeTensor&lt;float, 1&gt;(inputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02522"></a><span class="lineno"> 2522</span>&#160;            {0., 0., 0., 0.}));</div><div class="line"><a name="l02523"></a><span class="lineno"> 2523</span>&#160;</div><div class="line"><a name="l02524"></a><span class="lineno"> 2524</span>&#160;    <span class="keywordflow">return</span> LstmUtilsZeroVectorTestImpl&lt;armnn::DataType::Float32&gt;(input, 4, expectedOutput);</div><div class="line"><a name="l02525"></a><span class="lineno"> 2525</span>&#160;}</div><div class="line"><a name="l02526"></a><span class="lineno"> 2526</span>&#160;</div><div class="line"><a name="l02527"></a><span class="lineno"> 2527</span>&#160;<span class="keywordtype">void</span> LstmUtilsMeanStddevNormalizationNoneZeroInputTest()</div><div class="line"><a name="l02528"></a><span class="lineno"> 2528</span>&#160;{</div><div class="line"><a name="l02529"></a><span class="lineno"> 2529</span>&#160;    uint32_t batchSize = 2;</div><div class="line"><a name="l02530"></a><span class="lineno"> 2530</span>&#160;    uint32_t vecSize = 4;</div><div class="line"><a name="l02531"></a><span class="lineno"> 2531</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({batchSize, vecSize}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02532"></a><span class="lineno"> 2532</span>&#160;    boost::multi_array&lt;float, 2&gt; input = MakeTensor&lt;float, 2&gt;(inputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02533"></a><span class="lineno"> 2533</span>&#160;            { 0.1f, 0.2f, 0.3f, 0.4f,      <span class="comment">//batch 0</span></div><div class="line"><a name="l02534"></a><span class="lineno"> 2534</span>&#160;              0.9f, 1.0f, 1.1f, 1.2f }));  <span class="comment">//batch 1</span></div><div class="line"><a name="l02535"></a><span class="lineno"> 2535</span>&#160;</div><div class="line"><a name="l02536"></a><span class="lineno"> 2536</span>&#160;    boost::multi_array&lt;float, 2&gt; expectedOutput = MakeTensor&lt;float, 2&gt;(inputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02537"></a><span class="lineno"> 2537</span>&#160;            { -1.34164071f, -0.447213531f, 0.44721365f,  1.34164071f,      <span class="comment">//batch 0</span></div><div class="line"><a name="l02538"></a><span class="lineno"> 2538</span>&#160;              -1.34163153f, -0.447210163f, 0.447211236f, 1.3416326f  }));  <span class="comment">//batch 1</span></div><div class="line"><a name="l02539"></a><span class="lineno"> 2539</span>&#160;</div><div class="line"><a name="l02540"></a><span class="lineno"> 2540</span>&#160;    <span class="keywordflow">return</span> LstmUtilsMeanStddevNormalizationTestImpl&lt;armnn::DataType::Float32&gt;(input,</div><div class="line"><a name="l02541"></a><span class="lineno"> 2541</span>&#160;            vecSize, batchSize, expectedOutput);</div><div class="line"><a name="l02542"></a><span class="lineno"> 2542</span>&#160;}</div><div class="line"><a name="l02543"></a><span class="lineno"> 2543</span>&#160;</div><div class="line"><a name="l02544"></a><span class="lineno"> 2544</span>&#160;<span class="keywordtype">void</span> 
void LstmUtilsMeanStddevNormalizationAllZeroInputTest()
{
    uint32_t batchSize = 2;
    uint32_t vecSize = 4;
    armnn::TensorInfo inputDesc({batchSize, vecSize}, armnn::DataType::Float32);
    boost::multi_array<float, 2> input = MakeTensor<float, 2>(inputDesc, std::vector<float>(
            { 0.0f, 0.0f, 0.0f, 0.0f,      //batch 0
              0.0f, 0.0f, 0.0f, 0.0f }));  //batch 1

    boost::multi_array<float, 2> expectedOutput = MakeTensor<float, 2>(inputDesc, std::vector<float>(
            { 0.0f, 0.0f, 0.0f, 0.0f,      //batch 0
              0.0f, 0.0f, 0.0f, 0.0f }));  //batch 1

    return LstmUtilsMeanStddevNormalizationTestImpl<armnn::DataType::Float32>(input,
            vecSize, batchSize, expectedOutput);
}
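// An all-zero input has zero variance; the expected output above is also all zeros, i.e. the
// normalisation helper is expected to leave such data unchanged rather than divide by a zero
// standard deviation.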
void LstmUtilsMeanStddevNormalizationMixedZeroInputTest()
{
    uint32_t batchSize = 2;
    uint32_t vecSize = 4;
    armnn::TensorInfo inputDesc({batchSize, vecSize}, armnn::DataType::Float32);
    boost::multi_array<float, 2> input = MakeTensor<float, 2>(inputDesc, std::vector<float>(
            { 0.0f, 0.0f, 0.0f, 0.0f,      //batch 0
              0.1f, 0.2f, 0.3f, 0.4f }));  //batch 1

    boost::multi_array<float, 2> expectedOutput = MakeTensor<float, 2>(inputDesc, std::vector<float>(
            {         0.0f,          0.0f,        0.0f,        0.0f,      //batch 0
              -1.34164071f, -0.447213531f, 0.44721365f, 1.34164071f }));  //batch 1

    return LstmUtilsMeanStddevNormalizationTestImpl<armnn::DataType::Float32>(input,
            vecSize, batchSize, expectedOutput);
}

void LstmUtilsVectorBatchVectorCwiseProductTest()
{
    uint32_t batchSize = 4;
    uint32_t vecSize = 29;
    armnn::TensorInfo vecDesc({vecSize}, armnn::DataType::Float32);
    boost::multi_array<float, 1> vector = MakeTensor<float, 1>(vecDesc, std::vector<float>(
            {   1.1f,   2.2f,   3.3f,   4.4f,   5.5f,   6.6f,   7.7f,   8.8f,   9.9f, 10.1f,
              11.11f, 12.12f, 13.13f, 14.14f, 15.15f, 16.16f, 17.17f, 18.18f, 19.19f, 20.2f,
              21.21f, 22.22f, 23.23f, 24.24f, 25.25f, 26.26f, 27.27f, 28.28f,     0.0f}));
    armnn::TensorInfo batchVecDesc({batchSize, vecSize}, armnn::DataType::Float32);
    boost::multi_array<float, 2> batchVector = MakeTensor<float, 2>(batchVecDesc, std::vector<float>(
            { /* batch 0 */
                1.1f,   2.2f,   3.3f,   4.4f,   5.5f,   6.6f,   7.7f,   8.8f,   9.9f,  10.1f,
              11.11f, 12.12f, 13.13f, 14.14f, 15.15f, 16.16f, 17.17f, 18.18f, 19.19f,  20.2f,
              21.21f, 22.22f, 23.23f, 24.24f, 25.25f, 26.26f, 27.27f, 28.28f,   0.0f,
              /* batch 1 */
                -1.1f,   -2.2f,   -3.3f,   -4.4f,   -5.5f,   -6.6f,   -7.7f,   -8.8f,   -9.9f, -10.1f,
              -11.11f, -12.12f, -13.13f, -14.14f, -15.15f, -16.16f, -17.17f, -18.18f, -19.19f, -20.2f,
              -21.21f, -22.22f, -23.23f, -24.24f, -25.25f, -26.26f, -27.27f, -28.28f,    0.0f,
              /* batch 2 */
                1.1f,   -2.2f,   3.3f,   -4.4f,   5.5f,   -6.6f,   7.7f,   -8.8f,   9.9f, -10.1f,
              11.11f, -12.12f, 13.13f, -14.14f, 15.15f, -16.16f, 17.17f, -18.18f, 19.19f, -20.2f,
              21.21f, -22.22f, 23.23f, -24.24f, 25.25f, -26.26f, 27.27f, -28.28f,   0.0f,
              /* batch 3 */
                -1.1f,   2.2f,   -3.3f,   4.4f,   -5.5f,   6.6f,   -7.7f,   8.8f,   -9.9f, 10.1f,
              -11.11f, 12.12f, -13.13f, 14.14f, -15.15f, 16.16f, -17.17f, 18.18f, -19.19f, 20.2f,
              -21.21f, 22.22f, -23.23f, 24.24f, -25.25f, 26.26f, -27.27f, 28.28f,    0.0f}));
    // Expected output is the element-wise product: output[b][i] = vector[i] * batchVector[b][i],
    // e.g. 1.1f * 1.1f = 1.21f for the first element of batch 0.
    boost::multi_array<float, 2> expectedOutput = MakeTensor<float, 2>(batchVecDesc, std::vector<float>(
            { /* batch 0 */
                 1.210000f,    4.840000f,   10.889999f,   19.360001f,   30.250000f,   43.559998f,
                59.289997f,   77.440002f,   98.009995f,  102.010010f,  123.432091f,  146.894394f,
               172.396896f,  199.939606f,  229.522491f,  261.145599f,  294.808899f,  330.512421f,
               368.256134f,  408.040039f,  449.864075f,  493.728363f,  539.632874f,  587.577576f,
               637.562500f,  689.587585f,  743.652954f,  799.758423f,    0.000000f,
              /* batch 1 */
                -1.210000f,   -4.840000f,  -10.889999f,  -19.360001f,  -30.250000f,  -43.559998f,
               -59.289997f,  -77.440002f,  -98.009995f, -102.010010f, -123.432091f, -146.894394f,
              -172.396896f, -199.939606f, -229.522491f, -261.145599f, -294.808899f, -330.512421f,
              -368.256134f, -408.040039f, -449.864075f, -493.728363f, -539.632874f, -587.577576f,
              -637.562500f, -689.587585f, -743.652954f, -799.758423f,    0.000000f,
              /* batch 2 */
                 1.210000f,   -4.840000f,  10.889999f,   -19.360001f,   30.250000f,  -43.559998f,
                59.289997f,  -77.440002f,  98.009995f,  -102.010010f,  123.432091f, -146.894394f,
               172.396896f, -199.939606f, 229.522491f,  -261.145599f,  294.808899f, -330.512421f,
               368.256134f, -408.040039f, 449.864075f,  -493.728363f,  539.632874f, -587.577576f,
               637.562500f, -689.587585f, 743.652954f,  -799.758423f,    0.000000f,
              /* batch 3 */
                -1.210000f,    4.840000f,  -10.889999f,   19.360001f,  -30.250000f,   43.559998f,
               -59.289997f,   77.440002f,  -98.009995f,  102.010010f, -123.432091f,  146.894394f,
              -172.396896f,  199.939606f, -229.522491f,  261.145599f, -294.808899f,  330.512421f,
         -368.256134f,  408.040039f, -449.864075f,  493.728363f, -539.632874f,  587.577576f,</div><div class="line"><a name="l02632"></a><span class="lineno"> 2632</span>&#160;              -637.562500f,  689.587585f, -743.652954f,  799.758423f,    0.000000f}));</div><div class="line"><a name="l02633"></a><span class="lineno"> 2633</span>&#160;</div><div class="line"><a name="l02634"></a><span class="lineno"> 2634</span>&#160;    <span class="keywordflow">return</span> LstmUtilsVectorBatchVectorCwiseProductTestImpl&lt;armnn::DataType::Float32&gt;(vector, batchVector,</div><div class="line"><a name="l02635"></a><span class="lineno"> 2635</span>&#160;            vecSize, batchSize, expectedOutput);</div><div class="line"><a name="l02636"></a><span class="lineno"> 2636</span>&#160;}</div><div class="line"><a name="l02637"></a><span class="lineno"> 2637</span>&#160;</div><div class="line"><a name="l02638"></a><span class="lineno"> 2638</span>&#160;<span class="keywordtype">void</span> LstmUtilsVectorBatchVectorAddTest()</div><div class="line"><a name="l02639"></a><span class="lineno"> 2639</span>&#160;{</div><div class="line"><a name="l02640"></a><span class="lineno"> 2640</span>&#160;    uint32_t batchSize = 2;</div><div class="line"><a name="l02641"></a><span class="lineno"> 2641</span>&#160;    uint32_t vecSize = 3;</div><div class="line"><a name="l02642"></a><span class="lineno"> 2642</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> vecDesc({vecSize}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02643"></a><span class="lineno"> 2643</span>&#160;    boost::multi_array&lt;float, 1&gt; vector = MakeTensor&lt;float, 1&gt;(vecDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02644"></a><span class="lineno"> 2644</span>&#160;            { 0.0f, -0.5f, 1.0f}));</div><div class="line"><a name="l02645"></a><span class="lineno"> 2645</span>&#160;</div><div class="line"><a name="l02646"></a><span class="lineno"> 2646</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> batchVecDesc({batchSize, vecSize}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02647"></a><span class="lineno"> 2647</span>&#160;    boost::multi_array&lt;float, 2&gt; batchVector = MakeTensor&lt;float, 2&gt;(batchVecDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02648"></a><span class="lineno"> 2648</span>&#160;            { 1.0f, 2.0f, 3.0f,    <span class="comment">//batch 0</span></div><div class="line"><a name="l02649"></a><span class="lineno"> 2649</span>&#160;              4.0f, 5.0f, 6.0f})); <span class="comment">//batch 1</span></div><div class="line"><a name="l02650"></a><span class="lineno"> 2650</span>&#160;</div><div class="line"><a name="l02651"></a><span class="lineno"> 2651</span>&#160;    boost::multi_array&lt;float, 2&gt; expectedOutput = MakeTensor&lt;float, 2&gt;(batchVecDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02652"></a><span class="lineno"> 2652</span>&#160;            { 1.0f, 1.5f, 4.0f,</div><div class="line"><a name="l02653"></a><span class="lineno"> 2653</span>&#160;              4.0f, 4.5f, 7.0f}));</div><div class="line"><a name="l02654"></a><span class="lineno"> 2654</span>&#160;</div><div class="line"><a 
name="l02655"></a><span class="lineno"> 2655</span>&#160;    <span class="keywordflow">return</span> LstmUtilsVectorBatchVectorAddTestImpl&lt;armnn::DataType::Float32&gt;(vector, batchVector,</div><div class="line"><a name="l02656"></a><span class="lineno"> 2656</span>&#160;            vecSize, batchSize, expectedOutput);</div><div class="line"><a name="l02657"></a><span class="lineno"> 2657</span>&#160;}</div><div class="line"><a name="l02658"></a><span class="lineno"> 2658</span>&#160;</div><div class="line"><a name="l02659"></a><span class="lineno"> 2659</span>&#160;<span class="preprocessor">#endif</span></div><div class="line"><a name="l02660"></a><span class="lineno"> 2660</span>&#160;</div><div class="line"><a name="l02661"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a84253a0be59acfd80d588141c07d4170"> 2661</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;float, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a84253a0be59acfd80d588141c07d4170">LstmLayerFloat32WithCifgWithPeepholeNoProjectionTest</a>(</div><div class="line"><a name="l02662"></a><span class="lineno"> 2662</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02663"></a><span class="lineno"> 2663</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02664"></a><span class="lineno"> 2664</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02665"></a><span class="lineno"> 2665</span>&#160;{</div><div class="line"><a name="l02666"></a><span class="lineno"> 2666</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({ 2, 2 }, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02667"></a><span class="lineno"> 2667</span>&#160;    boost::multi_array&lt;float, 2&gt; input = MakeTensor&lt;float, 2&gt;(inputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02668"></a><span class="lineno"> 2668</span>&#160;            { 2., 3., 3., 4. 
}));</div><div class="line"><a name="l02669"></a><span class="lineno"> 2669</span>&#160;</div><div class="line"><a name="l02670"></a><span class="lineno"> 2670</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({ 2, 4 }, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02671"></a><span class="lineno"> 2671</span>&#160;    boost::multi_array&lt;float, 2&gt; expectedOutput = MakeTensor&lt;float, 2&gt;(outputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02672"></a><span class="lineno"> 2672</span>&#160;            {-0.36444446f, -0.00352185f, 0.12886585f, -0.05163646f,</div><div class="line"><a name="l02673"></a><span class="lineno"> 2673</span>&#160;             -0.42734814f, -0.00478661f,  0.13455015f, -0.03560682f}));</div><div class="line"><a name="l02674"></a><span class="lineno"> 2674</span>&#160;    <span class="keywordflow">return</span> LstmLayerWithCifgWithPeepholeNoProjectionTestImpl&lt;armnn::DataType::Float32&gt;(</div><div class="line"><a name="l02675"></a><span class="lineno"> 2675</span>&#160;        workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);</div><div class="line"><a name="l02676"></a><span class="lineno"> 2676</span>&#160;}</div><div class="line"><a name="l02677"></a><span class="lineno"> 2677</span>&#160;</div><div class="line"><a name="l02678"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a8878c1a79890f404aac4c474204756c1"> 2678</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;float, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a8878c1a79890f404aac4c474204756c1">LstmLayerFloat32NoCifgWithPeepholeWithProjectionTest</a>(</div><div class="line"><a name="l02679"></a><span class="lineno"> 2679</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02680"></a><span class="lineno"> 2680</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02681"></a><span class="lineno"> 2681</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02682"></a><span class="lineno"> 2682</span>&#160;{</div><div class="line"><a name="l02683"></a><span class="lineno"> 2683</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({ 2, 5 }, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02684"></a><span class="lineno"> 2684</span>&#160;    boost::multi_array&lt;float, 2&gt; input = MakeTensor&lt;float, 2&gt;(inputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02685"></a><span class="lineno"> 2685</span>&#160;            {0.787926f, 0.151646f, 0.071352f, 0.118426f, 0.458058f,</div><div class="line"><a name="l02686"></a><span class="lineno"> 2686</span>&#160;             0.295743f, 0.544053f, 0.690064f, 0.858138f, 0.497181f}));</div><div class="line"><a 
name="l02687"></a><span class="lineno"> 2687</span>&#160;</div><div class="line"><a name="l02688"></a><span class="lineno"> 2688</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({ 2, 16 }, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02689"></a><span class="lineno"> 2689</span>&#160;    boost::multi_array&lt;float, 2&gt; expectedOutput = MakeTensor&lt;float, 2&gt;(outputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02690"></a><span class="lineno"> 2690</span>&#160;            {-0.00396806f, 0.029352f,     -0.00279226f, 0.0159977f,   -0.00835576f,</div><div class="line"><a name="l02691"></a><span class="lineno"> 2691</span>&#160;             -0.0211779f,  0.0283512f,    -0.0114597f,  0.00907307f,  -0.0244004f,</div><div class="line"><a name="l02692"></a><span class="lineno"> 2692</span>&#160;             -0.0152191f,  -0.0259063f,   0.00914318f,  0.00415118f,  0.017147f,</div><div class="line"><a name="l02693"></a><span class="lineno"> 2693</span>&#160;             0.0134203f, -0.013869f,    0.0287268f,   -0.00334693f, 0.00733398f,  -0.0287926f,</div><div class="line"><a name="l02694"></a><span class="lineno"> 2694</span>&#160;             -0.0186926f,   0.0193662f,   -0.0115437f,  0.00422612f,  -0.0345232f,</div><div class="line"><a name="l02695"></a><span class="lineno"> 2695</span>&#160;             0.00223253f,   -0.00957321f, 0.0210624f,   0.013331f,    0.0150954f,</div><div class="line"><a name="l02696"></a><span class="lineno"> 2696</span>&#160;             0.02168f}));</div><div class="line"><a name="l02697"></a><span class="lineno"> 2697</span>&#160;    <span class="keywordflow">return</span> LstmLayerNoCifgWithPeepholeWithProjectionTestImpl&lt;armnn::DataType::Float32&gt;(</div><div class="line"><a name="l02698"></a><span class="lineno"> 2698</span>&#160;        workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);</div><div class="line"><a name="l02699"></a><span class="lineno"> 2699</span>&#160;}</div><div class="line"><a name="l02700"></a><span class="lineno"> 2700</span>&#160;</div><div class="line"><a name="l02701"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a347d60d5d81c1c5dfdae562b998503f9"> 2701</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;float, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a347d60d5d81c1c5dfdae562b998503f9">LstmLayerFloat32NoCifgNoPeepholeNoProjectionTest</a>(</div><div class="line"><a name="l02702"></a><span class="lineno"> 2702</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02703"></a><span class="lineno"> 2703</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02704"></a><span class="lineno"> 2704</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02705"></a><span class="lineno"> 2705</span>&#160;{</div><div class="line"><a name="l02706"></a><span class="lineno"> 
2706</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({2, 2}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02707"></a><span class="lineno"> 2707</span>&#160;    boost::multi_array&lt;float, 2&gt; input = MakeTensor&lt;float, 2&gt;(inputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02708"></a><span class="lineno"> 2708</span>&#160;            {2., 3., 3., 4.}));</div><div class="line"><a name="l02709"></a><span class="lineno"> 2709</span>&#160;</div><div class="line"><a name="l02710"></a><span class="lineno"> 2710</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({2, 4}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02711"></a><span class="lineno"> 2711</span>&#160;    boost::multi_array&lt;float, 2&gt; expectedOutput = MakeTensor&lt;float, 2&gt;(outputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02712"></a><span class="lineno"> 2712</span>&#160;            {{-0.02973187f, 0.1229473f,   0.20885126f, -0.15358765f,</div><div class="line"><a name="l02713"></a><span class="lineno"> 2713</span>&#160;              -0.0185422f,   0.11281417f,  0.24466537f, -0.1826292f}}));</div><div class="line"><a name="l02714"></a><span class="lineno"> 2714</span>&#160;</div><div class="line"><a name="l02715"></a><span class="lineno"> 2715</span>&#160;    <span class="keywordflow">return</span> LstmNoCifgNoPeepholeNoProjectionTestImpl&lt;armnn::DataType::Float32&gt;(</div><div class="line"><a name="l02716"></a><span class="lineno"> 2716</span>&#160;        workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);</div><div class="line"><a name="l02717"></a><span class="lineno"> 2717</span>&#160;}</div><div class="line"><a name="l02718"></a><span class="lineno"> 2718</span>&#160;</div><div class="line"><a name="l02719"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a64d9b9a6cd29cd7a7f4bbf514fde01d5"> 2719</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;float, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a64d9b9a6cd29cd7a7f4bbf514fde01d5">LstmLayerFloat32NoCifgWithPeepholeWithProjectionWithLayerNormTest</a>(</div><div class="line"><a name="l02720"></a><span class="lineno"> 2720</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02721"></a><span class="lineno"> 2721</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02722"></a><span class="lineno"> 2722</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02723"></a><span class="lineno"> 2723</span>&#160;{</div><div class="line"><a name="l02724"></a><span class="lineno"> 2724</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({ 2, 5 }, <a class="code" 
href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02725"></a><span class="lineno"> 2725</span>&#160;    boost::multi_array&lt;float, 2&gt; input = MakeTensor&lt;float, 2&gt;(inputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02726"></a><span class="lineno"> 2726</span>&#160;            {0.7f, 0.8f, 0.1f, 0.2f, 0.3f,     <span class="comment">//batch 0</span></div><div class="line"><a name="l02727"></a><span class="lineno"> 2727</span>&#160;             0.3f, 0.2f, 0.9f, 0.8f, 0.1f}));  <span class="comment">//batch 1</span></div><div class="line"><a name="l02728"></a><span class="lineno"> 2728</span>&#160;</div><div class="line"><a name="l02729"></a><span class="lineno"> 2729</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({ 2, 3 }, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a>);</div><div class="line"><a name="l02730"></a><span class="lineno"> 2730</span>&#160;    boost::multi_array&lt;float, 2&gt; expectedOutput = MakeTensor&lt;float, 2&gt;(outputDesc, std::vector&lt;float&gt;(</div><div class="line"><a name="l02731"></a><span class="lineno"> 2731</span>&#160;            {  0.0244077f,  0.128027f, -0.00170918f,    <span class="comment">//batch 0</span></div><div class="line"><a name="l02732"></a><span class="lineno"> 2732</span>&#160;             -0.00692428f, 0.0848741f,    0.063445f})); <span class="comment">//batch 1</span></div><div class="line"><a name="l02733"></a><span class="lineno"> 2733</span>&#160;    <span class="keywordflow">return</span> LstmLayerNoCifgWithPeepholeWithProjectionWithLayerNormTestImpl&lt;armnn::DataType::Float32&gt;(</div><div class="line"><a name="l02734"></a><span class="lineno"> 2734</span>&#160;        workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);</div><div class="line"><a name="l02735"></a><span class="lineno"> 2735</span>&#160;}</div><div class="line"><a name="l02736"></a><span class="lineno"> 2736</span>&#160;</div><div class="line"><a name="l02737"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#ab1cbad4ed077c4b6279b28af3449b452"> 2737</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int16_t, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#ab1cbad4ed077c4b6279b28af3449b452">LstmLayerInt16NoCifgNoPeepholeNoProjectionTest</a>(</div><div class="line"><a name="l02738"></a><span class="lineno"> 2738</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02739"></a><span class="lineno"> 2739</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02740"></a><span class="lineno"> 2740</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02741"></a><span class="lineno"> 2741</span>&#160;{</div><div class="line"><a name="l02742"></a><span class="lineno"> 2742</span>&#160;    <span class="keyword">const</span> <span 
class="keywordtype">float</span> qScale = 1.0f;</div><div class="line"><a name="l02743"></a><span class="lineno"> 2743</span>&#160;    <span class="keyword">const</span> int32_t qOffset = 0;</div><div class="line"><a name="l02744"></a><span class="lineno"> 2744</span>&#160;</div><div class="line"><a name="l02745"></a><span class="lineno"> 2745</span>&#160;    <span class="keyword">const</span> <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> datatype = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>;</div><div class="line"><a name="l02746"></a><span class="lineno"> 2746</span>&#160;    <span class="keyword">const</span> <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> constantDatatype = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::DataType::QAsymmU8</a>;</div><div class="line"><a name="l02747"></a><span class="lineno"> 2747</span>&#160;</div><div class="line"><a name="l02748"></a><span class="lineno"> 2748</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({2, 2}, datatype);</div><div class="line"><a name="l02749"></a><span class="lineno"> 2749</span>&#160;    boost::multi_array&lt;int16_t , 2&gt; input = MakeTensor&lt;int16_t , 2&gt;(</div><div class="line"><a name="l02750"></a><span class="lineno"> 2750</span>&#160;        inputDesc,</div><div class="line"><a name="l02751"></a><span class="lineno"> 2751</span>&#160;        armnnUtils::QuantizedVector&lt;int16_t&gt;({ 2.f, 3.f, 3.f, 4.f }, qScale, qOffset));</div><div class="line"><a name="l02752"></a><span class="lineno"> 2752</span>&#160;</div><div class="line"><a name="l02753"></a><span class="lineno"> 2753</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({2, 4}, datatype);</div><div class="line"><a name="l02754"></a><span class="lineno"> 2754</span>&#160;    boost::multi_array&lt;int16_t, 2&gt; expectedOutput = MakeTensor&lt;int16_t, 2&gt;(</div><div class="line"><a name="l02755"></a><span class="lineno"> 2755</span>&#160;        outputDesc,</div><div class="line"><a name="l02756"></a><span class="lineno"> 2756</span>&#160;        armnnUtils::QuantizedVector&lt;int16_t&gt;(</div><div class="line"><a name="l02757"></a><span class="lineno"> 2757</span>&#160;            {</div><div class="line"><a name="l02758"></a><span class="lineno"> 2758</span>&#160;                -0.02973187f, 0.12294730f, 0.20885126f, -0.15358765f,</div><div class="line"><a name="l02759"></a><span class="lineno"> 2759</span>&#160;                -0.01854220f, 0.11281417f, 0.24466537f, -0.18262920f</div><div class="line"><a name="l02760"></a><span class="lineno"> 2760</span>&#160;            },</div><div class="line"><a name="l02761"></a><span class="lineno"> 2761</span>&#160;            qScale, qOffset));</div><div class="line"><a name="l02762"></a><span class="lineno"> 2762</span>&#160;</div><div class="line"><a name="l02763"></a><span class="lineno"> 2763</span>&#160;    <span class="keywordflow">return</span> LstmNoCifgNoPeepholeNoProjectionTestImpl&lt;datatype&gt;(</div><div class="line"><a name="l02764"></a><span class="lineno"> 2764</span>&#160;        workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput, qScale, qOffset, constantDatatype);</div><div class="line"><a 
name="l02765"></a><span class="lineno"> 2765</span>&#160;</div><div class="line"><a name="l02766"></a><span class="lineno"> 2766</span>&#160;}</div><div class="line"><a name="l02767"></a><span class="lineno"> 2767</span>&#160;</div><div class="line"><a name="l02768"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a0c1fe44fe410e2ef4103d9a724234fc3"> 2768</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int16_t, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a0c1fe44fe410e2ef4103d9a724234fc3">LstmLayerInt16WithCifgWithPeepholeNoProjectionTest</a>(</div><div class="line"><a name="l02769"></a><span class="lineno"> 2769</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02770"></a><span class="lineno"> 2770</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02771"></a><span class="lineno"> 2771</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02772"></a><span class="lineno"> 2772</span>&#160;{</div><div class="line"><a name="l02773"></a><span class="lineno"> 2773</span>&#160;    <span class="keyword">const</span> <span class="keywordtype">float</span> qScale = 1.0f;</div><div class="line"><a name="l02774"></a><span class="lineno"> 2774</span>&#160;    <span class="keyword">const</span> int32_t qOffset = 0;</div><div class="line"><a name="l02775"></a><span class="lineno"> 2775</span>&#160;</div><div class="line"><a name="l02776"></a><span class="lineno"> 2776</span>&#160;    <span class="keyword">const</span> <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> datatype = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>;</div><div class="line"><a name="l02777"></a><span class="lineno"> 2777</span>&#160;    <span class="keyword">const</span> <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> constantDatatype = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::DataType::QAsymmU8</a>;</div><div class="line"><a name="l02778"></a><span class="lineno"> 2778</span>&#160;</div><div class="line"><a name="l02779"></a><span class="lineno"> 2779</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({ 2, 2 }, datatype);</div><div class="line"><a name="l02780"></a><span class="lineno"> 2780</span>&#160;    boost::multi_array&lt;int16_t, 2&gt; input =</div><div class="line"><a name="l02781"></a><span class="lineno"> 2781</span>&#160;        MakeTensor&lt;int16_t, 2&gt;(</div><div class="line"><a name="l02782"></a><span class="lineno"> 2782</span>&#160;            inputDesc,</div><div class="line"><a name="l02783"></a><span class="lineno"> 2783</span>&#160;            armnnUtils::QuantizedVector&lt;int16_t&gt;({ 2.f, 3.f, 3.f, 4.f }, qScale, qOffset));</div><div class="line"><a name="l02784"></a><span class="lineno"> 2784</span>&#160;</div><div class="line"><a 
name="l02785"></a><span class="lineno"> 2785</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({ 2, 4 }, datatype);</div><div class="line"><a name="l02786"></a><span class="lineno"> 2786</span>&#160;    boost::multi_array&lt;int16_t, 2&gt; expectedOutput =</div><div class="line"><a name="l02787"></a><span class="lineno"> 2787</span>&#160;        MakeTensor&lt;int16_t, 2&gt;(</div><div class="line"><a name="l02788"></a><span class="lineno"> 2788</span>&#160;            outputDesc,</div><div class="line"><a name="l02789"></a><span class="lineno"> 2789</span>&#160;            armnnUtils::QuantizedVector&lt;int16_t&gt;(</div><div class="line"><a name="l02790"></a><span class="lineno"> 2790</span>&#160;                {</div><div class="line"><a name="l02791"></a><span class="lineno"> 2791</span>&#160;                    -0.36444446f, -0.00352185f, 0.12886585f, -0.05163646f,</div><div class="line"><a name="l02792"></a><span class="lineno"> 2792</span>&#160;                    -0.42734814f, -0.00478661f, 0.13455015f, -0.03560682f</div><div class="line"><a name="l02793"></a><span class="lineno"> 2793</span>&#160;                },</div><div class="line"><a name="l02794"></a><span class="lineno"> 2794</span>&#160;                qScale, qOffset));</div><div class="line"><a name="l02795"></a><span class="lineno"> 2795</span>&#160;</div><div class="line"><a name="l02796"></a><span class="lineno"> 2796</span>&#160;    <span class="keywordflow">return</span> LstmLayerWithCifgWithPeepholeNoProjectionTestImpl&lt;datatype&gt;(</div><div class="line"><a name="l02797"></a><span class="lineno"> 2797</span>&#160;        workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput, qScale, qOffset, constantDatatype);</div><div class="line"><a name="l02798"></a><span class="lineno"> 2798</span>&#160;}</div><div class="line"><a name="l02799"></a><span class="lineno"> 2799</span>&#160;</div><div class="line"><a name="l02800"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#af125f50b8c17b270fd1a9208c3391722"> 2800</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int16_t, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#af125f50b8c17b270fd1a9208c3391722">LstmLayerInt16NoCifgWithPeepholeWithProjectionTest</a>(</div><div class="line"><a name="l02801"></a><span class="lineno"> 2801</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02802"></a><span class="lineno"> 2802</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02803"></a><span class="lineno"> 2803</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02804"></a><span class="lineno"> 2804</span>&#160;{</div><div class="line"><a name="l02805"></a><span class="lineno"> 2805</span>&#160;    <span class="keyword">const</span> <span class="keywordtype">float</span> qScale = 2.0f;</div><div class="line"><a name="l02806"></a><span class="lineno"> 2806</span>&#160;    <span class="keyword">const</span> int32_t qOffset = 0;</div><div class="line"><a 
name="l02807"></a><span class="lineno"> 2807</span>&#160;</div><div class="line"><a name="l02808"></a><span class="lineno"> 2808</span>&#160;    <span class="keyword">const</span> <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> datatype = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>;</div><div class="line"><a name="l02809"></a><span class="lineno"> 2809</span>&#160;    <span class="keyword">const</span> <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> constantDatatype = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::DataType::QAsymmU8</a>;</div><div class="line"><a name="l02810"></a><span class="lineno"> 2810</span>&#160;</div><div class="line"><a name="l02811"></a><span class="lineno"> 2811</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({ 2, 5 }, datatype);</div><div class="line"><a name="l02812"></a><span class="lineno"> 2812</span>&#160;    boost::multi_array&lt;int16_t, 2&gt; input =</div><div class="line"><a name="l02813"></a><span class="lineno"> 2813</span>&#160;        MakeTensor&lt;int16_t, 2&gt;(</div><div class="line"><a name="l02814"></a><span class="lineno"> 2814</span>&#160;            inputDesc,</div><div class="line"><a name="l02815"></a><span class="lineno"> 2815</span>&#160;            armnnUtils::QuantizedVector&lt;int16_t&gt;(</div><div class="line"><a name="l02816"></a><span class="lineno"> 2816</span>&#160;                {</div><div class="line"><a name="l02817"></a><span class="lineno"> 2817</span>&#160;                    0.787926f, 0.151646f, 0.071352f, 0.118426f, 0.458058f,</div><div class="line"><a name="l02818"></a><span class="lineno"> 2818</span>&#160;                    0.295743f, 0.544053f, 0.690064f, 0.858138f, 0.497181f</div><div class="line"><a name="l02819"></a><span class="lineno"> 2819</span>&#160;                },</div><div class="line"><a name="l02820"></a><span class="lineno"> 2820</span>&#160;                qScale, qOffset));</div><div class="line"><a name="l02821"></a><span class="lineno"> 2821</span>&#160;</div><div class="line"><a name="l02822"></a><span class="lineno"> 2822</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({ 2, 16 }, datatype);</div><div class="line"><a name="l02823"></a><span class="lineno"> 2823</span>&#160;    boost::multi_array&lt;int16_t, 2&gt; expectedOutput =</div><div class="line"><a name="l02824"></a><span class="lineno"> 2824</span>&#160;        MakeTensor&lt;int16_t, 2&gt;(</div><div class="line"><a name="l02825"></a><span class="lineno"> 2825</span>&#160;            outputDesc,</div><div class="line"><a name="l02826"></a><span class="lineno"> 2826</span>&#160;            armnnUtils::QuantizedVector&lt;int16_t&gt;(</div><div class="line"><a name="l02827"></a><span class="lineno"> 2827</span>&#160;                {</div><div class="line"><a name="l02828"></a><span class="lineno"> 2828</span>&#160;                    -0.00396806f,  0.02935200f, -0.00279226f,  0.01599770f,</div><div class="line"><a name="l02829"></a><span class="lineno"> 2829</span>&#160;                    -0.00835576f, -0.02117790f,  0.02835120f, -0.01145970f,</div><div class="line"><a name="l02830"></a><span class="lineno"> 2830</span>&#160;                     0.00907307f, 
-0.02440040f, -0.01521910f, -0.02590630f,</div><div class="line"><a name="l02831"></a><span class="lineno"> 2831</span>&#160;                     0.00914318f,  0.00415118f,  0.01714700f,  0.01342030f,</div><div class="line"><a name="l02832"></a><span class="lineno"> 2832</span>&#160;                    -0.01386900f,  0.02872680f, -0.00334693f,  0.00733398f,</div><div class="line"><a name="l02833"></a><span class="lineno"> 2833</span>&#160;                    -0.02879260f, -0.01869260f,  0.01936620f, -0.01154370f,</div><div class="line"><a name="l02834"></a><span class="lineno"> 2834</span>&#160;                     0.00422612f, -0.03452320f,  0.00223253f, -0.00957321f,</div><div class="line"><a name="l02835"></a><span class="lineno"> 2835</span>&#160;                     0.02106240f,  0.01333100f,  0.01509540f,  0.02168000f</div><div class="line"><a name="l02836"></a><span class="lineno"> 2836</span>&#160;                },</div><div class="line"><a name="l02837"></a><span class="lineno"> 2837</span>&#160;                qScale, qOffset));</div><div class="line"><a name="l02838"></a><span class="lineno"> 2838</span>&#160;</div><div class="line"><a name="l02839"></a><span class="lineno"> 2839</span>&#160;    <span class="keywordflow">return</span> LstmLayerNoCifgWithPeepholeWithProjectionTestImpl&lt;datatype&gt;(</div><div class="line"><a name="l02840"></a><span class="lineno"> 2840</span>&#160;        workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput, qScale, qOffset, constantDatatype);</div><div class="line"><a name="l02841"></a><span class="lineno"> 2841</span>&#160;}</div><div class="line"><a name="l02842"></a><span class="lineno"> 2842</span>&#160;</div><div class="line"><a name="l02843"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a9430b95c437de781166385d51ced7bd0"> 2843</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int16_t, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a9430b95c437de781166385d51ced7bd0">LstmLayerInt16NoCifgNoPeepholeNoProjectionInt16ConstantTest</a>(</div><div class="line"><a name="l02844"></a><span class="lineno"> 2844</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02845"></a><span class="lineno"> 2845</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02846"></a><span class="lineno"> 2846</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02847"></a><span class="lineno"> 2847</span>&#160;{</div><div class="line"><a name="l02848"></a><span class="lineno"> 2848</span>&#160;    <span class="keyword">const</span> <span class="keywordtype">float</span> qScale = 1.0f;</div><div class="line"><a name="l02849"></a><span class="lineno"> 2849</span>&#160;    <span class="keyword">const</span> int32_t qOffset = 0;</div><div class="line"><a name="l02850"></a><span class="lineno"> 2850</span>&#160;</div><div class="line"><a name="l02851"></a><span class="lineno"> 2851</span>&#160;    <span class="keyword">const</span> <a class="code" 
href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a> datatype = <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a>; <span class="comment">// datatype &amp; constants set to QSymm16</span></div><div class="line"><a name="l02852"></a><span class="lineno"> 2852</span>&#160;</div><div class="line"><a name="l02853"></a><span class="lineno"> 2853</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({2, 2}, datatype);</div><div class="line"><a name="l02854"></a><span class="lineno"> 2854</span>&#160;    boost::multi_array&lt;int16_t , 2&gt; input =</div><div class="line"><a name="l02855"></a><span class="lineno"> 2855</span>&#160;        MakeTensor&lt;int16_t , 2&gt;(inputDesc,</div><div class="line"><a name="l02856"></a><span class="lineno"> 2856</span>&#160;                                armnnUtils::QuantizedVector&lt;int16_t&gt;({ 2.f, 3.f, 3.f, 4.f }, qScale, qOffset));</div><div class="line"><a name="l02857"></a><span class="lineno"> 2857</span>&#160;</div><div class="line"><a name="l02858"></a><span class="lineno"> 2858</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({2, 4}, datatype);</div><div class="line"><a name="l02859"></a><span class="lineno"> 2859</span>&#160;    boost::multi_array&lt;int16_t, 2&gt; expectedOutput =</div><div class="line"><a name="l02860"></a><span class="lineno"> 2860</span>&#160;        MakeTensor&lt;int16_t, 2&gt;(</div><div class="line"><a name="l02861"></a><span class="lineno"> 2861</span>&#160;            outputDesc,</div><div class="line"><a name="l02862"></a><span class="lineno"> 2862</span>&#160;            armnnUtils::QuantizedVector&lt;int16_t&gt;(</div><div class="line"><a name="l02863"></a><span class="lineno"> 2863</span>&#160;                {</div><div class="line"><a name="l02864"></a><span class="lineno"> 2864</span>&#160;                    -0.02973187f, 0.12294730f, 0.20885126f, -0.15358765f,</div><div class="line"><a name="l02865"></a><span class="lineno"> 2865</span>&#160;                    -0.01854220f, 0.11281417f, 0.24466537f, -0.18262920f</div><div class="line"><a name="l02866"></a><span class="lineno"> 2866</span>&#160;                },</div><div class="line"><a name="l02867"></a><span class="lineno"> 2867</span>&#160;                qScale, qOffset));</div><div class="line"><a name="l02868"></a><span class="lineno"> 2868</span>&#160;</div><div class="line"><a name="l02869"></a><span class="lineno"> 2869</span>&#160;    <span class="keywordflow">return</span> LstmNoCifgNoPeepholeNoProjectionTestImpl&lt;datatype&gt;(</div><div class="line"><a name="l02870"></a><span class="lineno"> 2870</span>&#160;        workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput, qScale, qOffset, datatype);</div><div class="line"><a name="l02871"></a><span class="lineno"> 2871</span>&#160;}</div><div class="line"><a name="l02872"></a><span class="lineno"> 2872</span>&#160;</div><div class="line"><a name="l02873"></a><span class="lineno"> 2873</span>&#160;<span class="comment">//</span></div><div class="line"><a name="l02874"></a><span class="lineno"> 2874</span>&#160;<span class="comment">// QuantizedLstm</span></div><div class="line"><a name="l02875"></a><span class="lineno"> 2875</span>&#160;<span class="comment">//</span></div><div class="line"><a name="l02876"></a><span class="lineno"> 
2876</span>&#160;</div><div class="line"><a name="l02877"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a8d9469ec08347dd451d782f102a6c8fa"> 2877</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;uint8_t, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a8d9469ec08347dd451d782f102a6c8fa">QuantizedLstmTest</a>(</div><div class="line"><a name="l02878"></a><span class="lineno"> 2878</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02879"></a><span class="lineno"> 2879</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02880"></a><span class="lineno"> 2880</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02881"></a><span class="lineno"> 2881</span>&#160;{</div><div class="line"><a name="l02882"></a><span class="lineno"> 2882</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({2, 2}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::DataType::QAsymmU8</a>);</div><div class="line"><a name="l02883"></a><span class="lineno"> 2883</span>&#160;    boost::multi_array&lt;uint8_t, 2&gt; input = MakeTensor&lt;uint8_t, 2&gt;(inputDesc, std::vector&lt;uint8_t&gt;(</div><div class="line"><a name="l02884"></a><span class="lineno"> 2884</span>&#160;        {166, 179, 50, 150}));</div><div class="line"><a name="l02885"></a><span class="lineno"> 2885</span>&#160;</div><div class="line"><a name="l02886"></a><span class="lineno"> 2886</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({2, 4}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::DataType::QAsymmU8</a>);</div><div class="line"><a name="l02887"></a><span class="lineno"> 2887</span>&#160;    boost::multi_array&lt;uint8_t, 2&gt; expectedOutput = MakeTensor&lt;uint8_t, 2&gt;(outputDesc, std::vector&lt;uint8_t&gt;(</div><div class="line"><a name="l02888"></a><span class="lineno"> 2888</span>&#160;        {140, 151, 146, 112, 136, 156, 142, 112 }));</div><div class="line"><a name="l02889"></a><span class="lineno"> 2889</span>&#160;</div><div class="line"><a name="l02890"></a><span class="lineno"> 2890</span>&#160;    <span class="keywordflow">return</span> QuantizedLstmTestImpl(workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);</div><div class="line"><a name="l02891"></a><span class="lineno"> 2891</span>&#160;}</div><div class="line"><a name="l02892"></a><span class="lineno"> 2892</span>&#160;</div><div class="line"><a name="l02893"></a><span class="lineno"> 2893</span>&#160;<span class="comment">// QLSTM</span></div><div class="line"><a name="l02894"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a3ca648bd28b5f0b835868282409b3458"> 2894</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int8_t, 2&gt;</a> <a class="code" 
href="_lstm_test_impl_8cpp.xhtml#a3ca648bd28b5f0b835868282409b3458">QLstmTest</a>(</div><div class="line"><a name="l02895"></a><span class="lineno"> 2895</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02896"></a><span class="lineno"> 2896</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02897"></a><span class="lineno"> 2897</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02898"></a><span class="lineno"> 2898</span>&#160;{</div><div class="line"><a name="l02899"></a><span class="lineno"> 2899</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({2, 5}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>);</div><div class="line"><a name="l02900"></a><span class="lineno"> 2900</span>&#160;    boost::multi_array&lt;int8_t, 2&gt; input = MakeTensor&lt;int8_t, 2&gt;(inputDesc, std::vector&lt;int8_t&gt;(</div><div class="line"><a name="l02901"></a><span class="lineno"> 2901</span>&#160;            {90, 102, 13, 26, 38, 102, 13, 26, 51, 64}));</div><div class="line"><a name="l02902"></a><span class="lineno"> 2902</span>&#160;</div><div class="line"><a name="l02903"></a><span class="lineno"> 2903</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({2, 4}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>);</div><div class="line"><a name="l02904"></a><span class="lineno"> 2904</span>&#160;    boost::multi_array&lt;int8_t, 2&gt; expectedOutput = MakeTensor&lt;int8_t, 2&gt;(outputDesc, std::vector&lt;int8_t&gt;(</div><div class="line"><a name="l02905"></a><span class="lineno"> 2905</span>&#160;            {-15, 21, 14, 20, -15, 15, 5, 27}));</div><div class="line"><a name="l02906"></a><span class="lineno"> 2906</span>&#160;</div><div class="line"><a name="l02907"></a><span class="lineno"> 2907</span>&#160;    <span class="keywordflow">return</span> QLstmTestImpl(workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);</div><div class="line"><a name="l02908"></a><span class="lineno"> 2908</span>&#160;}</div><div class="line"><a name="l02909"></a><span class="lineno"> 2909</span>&#160;</div><div class="line"><a name="l02910"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a95ad313188cccfdd052c4620b4dc0743"> 2910</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int8_t, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a95ad313188cccfdd052c4620b4dc0743">QLstmTest1</a>(</div><div class="line"><a name="l02911"></a><span class="lineno"> 2911</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02912"></a><span class="lineno"> 2912</span>&#160;    <span class="keyword">const</span> <a class="code" 
href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02913"></a><span class="lineno"> 2913</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02914"></a><span class="lineno"> 2914</span>&#160;{</div><div class="line"><a name="l02915"></a><span class="lineno"> 2915</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({2, 5}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>);</div><div class="line"><a name="l02916"></a><span class="lineno"> 2916</span>&#160;    boost::multi_array&lt;int8_t, 2&gt; input = MakeTensor&lt;int8_t, 2&gt;(inputDesc, std::vector&lt;int8_t&gt;(</div><div class="line"><a name="l02917"></a><span class="lineno"> 2917</span>&#160;            {90, 102, 13, 26, 38, 102, 13, 26, 51, 64}));</div><div class="line"><a name="l02918"></a><span class="lineno"> 2918</span>&#160;</div><div class="line"><a name="l02919"></a><span class="lineno"> 2919</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({2, 3}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>);</div><div class="line"><a name="l02920"></a><span class="lineno"> 2920</span>&#160;    boost::multi_array&lt;int8_t, 2&gt; expectedOutput = MakeTensor&lt;int8_t, 2&gt;(outputDesc, std::vector&lt;int8_t&gt;(</div><div class="line"><a name="l02921"></a><span class="lineno"> 2921</span>&#160;            {127, 127, -108, -67, 127, 127}));</div><div class="line"><a name="l02922"></a><span class="lineno"> 2922</span>&#160;</div><div class="line"><a name="l02923"></a><span class="lineno"> 2923</span>&#160;    <span class="keywordflow">return</span> QLstmTestImpl1(workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);</div><div class="line"><a name="l02924"></a><span class="lineno"> 2924</span>&#160;}</div><div class="line"><a name="l02925"></a><span class="lineno"> 2925</span>&#160;</div><div class="line"><a name="l02926"></a><span class="lineno"><a class="line" href="_lstm_test_impl_8hpp.xhtml#a017dcda811d5b5dc185f8c1d2e9b29f3"> 2926</a></span>&#160;<a class="code" href="struct_layer_test_result.xhtml">LayerTestResult&lt;int8_t, 2&gt;</a> <a class="code" href="_lstm_test_impl_8cpp.xhtml#a017dcda811d5b5dc185f8c1d2e9b29f3">QLstmTest2</a>(</div><div class="line"><a name="l02927"></a><span class="lineno"> 2927</span>&#160;    <a class="code" href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a>&amp; workloadFactory,</div><div class="line"><a name="l02928"></a><span class="lineno"> 2928</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a>&amp; memoryManager,</div><div class="line"><a name="l02929"></a><span class="lineno"> 2929</span>&#160;    <span class="keyword">const</span> <a class="code" href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a>&amp; tensorHandleFactory)</div><div class="line"><a name="l02930"></a><span class="lineno"> 
2930</span>&#160;{</div><div class="line"><a name="l02931"></a><span class="lineno"> 2931</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> inputDesc({2, 5}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>);</div><div class="line"><a name="l02932"></a><span class="lineno"> 2932</span>&#160;    boost::multi_array&lt;int8_t, 2&gt; input = MakeTensor&lt;int8_t, 2&gt;(inputDesc, std::vector&lt;int8_t&gt;(</div><div class="line"><a name="l02933"></a><span class="lineno"> 2933</span>&#160;            {90, 102, 13, 26, 38, 102, 13, 26, 51, 64}));</div><div class="line"><a name="l02934"></a><span class="lineno"> 2934</span>&#160;</div><div class="line"><a name="l02935"></a><span class="lineno"> 2935</span>&#160;    <a class="code" href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a> outputDesc({2, 3}, <a class="code" href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a>);</div><div class="line"><a name="l02936"></a><span class="lineno"> 2936</span>&#160;    boost::multi_array&lt;int8_t, 2&gt; expectedOutput = MakeTensor&lt;int8_t, 2&gt;(outputDesc, std::vector&lt;int8_t&gt;(</div><div class="line"><a name="l02937"></a><span class="lineno"> 2937</span>&#160;            {127, 127, 127, -128, 127, 127}));</div><div class="line"><a name="l02938"></a><span class="lineno"> 2938</span>&#160;</div><div class="line"><a name="l02939"></a><span class="lineno"> 2939</span>&#160;    <span class="keywordflow">return</span> QLstmTestImpl2(workloadFactory, memoryManager, tensorHandleFactory, input, expectedOutput);</div><div class="line"><a name="l02940"></a><span class="lineno"> 2940</span>&#160;}</div>
<div class="ttc" id="classarmnn_1_1_tensor_info_xhtml"><div class="ttname"><a href="classarmnn_1_1_tensor_info.xhtml">armnn::TensorInfo</a></div><div class="ttdef"><b>Definition:</b> <a href="_tensor_8hpp_source.xhtml#l00152">Tensor.hpp:152</a></div></div>
<div class="ttc" id="_quantize_helper_8hpp_xhtml"><div class="ttname"><a href="_quantize_helper_8hpp.xhtml">QuantizeHelper.hpp</a></div></div>
<div class="ttc" id="classarmnn_1_1_i_workload_factory_xhtml"><div class="ttname"><a href="classarmnn_1_1_i_workload_factory.xhtml">armnn::IWorkloadFactory</a></div><div class="ttdef"><b>Definition:</b> <a href="_workload_factory_8hpp_source.xhtml#l00022">WorkloadFactory.hpp:22</a></div></div>
<div class="ttc" id="_workload_test_utils_8hpp_xhtml"><div class="ttname"><a href="_workload_test_utils_8hpp.xhtml">WorkloadTestUtils.hpp</a></div></div>
<div class="ttc" id="_tensor_helpers_8hpp_xhtml_aa5a4b75c5fa1d312b4f3615b2315ff58"><div class="ttname"><a href="_tensor_helpers_8hpp.xhtml#aa5a4b75c5fa1d312b4f3615b2315ff58">CompareTensors</a></div><div class="ttdeci">boost::test_tools::predicate_result CompareTensors(const boost::multi_array&lt; T, n &gt; &amp;a, const boost::multi_array&lt; T, n &gt; &amp;b, bool compareBoolean=false, bool isDynamic=false)</div><div class="ttdef"><b>Definition:</b> <a href="_tensor_helpers_8hpp_source.xhtml#l00073">TensorHelpers.hpp:73</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_a2837b4396f20c956952d1a7286cab5f8"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#a2837b4396f20c956952d1a7286cab5f8">armnn::QLstmDescriptor::m_PeepholeEnabled</a></div><div class="ttdeci">bool m_PeepholeEnabled</div><div class="ttdoc">Enable/disable peephole. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01193">Descriptors.hpp:1193</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_ad8ed01ff3ff33333d8e19db4d2818bb6accedffbc6e5308e33d3843e8bdc0dad7"><div class="ttname"><a href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6accedffbc6e5308e33d3843e8bdc0dad7">armnn::DataType::Signed32</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_a28ad98d17603fd8b12e046f8ece58970"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a28ad98d17603fd8b12e046f8ece58970">armnn::QuantizedLstmQueueDescriptor::m_InputToCellWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToCellWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00625">WorkloadData.hpp:625</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_af8f724af7210b52529216feefa993c98"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#af8f724af7210b52529216feefa993c98">armnn::QLstmDescriptor::m_HiddenStateScale</a></div><div class="ttdeci">float m_HiddenStateScale</div><div class="ttdoc">Hidden State quantization scale. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01209">Descriptors.hpp:1209</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a453a4af385d0c060c9aac990fceaa1ef"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a453a4af385d0c060c9aac990fceaa1ef">armnn::QLstmQueueDescriptor::m_ForgetLayerNormWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_ForgetLayerNormWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00597">WorkloadData.hpp:597</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a2ba352eb1fdf6dc5ecf7f2e6b6b48f94"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a2ba352eb1fdf6dc5ecf7f2e6b6b48f94">armnn::LstmQueueDescriptor::m_ProjectionBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_ProjectionBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00430">WorkloadData.hpp:430</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_af125f50b8c17b270fd1a9208c3391722"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#af125f50b8c17b270fd1a9208c3391722">LstmLayerInt16NoCifgWithPeepholeWithProjectionTest</a></div><div class="ttdeci">LayerTestResult&lt; int16_t, 2 &gt; LstmLayerInt16NoCifgWithPeepholeWithProjectionTest(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02800">LstmTestImpl.cpp:2800</a></div></div>
<div class="ttc" id="classarmnn_1_1_i_workload_factory_xhtml_ab6bd7aaf685d4e956d780f8655a6f174"><div class="ttname"><a href="classarmnn_1_1_i_workload_factory.xhtml#ab6bd7aaf685d4e956d780f8655a6f174">armnn::IWorkloadFactory::CreateLstm</a></div><div class="ttdeci">virtual std::unique_ptr&lt; IWorkload &gt; CreateLstm(const LstmQueueDescriptor &amp;descriptor, const WorkloadInfo &amp;info) const</div><div class="ttdef"><b>Definition:</b> <a href="_workload_factory_8cpp_source.xhtml#l01489">WorkloadFactory.cpp:1489</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_aa43409f9b457352c95c89f20ce5d844d"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#aa43409f9b457352c95c89f20ce5d844d">armnn::QLstmDescriptor::m_OutputIntermediateScale</a></div><div class="ttdeci">float m_OutputIntermediateScale</div><div class="ttdoc">Output intermediate quantization scale. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01205">Descriptors.hpp:1205</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a518f0195d0278a892b49649b8860d17f"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a518f0195d0278a892b49649b8860d17f">armnn::QLstmQueueDescriptor::m_CellLayerNormWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_CellLayerNormWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00598">WorkloadData.hpp:598</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_a3ca648bd28b5f0b835868282409b3458"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#a3ca648bd28b5f0b835868282409b3458">QLstmTest</a></div><div class="ttdeci">LayerTestResult&lt; int8_t, 2 &gt; QLstmTest(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02894">LstmTestImpl.cpp:2894</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a332551528a4b3534c2d6c89ce816fcd9"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">armnn::LstmQueueDescriptor::m_OutputGateBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_OutputGateBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00428">WorkloadData.hpp:428</a></div></div>
<div class="ttc" id="classarmnn_1_1_i_workload_factory_xhtml_ab5ceda49651dcd53fb7eb05658b5a0cb"><div class="ttname"><a href="classarmnn_1_1_i_workload_factory.xhtml#ab5ceda49651dcd53fb7eb05658b5a0cb">armnn::IWorkloadFactory::CreateQuantizedLstm</a></div><div class="ttdeci">virtual std::unique_ptr&lt; IWorkload &gt; CreateQuantizedLstm(const QuantizedLstmQueueDescriptor &amp;descriptor, const WorkloadInfo &amp;info) const</div><div class="ttdef"><b>Definition:</b> <a href="_workload_factory_8cpp_source.xhtml#l01597">WorkloadFactory.cpp:1597</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a5c1c0a7ead7273788976c9e97cffaab7"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a5c1c0a7ead7273788976c9e97cffaab7">armnn::LstmQueueDescriptor::m_CellToInputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_CellToInputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00422">WorkloadData.hpp:422</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_aea142bd50ffb93631c2e08324ec92a1e"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">armnn::QLstmQueueDescriptor::m_RecurrentToCellWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToCellWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00585">WorkloadData.hpp:585</a></div></div>
<div class="ttc" id="classarmnn_1_1_i_workload_factory_xhtml_ab17bf35d906f8daad42be0a0799c72ab"><div class="ttname"><a href="classarmnn_1_1_i_workload_factory.xhtml#ab17bf35d906f8daad42be0a0799c72ab">armnn::IWorkloadFactory::CreateQLstm</a></div><div class="ttdeci">virtual std::unique_ptr&lt; IWorkload &gt; CreateQLstm(const QLstmQueueDescriptor &amp;descriptor, const WorkloadInfo &amp;info) const</div><div class="ttdef"><b>Definition:</b> <a href="_workload_factory_8cpp_source.xhtml#l01591">WorkloadFactory.cpp:1591</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a98d377149071d8842d610cc0734d1cfe"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a98d377149071d8842d610cc0734d1cfe">armnn::QLstmQueueDescriptor::m_RecurrentToInputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToInputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00583">WorkloadData.hpp:583</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d"><div class="ttname"><a href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9d02ea768c081d4bdb2b7cab0b3f510d">armnn::DataType::QAsymmS8</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a518f0195d0278a892b49649b8860d17f"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a518f0195d0278a892b49649b8860d17f">armnn::LstmQueueDescriptor::m_CellLayerNormWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_CellLayerNormWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00433">WorkloadData.hpp:433</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7"><div class="ttname"><a href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a053c769dcf82d66ef326c86980c02ba7">armnn::DataType::QSymmS16</a></div></div>
<div class="ttc" id="_lstm_utils_8cpp_xhtml_a4c20bc573b70e89327b334f924da97b5"><div class="ttname"><a href="_lstm_utils_8cpp.xhtml#a4c20bc573b70e89327b334f924da97b5">ZeroVector</a></div><div class="ttdeci">void ZeroVector(armnn::Encoder&lt; float &gt; &amp;vector, uint32_t vSize)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_utils_8cpp_source.xhtml#l00076">LstmUtils.cpp:76</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a332551528a4b3534c2d6c89ce816fcd9"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">armnn::QLstmQueueDescriptor::m_OutputGateBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_OutputGateBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00593">WorkloadData.hpp:593</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a75980b5795efd899a0c678a06a900c6d"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">armnn::QLstmQueueDescriptor::m_CellBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_CellBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00592">WorkloadData.hpp:592</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_a44affeeb090c3c6a3062830562672e84"><div class="ttname"><a href="namespacearmnn.xhtml#a44affeeb090c3c6a3062830562672e84">armnn::IgnoreUnused</a></div><div class="ttdeci">void IgnoreUnused(Ts &amp;&amp;...)</div><div class="ttdef"><b>Definition:</b> <a href="_ignore_unused_8hpp_source.xhtml#l00014">IgnoreUnused.hpp:14</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_acefa49d7faf26933e27e473e7bdb4175"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#acefa49d7faf26933e27e473e7bdb4175">armnn::LstmQueueDescriptor::m_CellToForgetWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_CellToForgetWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00423">WorkloadData.hpp:423</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a75980b5795efd899a0c678a06a900c6d"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">armnn::LstmQueueDescriptor::m_CellBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_CellBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00427">WorkloadData.hpp:427</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a98d377149071d8842d610cc0734d1cfe"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a98d377149071d8842d610cc0734d1cfe">armnn::LstmQueueDescriptor::m_RecurrentToInputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToInputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00418">WorkloadData.hpp:418</a></div></div>
<div class="ttc" id="structarmnn_1_1_queue_descriptor_with_parameters_xhtml_aad91b9bbf7aa365d304febe79a3d1333"><div class="ttname"><a href="structarmnn_1_1_queue_descriptor_with_parameters.xhtml#aad91b9bbf7aa365d304febe79a3d1333">armnn::QueueDescriptorWithParameters::m_Parameters</a></div><div class="ttdeci">LayerDescriptor m_Parameters</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00057">WorkloadData.hpp:57</a></div></div>
<div class="ttc" id="_lstm_utils_8cpp_xhtml_a1d7ad9698b02282a57fdb17b3af745f9"><div class="ttname"><a href="_lstm_utils_8cpp.xhtml#a1d7ad9698b02282a57fdb17b3af745f9">VectorBatchVectorCwiseProduct</a></div><div class="ttdeci">void VectorBatchVectorCwiseProduct(armnn::Decoder&lt; float &gt; &amp;vector, uint32_t vSize, armnn::Decoder&lt; float &gt; &amp;batchVector, uint32_t nBatch, armnn::Encoder&lt; float &gt; &amp;outResult)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_utils_8cpp_source.xhtml#l00152">LstmUtils.cpp:152</a></div></div>
<div class="ttc" id="_numeric_cast_8hpp_xhtml"><div class="ttname"><a href="_numeric_cast_8hpp.xhtml">NumericCast.hpp</a></div></div>
<div class="ttc" id="_encoders_8hpp_xhtml"><div class="ttname"><a href="_encoders_8hpp.xhtml">Encoders.hpp</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_aa3f07e27230d6d99adc2c82ba681df2b"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aa3f07e27230d6d99adc2c82ba681df2b">armnn::QLstmQueueDescriptor::m_OutputLayerNormWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_OutputLayerNormWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00599">WorkloadData.hpp:599</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_a84253a0be59acfd80d588141c07d4170"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#a84253a0be59acfd80d588141c07d4170">LstmLayerFloat32WithCifgWithPeepholeNoProjectionTest</a></div><div class="ttdeci">LayerTestResult&lt; float, 2 &gt; LstmLayerFloat32WithCifgWithPeepholeNoProjectionTest(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02661">LstmTestImpl.cpp:2661</a></div></div>
<div class="ttc" id="_tensor_helpers_8hpp_xhtml"><div class="ttname"><a href="_tensor_helpers_8hpp.xhtml">TensorHelpers.hpp</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a9cc28aa4fff6ba9a8abdb340c1abdd57"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a9cc28aa4fff6ba9a8abdb340c1abdd57">armnn::LstmQueueDescriptor::m_InputLayerNormWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputLayerNormWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00431">WorkloadData.hpp:431</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_a4a8ec49f130084445d44297549254780"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#a4a8ec49f130084445d44297549254780">armnn::QLstmDescriptor::m_LayerNormEnabled</a></div><div class="ttdeci">bool m_LayerNormEnabled</div><div class="ttdoc">Enable/disable layer normalization. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01197">Descriptors.hpp:1197</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_aa3f07e27230d6d99adc2c82ba681df2b"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#aa3f07e27230d6d99adc2c82ba681df2b">armnn::LstmQueueDescriptor::m_OutputLayerNormWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_OutputLayerNormWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00434">WorkloadData.hpp:434</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_ad8ed01ff3ff33333d8e19db4d2818bb6"><div class="ttname"><a href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6">armnn::DataType</a></div><div class="ttdeci">DataType</div><div class="ttdef"><b>Definition:</b> <a href="_types_8hpp_source.xhtml#l00032">Types.hpp:32</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml">armnn::LstmQueueDescriptor</a></div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00387">WorkloadData.hpp:387</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_aba3ffe91d818266b8785ce971548eb59"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">armnn::LstmQueueDescriptor::m_ForgetGateBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_ForgetGateBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00426">WorkloadData.hpp:426</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a3ea82566d98c5a657c76c3d851c47848"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">armnn::QLstmQueueDescriptor::m_InputToForgetWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToForgetWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00580">WorkloadData.hpp:580</a></div></div>
<div class="ttc" id="classarmnn_1_1_i_backend_internal_xhtml_a693b40e6b94e958836aeb0410ca186bd"><div class="ttname"><a href="classarmnn_1_1_i_backend_internal.xhtml#a693b40e6b94e958836aeb0410ca186bd">armnn::IBackendInternal::IMemoryManagerSharedPtr</a></div><div class="ttdeci">std::shared_ptr&lt; IMemoryManager &gt; IMemoryManagerSharedPtr</div><div class="ttdef"><b>Definition:</b> <a href="include_2armnn_2backends_2_i_backend_internal_8hpp_source.xhtml#l00092">IBackendInternal.hpp:92</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_aa6a518b65088f34803b3214334bdff61"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#aa6a518b65088f34803b3214334bdff61">armnn::QLstmDescriptor::m_ProjectionClip</a></div><div class="ttdeci">float m_ProjectionClip</div><div class="ttdoc">Clipping threshold value for the projection. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01189">Descriptors.hpp:1189</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e"><div class="ttname"><a href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a0a3f57c876f5a230244c38e1453a8a6e">armnn::DataType::QAsymmU8</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_aba3ffe91d818266b8785ce971548eb59"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">armnn::QuantizedLstmQueueDescriptor::m_ForgetGateBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_ForgetGateBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00634">WorkloadData.hpp:634</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a08a1932be591c315a512a877d38b22df"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a08a1932be591c315a512a877d38b22df">armnn::LstmQueueDescriptor::m_InputToInputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToInputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00414">WorkloadData.hpp:414</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_a09e1f097944f61cc901240f9300364cf"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#a09e1f097944f61cc901240f9300364cf">armnn::QLstmDescriptor::m_InputIntermediateScale</a></div><div class="ttdeci">float m_InputIntermediateScale</div><div class="ttdoc">Input intermediate quantization scale. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01199">Descriptors.hpp:1199</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_descriptor_xhtml_a2837b4396f20c956952d1a7286cab5f8"><div class="ttname"><a href="structarmnn_1_1_lstm_descriptor.xhtml#a2837b4396f20c956952d1a7286cab5f8">armnn::LstmDescriptor::m_PeepholeEnabled</a></div><div class="ttdeci">bool m_PeepholeEnabled</div><div class="ttdoc">Enable/disable peephole. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l00943">Descriptors.hpp:943</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_a98d377149071d8842d610cc0734d1cfe"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a98d377149071d8842d610cc0734d1cfe">armnn::QuantizedLstmQueueDescriptor::m_RecurrentToInputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToInputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00628">WorkloadData.hpp:628</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_a8d9469ec08347dd451d782f102a6c8fa"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#a8d9469ec08347dd451d782f102a6c8fa">QuantizedLstmTest</a></div><div class="ttdeci">LayerTestResult&lt; uint8_t, 2 &gt; QuantizedLstmTest(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02877">LstmTestImpl.cpp:2877</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_aea142bd50ffb93631c2e08324ec92a1e"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">armnn::QuantizedLstmQueueDescriptor::m_RecurrentToCellWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToCellWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00630">WorkloadData.hpp:630</a></div></div>
<div class="ttc" id="_tensor_copy_utils_8cpp_xhtml_afaaca8c3f3a467d124bba44067d2afa8"><div class="ttname"><a href="_tensor_copy_utils_8cpp.xhtml#afaaca8c3f3a467d124bba44067d2afa8">AllocateAndCopyDataToITensorHandle</a></div><div class="ttdeci">void AllocateAndCopyDataToITensorHandle(armnn::ITensorHandle *tensorHandle, const void *memory)</div><div class="ttdef"><b>Definition:</b> <a href="_tensor_copy_utils_8cpp_source.xhtml#l00019">TensorCopyUtils.cpp:19</a></div></div>
<div class="ttc" id="_tensor_copy_utils_8cpp_xhtml_a99b626c58a926dc7d6df78d22ec186c8"><div class="ttname"><a href="_tensor_copy_utils_8cpp.xhtml#a99b626c58a926dc7d6df78d22ec186c8">CopyDataFromITensorHandle</a></div><div class="ttdeci">void CopyDataFromITensorHandle(void *memory, const armnn::ITensorHandle *tensorHandle)</div><div class="ttdef"><b>Definition:</b> <a href="_tensor_copy_utils_8cpp_source.xhtml#l00014">TensorCopyUtils.cpp:14</a></div></div>
<div class="ttc" id="_decoders_8hpp_xhtml"><div class="ttname"><a href="_decoders_8hpp.xhtml">Decoders.hpp</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_adebc1771e5a1f4b113a7aa594ea74d2c"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">armnn::QuantizedLstmQueueDescriptor::m_RecurrentToOutputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToOutputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00631">WorkloadData.hpp:631</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_descriptor_xhtml_ae1b07ed928036004bd257169e5aeeef4"><div class="ttname"><a href="structarmnn_1_1_lstm_descriptor.xhtml#ae1b07ed928036004bd257169e5aeeef4">armnn::LstmDescriptor::m_ActivationFunc</a></div><div class="ttdeci">uint32_t m_ActivationFunc</div><div class="ttdoc">The activation function to use. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l00935">Descriptors.hpp:935</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_a347d60d5d81c1c5dfdae562b998503f9"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#a347d60d5d81c1c5dfdae562b998503f9">LstmLayerFloat32NoCifgNoPeepholeNoProjectionTest</a></div><div class="ttdeci">LayerTestResult&lt; float, 2 &gt; LstmLayerFloat32NoCifgNoPeepholeNoProjectionTest(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02701">LstmTestImpl.cpp:2701</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_afec7f36158448f723b426a9527acb189"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#afec7f36158448f723b426a9527acb189">armnn::QLstmDescriptor::m_ForgetIntermediateScale</a></div><div class="ttdeci">float m_ForgetIntermediateScale</div><div class="ttdoc">Forget intermediate quantization scale. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01201">Descriptors.hpp:1201</a></div></div>
<div class="ttc" id="_cpu_tensor_handle_8hpp_xhtml"><div class="ttname"><a href="_cpu_tensor_handle_8hpp.xhtml">CpuTensorHandle.hpp</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_ac81fb0e66dc623dc37c77f219f53a6d3"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#ac81fb0e66dc623dc37c77f219f53a6d3">armnn::QLstmDescriptor::m_CellClip</a></div><div class="ttdeci">float m_CellClip</div><div class="ttdoc">Clipping threshold value for the cell state. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01187">Descriptors.hpp:1187</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_descriptor_xhtml_ad474e5c51a0b194ef32e812b86c0cbdb"><div class="ttname"><a href="structarmnn_1_1_lstm_descriptor.xhtml#ad474e5c51a0b194ef32e812b86c0cbdb">armnn::LstmDescriptor::m_CifgEnabled</a></div><div class="ttdeci">bool m_CifgEnabled</div><div class="ttdoc">Enable/disable cifg (coupled input &amp; forget gate). </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l00941">Descriptors.hpp:941</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_adebc1771e5a1f4b113a7aa594ea74d2c"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">armnn::LstmQueueDescriptor::m_RecurrentToOutputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToOutputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00421">WorkloadData.hpp:421</a></div></div>
<div class="ttc" id="_lstm_test_impl_8hpp_xhtml"><div class="ttname"><a href="_lstm_test_impl_8hpp.xhtml">LstmTestImpl.hpp</a></div></div>
<div class="ttc" id="classarmnn_1_1_scoped_cpu_tensor_handle_xhtml"><div class="ttname"><a href="classarmnn_1_1_scoped_cpu_tensor_handle.xhtml">armnn::ScopedCpuTensorHandle</a></div><div class="ttdef"><b>Definition:</b> <a href="_cpu_tensor_handle_8hpp_source.xhtml#l00106">CpuTensorHandle.hpp:106</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_ab1cbad4ed077c4b6279b28af3449b452"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#ab1cbad4ed077c4b6279b28af3449b452">LstmLayerInt16NoCifgNoPeepholeNoProjectionTest</a></div><div class="ttdeci">LayerTestResult&lt; int16_t, 2 &gt; LstmLayerInt16NoCifgNoPeepholeNoProjectionTest(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02737">LstmTestImpl.cpp:2737</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_a9430b95c437de781166385d51ced7bd0"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#a9430b95c437de781166385d51ced7bd0">LstmLayerInt16NoCifgNoPeepholeNoProjectionInt16ConstantTest</a></div><div class="ttdeci">LayerTestResult&lt; int16_t, 2 &gt; LstmLayerInt16NoCifgNoPeepholeNoProjectionInt16ConstantTest(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02843">LstmTestImpl.cpp:2843</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_adebc1771e5a1f4b113a7aa594ea74d2c"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#adebc1771e5a1f4b113a7aa594ea74d2c">armnn::QLstmQueueDescriptor::m_RecurrentToOutputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToOutputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00586">WorkloadData.hpp:586</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a453a4af385d0c060c9aac990fceaa1ef"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a453a4af385d0c060c9aac990fceaa1ef">armnn::LstmQueueDescriptor::m_ForgetLayerNormWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_ForgetLayerNormWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00432">WorkloadData.hpp:432</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_a75980b5795efd899a0c678a06a900c6d"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a75980b5795efd899a0c678a06a900c6d">armnn::QuantizedLstmQueueDescriptor::m_CellBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_CellBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00635">WorkloadData.hpp:635</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml">armnn::QLstmQueueDescriptor</a></div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00552">WorkloadData.hpp:552</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_a4dc0adc6737b5944e7671bee71788407acaf9b6b99962bf5c2264824231d7a40c"><div class="ttname"><a href="namespacearmnn.xhtml#a4dc0adc6737b5944e7671bee71788407acaf9b6b99962bf5c2264824231d7a40c">armnn::BoostLogSeverityMapping::info</a></div></div>
<div class="ttc" id="classarmnn_1_1_i_tensor_handle_factory_xhtml"><div class="ttname"><a href="classarmnn_1_1_i_tensor_handle_factory.xhtml">armnn::ITensorHandleFactory</a></div><div class="ttdef"><b>Definition:</b> <a href="include_2armnn_2backends_2_i_tensor_handle_factory_8hpp_source.xhtml#l00041">ITensorHandleFactory.hpp:41</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_a6c9de81fc65b3c4924cab11907075a17"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#a6c9de81fc65b3c4924cab11907075a17">armnn::QLstmDescriptor::m_ProjectionEnabled</a></div><div class="ttdeci">bool m_ProjectionEnabled</div><div class="ttdoc">Enable/disable the projection layer. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01195">Descriptors.hpp:1195</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_a332551528a4b3534c2d6c89ce816fcd9"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a332551528a4b3534c2d6c89ce816fcd9">armnn::QuantizedLstmQueueDescriptor::m_OutputGateBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_OutputGateBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00636">WorkloadData.hpp:636</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_acb3aade8fae984f7293e222dcbe66030"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#acb3aade8fae984f7293e222dcbe66030">armnn::LstmQueueDescriptor::m_InputGateBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputGateBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00425">WorkloadData.hpp:425</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_descriptor_xhtml_a4a8ec49f130084445d44297549254780"><div class="ttname"><a href="structarmnn_1_1_lstm_descriptor.xhtml#a4a8ec49f130084445d44297549254780">armnn::LstmDescriptor::m_LayerNormEnabled</a></div><div class="ttdeci">bool m_LayerNormEnabled</div><div class="ttdoc">Enable/disable layer normalization. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l00947">Descriptors.hpp:947</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_a375ca3cff9f1b005d1412dc5f3cf5b6e"><div class="ttname"><a href="namespacearmnn.xhtml#a375ca3cff9f1b005d1412dc5f3cf5b6e">armnn::numeric_cast</a></div><div class="ttdeci">std::enable_if_t&lt; std::is_unsigned&lt; Source &gt;::value &amp;&amp;std::is_unsigned&lt; Dest &gt;::value, Dest &gt; numeric_cast(Source source)</div><div class="ttdef"><b>Definition:</b> <a href="_numeric_cast_8hpp_source.xhtml#l00035">NumericCast.hpp:35</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_acb3aade8fae984f7293e222dcbe66030"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#acb3aade8fae984f7293e222dcbe66030">armnn::QLstmQueueDescriptor::m_InputGateBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputGateBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00590">WorkloadData.hpp:590</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204"><div class="ttname"><a href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a166495adc0d0f53bee6baecc577f5204">armnn::DataType::Float32</a></div></div>
<div class="ttc" id="structarmnn_1_1_workload_info_xhtml"><div class="ttname"><a href="structarmnn_1_1_workload_info.xhtml">armnn::WorkloadInfo</a></div><div class="ttdoc">Contains information about inputs and outputs to a layer. </div><div class="ttdef"><b>Definition:</b> <a href="include_2armnn_2backends_2_workload_info_8hpp_source.xhtml#l00016">WorkloadInfo.hpp:16</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_aea142bd50ffb93631c2e08324ec92a1e"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#aea142bd50ffb93631c2e08324ec92a1e">armnn::LstmQueueDescriptor::m_RecurrentToCellWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToCellWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00420">WorkloadData.hpp:420</a></div></div>
<div class="ttc" id="struct_layer_test_result_xhtml"><div class="ttname"><a href="struct_layer_test_result.xhtml">LayerTestResult</a></div><div class="ttdef"><b>Definition:</b> <a href="_layer_test_result_8hpp_source.xhtml#l00030">LayerTestResult.hpp:30</a></div></div>
<div class="ttc" id="namespacearmnn_xhtml_ad8ed01ff3ff33333d8e19db4d2818bb6a9945327825b115e93a3b89f4302e76db"><div class="ttname"><a href="namespacearmnn.xhtml#ad8ed01ff3ff33333d8e19db4d2818bb6a9945327825b115e93a3b89f4302e76db">armnn::DataType::QSymmS8</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_a3ea82566d98c5a657c76c3d851c47848"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">armnn::QuantizedLstmQueueDescriptor::m_InputToForgetWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToForgetWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00624">WorkloadData.hpp:624</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a9cc28aa4fff6ba9a8abdb340c1abdd57"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a9cc28aa4fff6ba9a8abdb340c1abdd57">armnn::QLstmQueueDescriptor::m_InputLayerNormWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputLayerNormWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00596">WorkloadData.hpp:596</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a45d73e66cbb2b65049e4016c20657ccf"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a45d73e66cbb2b65049e4016c20657ccf">armnn::QLstmQueueDescriptor::m_RecurrentToForgetWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_RecurrentToForgetWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00584">WorkloadData.hpp:584</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_a83dc9086b2e4a4e4cadb66bd874df798"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a83dc9086b2e4a4e4cadb66bd874df798">armnn::QuantizedLstmQueueDescriptor::m_InputToOutputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToOutputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00626">WorkloadData.hpp:626</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_a64d9b9a6cd29cd7a7f4bbf514fde01d5"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#a64d9b9a6cd29cd7a7f4bbf514fde01d5">LstmLayerFloat32NoCifgWithPeepholeWithProjectionWithLayerNormTest</a></div><div class="ttdeci">LayerTestResult&lt; float, 2 &gt; LstmLayerFloat32NoCifgWithPeepholeWithProjectionWithLayerNormTest(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02719">LstmTestImpl.cpp:2719</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_aba3ffe91d818266b8785ce971548eb59"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#aba3ffe91d818266b8785ce971548eb59">armnn::QLstmQueueDescriptor::m_ForgetGateBias</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_ForgetGateBias</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00591">WorkloadData.hpp:591</a></div></div>
<div class="ttc" id="structarmnn_1_1_quantized_lstm_queue_descriptor_xhtml_a08a1932be591c315a512a877d38b22df"><div class="ttname"><a href="structarmnn_1_1_quantized_lstm_queue_descriptor.xhtml#a08a1932be591c315a512a877d38b22df">armnn::QuantizedLstmQueueDescriptor::m_InputToInputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToInputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00623">WorkloadData.hpp:623</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_a0477ee1b44ace6090119178eea78cb0b"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#a0477ee1b44ace6090119178eea78cb0b">armnn::QLstmDescriptor::m_CellIntermediateScale</a></div><div class="ttdeci">float m_CellIntermediateScale</div><div class="ttdoc">Cell intermediate quantization scale. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01203">Descriptors.hpp:1203</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a83dc9086b2e4a4e4cadb66bd874df798"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a83dc9086b2e4a4e4cadb66bd874df798">armnn::QLstmQueueDescriptor::m_InputToOutputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToOutputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00582">WorkloadData.hpp:582</a></div></div>
<div class="ttc" id="_lstm_utils_8hpp_xhtml"><div class="ttname"><a href="_lstm_utils_8hpp.xhtml">LstmUtils.hpp</a></div></div>
<div class="ttc" id="classarmnn_1_1_i_tensor_handle_factory_xhtml_a375f11dd42ff042435e8771cf287b20c"><div class="ttname"><a href="classarmnn_1_1_i_tensor_handle_factory.xhtml#a375f11dd42ff042435e8771cf287b20c">armnn::ITensorHandleFactory::CreateTensorHandle</a></div><div class="ttdeci">virtual std::unique_ptr&lt; ITensorHandle &gt; CreateTensorHandle(const TensorInfo &amp;tensorInfo) const =0</div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_ad474e5c51a0b194ef32e812b86c0cbdb"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#ad474e5c51a0b194ef32e812b86c0cbdb">armnn::QLstmDescriptor::m_CifgEnabled</a></div><div class="ttdeci">bool m_CifgEnabled</div><div class="ttdoc">Enable/disable CIFG (coupled input &amp; forget gate). </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01191">Descriptors.hpp:1191</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a08a1932be591c315a512a877d38b22df"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a08a1932be591c315a512a877d38b22df">armnn::QLstmQueueDescriptor::m_InputToInputWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToInputWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00579">WorkloadData.hpp:579</a></div></div>
<div class="ttc" id="_lstm_test_impl_8cpp_xhtml_a017dcda811d5b5dc185f8c1d2e9b29f3"><div class="ttname"><a href="_lstm_test_impl_8cpp.xhtml#a017dcda811d5b5dc185f8c1d2e9b29f3">QLstmTest2</a></div><div class="ttdeci">LayerTestResult&lt; int8_t, 2 &gt; QLstmTest2(armnn::IWorkloadFactory &amp;workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr &amp;memoryManager, const armnn::ITensorHandleFactory &amp;tensorHandleFactory)</div><div class="ttdef"><b>Definition:</b> <a href="_lstm_test_impl_8cpp_source.xhtml#l02926">LstmTestImpl.cpp:2926</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_af3c52626a6f05597d82ed095d0765962"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#af3c52626a6f05597d82ed095d0765962">armnn::LstmQueueDescriptor::m_ProjectionWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_ProjectionWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00429">WorkloadData.hpp:429</a></div></div>
<div class="ttc" id="_tensor_copy_utils_8cpp_xhtml_ae15f1a3c55d2db87683577de9fa4437c"><div class="ttname"><a href="_tensor_copy_utils_8cpp.xhtml#ae15f1a3c55d2db87683577de9fa4437c">CopyDataToITensorHandle</a></div><div class="ttdeci">void CopyDataToITensorHandle(armnn::ITensorHandle *tensorHandle, const void *memory)</div><div class="ttdef"><b>Definition:</b> <a href="_tensor_copy_utils_8cpp_source.xhtml#l00009">TensorCopyUtils.cpp:9</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_descriptor_xhtml_a4556cbd764d4848d8ad0637a9eed580d"><div class="ttname"><a href="structarmnn_1_1_q_lstm_descriptor.xhtml#a4556cbd764d4848d8ad0637a9eed580d">armnn::QLstmDescriptor::m_HiddenStateZeroPoint</a></div><div class="ttdeci">int32_t m_HiddenStateZeroPoint</div><div class="ttdoc">Hidden State zero point. </div><div class="ttdef"><b>Definition:</b> <a href="_descriptors_8hpp_source.xhtml#l01207">Descriptors.hpp:1207</a></div></div>
<div class="ttc" id="structarmnn_1_1_lstm_queue_descriptor_xhtml_a3ea82566d98c5a657c76c3d851c47848"><div class="ttname"><a href="structarmnn_1_1_lstm_queue_descriptor.xhtml#a3ea82566d98c5a657c76c3d851c47848">armnn::LstmQueueDescriptor::m_InputToForgetWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToForgetWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00415">WorkloadData.hpp:415</a></div></div>
<div class="ttc" id="structarmnn_1_1_q_lstm_queue_descriptor_xhtml_a28ad98d17603fd8b12e046f8ece58970"><div class="ttname"><a href="structarmnn_1_1_q_lstm_queue_descriptor.xhtml#a28ad98d17603fd8b12e046f8ece58970">armnn::QLstmQueueDescriptor::m_InputToCellWeights</a></div><div class="ttdeci">const ConstCpuTensorHandle * m_InputToCellWeights</div><div class="ttdef"><b>Definition:</b> <a href="_workload_data_8hpp_source.xhtml#l00581">WorkloadData.hpp:581</a></div></div>
</div><!-- fragment --></div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
  <ul>
    <li class="navelem"><a class="el" href="dir_68267d1309a1af8e8297ef4c3efbcdba.xhtml">src</a></li><li class="navelem"><a class="el" href="dir_0f3cdec46afbc61a1ded8e1687c9c9a0.xhtml">backends</a></li><li class="navelem"><a class="el" href="dir_797a213d7d01b98ef12d53b0820ea64e.xhtml">backendsCommon</a></li><li class="navelem"><a class="el" href="dir_28bfe507f7e135bdae07c2a6b7f66696.xhtml">test</a></li><li class="navelem"><a class="el" href="dir_99a30439342d160875b21dac3498ad7f.xhtml">layerTests</a></li><li class="navelem"><a class="el" href="_lstm_test_impl_8cpp.xhtml">LstmTestImpl.cpp</a></li>
    <li class="footer">Generated on Thu Feb 25 2021 17:27:51 for ArmNN by
    <a href="http://www.doxygen.org/index.html">
    <img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.13 </li>
  </ul>
</div>
</body>
</html>